Example #1
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EegMonitor.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = [
            'tsStateVars', 'tsModes', 'translationStep', 'total_length',
            'title', 'timeSetPaths', 'number_of_visible_points',
            'normalizedSteps', 'noOfChannels', 'labelsForCheckBoxes',
            'label_x', 'graphLabels', 'entities', 'channelsPage'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
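
A note on the assertion style used in these examples: when a key is absent,
assertTrue(key in result) fails with a bare False and gives no hint of which
key was missing. A small helper (a sketch only; assert_keys_present is a
hypothetical name, not part of the TVB test suite) reports every missing key
at once:

def assert_keys_present(test_case, result, expected_keys):
    # Collect every absent key so the failure message names them all,
    # instead of stopping at the first bare assertTrue(False).
    missing = [key for key in expected_keys if key not in result]
    test_case.assertEqual([], missing,
                          "Viewer output is missing keys: %s" % missing)

The for-loop in test_launch would then collapse to a single call:
assert_keys_present(self, result, expected_keys).
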
Example #2
class ConnectivityViewerTest(TransactionalTestCase):
    """
    Unit-tests for ConnectivityViewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from ConnectivityViewer launch.
        """
        viewer = ConnectivityViewer()
        result = viewer.launch(self.connectivity)
        expected_keys = [
            'weightsMin', 'weightsMax', 'weights', 'urlWeights', 'urlVertices',
            'urlTriangles', 'urlTracts', 'urlPositions', 'urlNormals',
            'rightHemisphereJson', 'raysArray', 'rayMin', 'rayMax',
            'positions', 'leftHemisphereJson', 'connectivity_entity',
            'bothHemisphereJson'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #3
class CovarianceViewerTest(TransactionalTestCase):
    """
    Unit-tests for CovarianceVisualizer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CovarianceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        covariance = self.datatypeFactory.create_covariance(time_series)
        viewer = CovarianceVisualizer()
        result = viewer.launch(covariance)
        expected_keys = [
            'matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent',
            'isAdapter'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #4
class CrossCoherenceViewerTest(TransactionalTestCase):
    """
    Unit-tests for CrossCoherenceVisualizer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CrossCoherenceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        ccoherence = self.datatypeFactory.create_crosscoherence(time_series)
        viewer = CrossCoherenceVisualizer()
        result = viewer.launch(ccoherence)
        expected_keys = [
            'strides', 'shape', 'mainContent', 'isAdapter', 'frequency',
            'coherence'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
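
The four viewer tests above repeat the same setUp and tearDown verbatim. A
hypothetical shared base class (a sketch; CffBasedViewerTest is not a name
from the TVB code base) could factor out the CFF import, leaving each test
class with only its test_launch method:

class CffBasedViewerTest(TransactionalTestCase):
    """
    Hypothetical shared fixture: imports a CFF data-set and exposes the
    resulting connectivity and cortical surface to subclasses.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)
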
Example #5
class PSETest(TransactionalTestCase):
    """
    Unit-tests for the TimeSeries viewer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from the TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = [
            't0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
            'mainContent', 'labels', 'labels_json', 'figsize', 'dt'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #6
class RegionMappingImporterTest(TransactionalTestCase):
    """
    Unit-tests for RegionMapping importer.
    """

    TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.txt')
    ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.zip')
    BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.bz2')

    # Wrong data
    WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_1.txt')
    WRONG_FILE_2 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_2.txt')
    WRONG_FILE_3 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_3.txt')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _get_entity(self, expected_data, filters=None):
        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type, filters)
        self.assertEqual(
            1, len(data_types), "Project should contain only one data type: " +
            str(expected_data.type))

        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(entity is not None, "Instance should not be none")

        return entity

    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "test",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we imported a CFF, which creates an additional
        # RegionMapping, so we filter for the mapping we just imported.
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping

    def test_import_no_surface_or_connectivity(self):
        """
            This method tests import of region mapping without providing a surface or connectivity
        """
        try:
            self._import(self.TXT_FILE, None, self.connectivity.gid)
            self.fail("Import should fail in case Surface is missing")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.TXT_FILE, self.surface.gid, None)
            self.fail("Import should fail in case Connectivity is missing")
        except OperationException:
            # Expected exception
            pass

    def test_import_from_txt(self):
        """
        Tests import of a region mapping from a TXT file.
        """
        self._import_from_file(self.TXT_FILE)

    def test_import_from_zip(self):
        """
        Tests import of a region mapping from a ZIP file.
        """
        self._import_from_file(self.ZIP_FILE)

    def test_import_from_bz2(self):
        """
        Tests import of a region mapping from a BZ2 file.
        """
        self._import_from_file(self.BZ2_FILE)

    def _import_from_file(self, import_file):
        """
        Imports a region mapping from the given file and checks the
        resulting entity.
        """
        region_mapping = self._import(import_file, self.surface.gid,
                                      self.connectivity.gid)

        self.assertTrue(region_mapping.surface is not None)
        self.assertTrue(region_mapping.connectivity is not None)

        array_data = region_mapping.array_data
        self.assertTrue(array_data is not None)
        self.assertEqual(16384, len(array_data))

    def test_import_wrong_file_content(self):
        """
            This method tests import of region mapping with:
            - a wrong region number
            - wrong number of regions
            - negative region number
        """
        try:
            self._import(self.WRONG_FILE_1, self.surface.gid,
                         self.connectivity.gid)
            self.fail("Import should fail in case of invalid region number")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.WRONG_FILE_2, self.surface.gid,
                         self.connectivity.gid)
            self.fail("Import should fail in case of invalid regions number")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.WRONG_FILE_3, self.surface.gid,
                         self.connectivity.gid)
            self.fail(
                "Import should fail in case of invalid region number (negative number)"
            )
        except OperationException:
            # Expected exception
            pass
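
The negative tests above use a try / self.fail / except OperationException
pattern. On Python 2.7 and later, unittest's assertRaises context manager
expresses the same check more compactly. A sketch of one rewrite, assuming
that Python version and reusing the _import helper above (this is not how
the suite itself is written):

    def test_import_no_surface_or_connectivity(self):
        # Each block fails the test unless _import raises OperationException.
        with self.assertRaises(OperationException):
            self._import(self.TXT_FILE, None, self.connectivity.gid)
        with self.assertRaises(OperationException):
            self._import(self.TXT_FILE, self.surface.gid, None)
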
Example #7
class TVBImporterTest(TransactionalTestCase):
    """
    Unit-tests for TVB importer.
    """
    TVB_EXPORTER = "TVBExporter"


    def setUp(self):
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy the H5 file to another location, since the exported original
        # will be deleted together with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, cfg.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(cfg.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correctly")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correctly")

        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)


    def test_zip_import(self):
        """
        Tests import of TVB data in ZIP format (which implies multiple data
        types in the same ZIP file, exported from a group).
        """
        self._import(self.zip_file_path)
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)
        self.assertEqual(3, len(data_types), "3 datatypes should have been imported from group.")


    def test_h5_import(self):
        """
        Tests import of TVB data in H5 format (a single data type per import).
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None, "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid, "Imported datatype should have the same gid")


    def test_import_invalid_file(self):
        """
        This method tests import of a file which does not exist or does not
        have a supported format.
        """
        try:
            self._import("invalid_path")
            self.fail("System should throw an exception if trying to import an invalid file")
        except OperationException:
            # Expected
            pass

        # Now generate a file on disk with a wrong format and try to import it
        file_path = os.path.join(cfg.TVB_TEMP_FOLDER, "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")

        try:
            self._import(file_path)
            self.fail("System should throw an exception if trying to import a file with wrong format")
        except OperationException:
            # Expected
            pass
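
Unlike the other examples, TVBImporterTest defines no tearDown: its setUp
already removes the first project and recreates a clean one for the import.
A tearDown symmetric with the other test classes might look like this
(a sketch, assuming the same FilesHelper semantics used above):

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
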
Example #8
class BrainViewerTest(TransactionalTestCase):
    """
    Unit-tests for BrainViewer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.launch(time_series=time_series)
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles',
            'urlMeasurePointsLabels', 'title', 'time_series', 'shelfObject',
            'pageSize', 'labelsStateVar', 'nrOfPages', 'labelsModes',
            'minActivityLabels', 'minActivity', 'measure_points',
            'maxActivity', 'isOneToOneMapping', 'isAdapter', 'extended_view',
            'base_activity_url', 'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertFalse(result['extended_view'])

    def test_get_required_memory(self):
        """
        BrainViewer should know its required memory, so expect a positive number and not -1.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        self.assertTrue(
            BrainViewer().get_required_memory_size(time_series) > 0)

    def test_generate_preview(self):
        """
        Check that all required keys are present in the preview generated by BrainViewer.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.generate_preview(time_series, (500, 200))
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles', 'pageSize',
            'nrOfPages', 'minActivityLabels', 'minActivity', 'maxActivity',
            'isOneToOneMapping', 'isAdapter', 'base_activity_url',
            'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)

    def test_launch_eeg(self):
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        time_series.configure()
        viewer = BrainEEG()
        result = viewer.launch(time_series)
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles',
            'urlMeasurePointsLabels', 'title', 'time_series', 'shelfObject',
            'pageSize', 'labelsStateVar', 'nrOfPages', 'labelsModes',
            'minActivityLabels', 'minActivity', 'measure_points',
            'maxActivity', 'isOneToOneMapping', 'isAdapter', 'extended_view',
            'base_activity_url', 'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertTrue(result['extended_view'])
Example #9
class NIFTIImporterTest(TransactionalTestCase):
    """
    Unit-tests for NIFTI importer.
    """

    NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii')
    GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                               'minimal.nii.gz')
    TVB_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                                'tvb_nifti_demo_data.nii.gz')
    WRONG_NII_FILE = os.path.abspath(__file__)

    DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]]
    UNKNOWN_STR = "unknown"

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, time_series.module + "." + time_series.type)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "TimeSeries should not be none")

        return time_series

    def test_import_demo_nii_data(self):
        """
        Tests import of the demo NIFTI data file.
        """
        time_series = self._import(self.TVB_NII_FILE)

        # assertAlmostEqual is not available on all supported machines,
        # so we compare the floats manually
        self.assertTrue(abs(2.0 - time_series.sample_period) <= 0.001)
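        # Where assertAlmostEqual is available, the same check could be
        # written as this sketch; places=3 rounds the difference to three
        # decimals, roughly the 0.001 tolerance used above:
        #   self.assertAlmostEqual(2.0, time_series.sample_period, places=3)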
        self.assertEqual("sec", str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # The demo data-set has 150 entries on the time dimension
        self.assertEqual(150, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertEqual("mm", volume.voxel_unit)

    def test_import_nii_without_time_dimension(self):
        """
        Tests import of a NIFTI file that has no time dimension.
        """
        time_series = self._import(self.NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)

    def test_import_nifti_compressed(self):
        """
        Tests import of a NIFTI file compressed in GZ format.
        """
        time_series = self._import(self.GZ_NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)

    def test_import_wrong_nii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._import(self.WRONG_NII_FILE)
            self.fail("Import should fail in case of a wrong NIFTI format.")
        except OperationException:
            # Expected exception
            pass
Example #10
class GIFTISurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for GIFTI Surface importer.
    """

    GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.surf.gii')
    GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'gifti.case1.time_series.L.time.gii')
    WRONG_GII_FILE = os.path.abspath(__file__)


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           surface.module + "." + surface.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be none")

        return surface


    def test_import_surface_gifti_data(self):
        """
        Tests import of a surface from a GIFTI file.
        !!! Important: this test executes only the GIFTI parse step, because
            storing the surface takes too long (~9 min) since the normals
            need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        surface = parser.parse(self.GIFTI_SURFACE_FILE)

        self.assertEqual(131342, len(surface.vertices))
        self.assertEqual(262680, len(surface.triangles))


    def test_import_timeseries_gifti_data(self):
        """
        Tests import of a time series from a GIFTI file.
        !!! Important: this test executes only the GIFTI parse step, because
            storing the surface takes too long (~9 min) since the normals
            need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        time_series = parser.parse(self.GIFTI_TIME_SERIES_FILE)

        data_shape = time_series.read_data_shape()

        self.assertEqual(135, data_shape[0])
        self.assertEqual(143479, data_shape[1])


    def test_import_wrong_gii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._importSurface(self.WRONG_GII_FILE)
            self.fail("Import should fail in case of a wrong GIFTI format.")
        except OperationException:
            # Expected exception
            pass
Example #11
class SensorsImporterTest(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'EEG_unit_vectors_BrainProducts_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_channels_reg13.txt.bz2')


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type: Sensors.")

        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(sensors is not None, "Sensors instance should not be none")

        return sensors


    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS, SensorsEEG())

        expected_size = 62
        self.assertTrue(eeg_sensors.labels is not None)
        self.assertEqual(expected_size, len(eeg_sensors.labels))
        self.assertEqual(expected_size, len(eeg_sensors.locations))
        self.assertEqual((expected_size, 3), eeg_sensors.locations.shape)
        self.assertEqual(expected_size, eeg_sensors.number_of_sensors)


    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())

        expected_size = 151
        self.assertTrue(meg_sensors.labels is not None)
        self.assertEqual(expected_size, len(meg_sensors.labels))
        self.assertEqual(expected_size, len(meg_sensors.locations))
        self.assertEqual((expected_size, 3), meg_sensors.locations.shape)
        self.assertEqual(expected_size, meg_sensors.number_of_sensors)
        self.assertTrue(meg_sensors.has_orientation)
        self.assertEqual(expected_size, len(meg_sensors.orientations))
        self.assertEqual((expected_size, 3), meg_sensors.orientations.shape)


    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
            self.fail("Import should fail in case of a MEG import without orientation.")
        except OperationException:
            # Expected exception
            pass


    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE, self.importer.INTERNAL_SENSORS, SensorsInternal())

        expected_size = 62
        self.assertTrue(internal_sensors.labels is not None)
        self.assertEqual(expected_size, len(internal_sensors.labels))
        self.assertEqual(expected_size, len(internal_sensors.locations))
        self.assertEqual((expected_size, 3), internal_sensors.locations.shape)
        self.assertEqual(expected_size, internal_sensors.number_of_sensors)