Example #1
class ObjSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torus = os.path.join(os.path.dirname(tvb_data.obj.__file__),
                         'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__),
                        'face_surface.obj')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {
            'data_file': import_file_path,
            "surface_type": FACE,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
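        # index 2 of each datatype row holds the GID used to load the entity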
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        surface = self._importSurface(self.face)
        self.assertEqual(8614, len(surface.vertices))
        self.assertEqual(8614, len(surface.vertex_normals))
        self.assertEqual(17224, len(surface.triangles))

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals
        """
        surface = self._importSurface(self.torus)
        self.assertEqual(441, surface.number_of_vertices)
        self.assertEqual(441, len(surface.vertex_normals))
        self.assertEqual(800, surface.number_of_triangles)
Example #2
class ObjSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface


    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        surface = self._importSurface(self.face)
        self.assertEqual(8614, len(surface.vertices))
        self.assertEqual(8614, len(surface.vertex_normals))
        self.assertEqual(17224, len(surface.triangles))


    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals
        """
        surface = self._importSurface(self.torus)
        self.assertEqual(441, surface.number_of_vertices)
        self.assertEqual(441, len(surface.vertex_normals))
        self.assertEqual(800, surface.number_of_triangles)
Example #3
class MatTimeSeriesImporterTest(TransactionalTestCase):

    base_pth = os.path.join(os.path.dirname(tvb_data.__file__),
                            'berlinSubjects', 'QL_20120814')
    bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat')
    connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self._import_connectivity()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_connectivity(self):
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')

        ### Launch Operation
        FlowService().fire_operation(importer,
                                     self.test_user,
                                     self.test_project.id,
                                     uploaded=self.connectivity_path,
                                     Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())

    def test_import_bold(self):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.mat_timeseries_importer',
            'MatTimeSeriesImporter')

        args = dict(
            data_file=self.bold_path,
            dataset_name='QL_20120824_DK_BOLD_timecourse',
            structure_path='',
            transpose=False,
            slice=None,
            sampling_rate=1000,
            start_time=0,
            tstype='region',
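            # a 'region' time series must reference the imported Connectivity by GID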
            tstype_parameters_option_region_connectivity=self.connectivity.gid,
            Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
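        # shape axes: (time points, state variables, regions, modes)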
Example #4
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EEG Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

                
    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = ['tsNames', 'groupedLabels', 'tsModes', 'tsStateVars', 'longestChannelLength',
                         'label_x', 'entities', 'page_size', 'number_of_visible_points',
                         'extended_view', 'initialSelection', 'ag_settings']

        for key in expected_keys:
            self.assertTrue(key in result, "key not found %s" % key)

        expected_ag_settings = ['channelsPerSet', 'channelLabels', 'noOfChannels', 'translationStep',
                                'normalizedSteps', 'nan_value_found', 'baseURLS', 'pageSize',
                                'nrOfPages', 'timeSetPaths', 'totalLength', 'number_of_visible_points',
                                'extended_view', 'measurePointsSelectionGIDs']

        ag_settings = json.loads(result['ag_settings'])

        for key in expected_ag_settings:
            self.assertTrue(key in ag_settings, "ag_settings should have the key %s" % key)
Example #5
class TestObjSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')

    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_surface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        surface = self._import_surface(self.face)
        assert 8614 == len(surface.vertices)
        assert 8614 == len(surface.vertex_normals)
        assert 17224 == len(surface.triangles)

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals
        """
        surface = self._import_surface(self.torus)
        assert 441 == surface.number_of_vertices
        assert 441 == len(surface.vertex_normals)
        assert 800 == surface.number_of_triangles
Example #6
class TestZIPSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                              'outer_skull_4096.zip')

    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')
        args = {
            'uploaded': import_file_path,
            'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, SkullSkin)[0]
        assert 1 == len(data_types), \
            "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        assert 4096 == len(surface.vertices)
        assert 4096 == surface.number_of_vertices
        assert 8188 == len(surface.triangles)
        assert 8188 == surface.number_of_triangles
        assert '' == surface.user_tag_3
        assert surface.valid_for_simulations
Example #7
class ZIPSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                              'outer_skull_4096.zip')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer',
                               'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'uploaded': import_file_path,
            'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        self.assertEqual(4096, len(surface.vertices))
        self.assertEqual(4096, surface.number_of_vertices)
        self.assertEqual(8188, len(surface.triangles))
        self.assertEqual(8188, surface.number_of_triangles)
        self.assertEqual('', surface.user_tag_3)
        self.assertTrue(surface.valid_for_simulations)
Example #8
class ZIPSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface


    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        self.assertEqual(4096, len(surface.vertices))
        self.assertEqual(4096, surface.number_of_vertices)
        self.assertEqual(8188, len(surface.triangles))
        self.assertEqual(8188, surface.number_of_triangles)
        self.assertEqual('', surface.user_tag_3)
        self.assertTrue(surface.valid_for_simulations)
Example #9
class TimeSeriesTest(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = [
            "t0",
            "shape",
            "preview",
            "labelsStateVar",
            "labelsModes",
            "mainContent",
            "labels",
            "labels_json",
            "figsize",
            "dt",
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #10
class MatTimeSeriesImporterTest(TransactionalTestCase):

    base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814')
    bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat')
    connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self._import_connectivity()


    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import_connectivity(self):
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())


    def test_import_bold(self):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.mat_timeseries_importer', 'MatTimeSeriesImporter')
        importer = ABCAdapter.build_adapter(group)

        args = dict(data_file=self.bold_path, dataset_name='QL_20120824_DK_BOLD_timecourse', structure_path='',
                    transpose=False, slice=None, sampling_rate=1000, start_time=0,
                    tstype='region',
                    tstype_parameters_option_region_connectivity=self.connectivity.gid,
                    Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
Example #11
class TestZIPSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')


    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        args = {'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
                'zero_based_triangles': True,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface


    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        assert 4096 == len(surface.vertices)
        assert 4096 == surface.number_of_vertices
        assert 8188 == len(surface.triangles)
        assert 8188 == surface.number_of_triangles
        assert '' == surface.user_tag_3
        assert surface.valid_for_simulations
Example #12
class ICATest(TransactionalTestCase):
    """
    Unit-tests for ICA Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from ICA viewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        conn_measure = self.datatypeFactory.create_ICA(time_series)
        viewer = ICA()
        result = viewer.launch(conn_measure)
        expected_keys = [
            'matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent',
            'isAdapter'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #13
class ConnectivityViewerTest(TransactionalTestCase):
    """
    Unit-tests for Connectivity Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

                
    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from ConnectivityViewer launch.
        """
        viewer = ConnectivityViewer()
        result = viewer.launch(self.connectivity)
        expected_keys = ['weightsMin', 'weightsMax', 'urlWeights', 'urlVertices',
                         'urlTriangles', 'urlTracts', 'urlPositions', 'urlNormals',
                         'rightHemisphereJson', 'raysArray', 'rayMin', 'rayMax', 'positions',
                         'leftHemisphereJson', 'connectivity_entity', 'bothHemisphereJson']
        for key in expected_keys:
            self.assertTrue(key in result)
Example #14
class TestTimeSeries(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """


    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None


    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_launch(self):
        """
        Check that all required keys are present in output from TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = ['t0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
                         'mainContent', 'labels', 'labels_json', 'figsize', 'dt']
        for key in expected_keys:
            assert key in result
Example #15
class NetworkxImporterTest(TransactionalTestCase):
    """
    Unit-tests for NetworkX Connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data",
                               'connectome_83.gpickle')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_import(self):

        count_before = self.count_all_entities(Connectivity)
        self.assertEqual(0, count_before)

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.networkx_importer',
            'NetworkxConnectivityImporter')
        args = {
            'data_file': self.upload_file,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        count_after = self.count_all_entities(Connectivity)
        self.assertEqual(1, count_after)

        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
Example #16
class TestCrossCorrelationViewer(TransactionalTestCase):
    """
    Unit-tests for Cross Correlation Viewer.
    """
    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        assert self.connectivity is not None

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CrossCorrelationVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        ccorr = self.datatypeFactory.create_crosscorrelation(time_series)
        viewer = CrossCorrelationVisualizer()
        result = viewer.launch(ccorr)
        expected_keys = [
            'matrix_shape', 'matrix_data', 'mainContent', 'isAdapter'
        ]
        for key in expected_keys:
            assert key in result
Example #17
class TimeSeriesTest(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = [
            't0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
            'mainContent', 'labels', 'labels_json', 'figsize', 'dt'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #18
class TestCovarianceViewer(TransactionalTestCase):
    """
    Unit-tests for Covariance Viewer.
    """


    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None


    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_launch(self):
        """
        Check that all required keys are present in output from CovarianceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        covariance = self.datatypeFactory.create_covariance(time_series)
        viewer = CovarianceVisualizer()
        result = viewer.launch(covariance)
        expected_keys = ['matrix_shape', 'matrix_data', 'mainContent', 'isAdapter']
        for key in expected_keys:
            assert (key in result)
Example #19
class NetworkxImporterTest(TransactionalTestCase):
    """
    Unit-tests for NetworkX Connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_import(self):

        count_before = self.count_all_entities(Connectivity)
        self.assertEqual(0, count_before)

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.networkx_importer', 'NetworkxConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        count_after = self.count_all_entities(Connectivity)
        self.assertEqual(1, count_after)

        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
Example #20
class TestNetworkxImporter(TransactionalTestCase):
    """
    Unit-tests for NetworkX Connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')


    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_import(self):

        count_before = self.count_all_entities(Connectivity)
        assert 0 == count_before

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.networkx_importer',
                                              'NetworkxConnectivityImporter')
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        count_after = self.count_all_entities(Connectivity)
        assert 1 == count_after

        conn = self.get_all_entities(Connectivity)[0]
        assert 83 == conn.number_of_regions
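
For context, a minimal sketch of how an input like connectome_83.gpickle could be produced with networkx (the exact node/edge attributes the importer expects are an assumption; only "pickled networkx graph" is implied by the file extension):

import networkx as nx

# build a graph with 83 nodes to match number_of_regions asserted above
graph = nx.complete_graph(83)
# write_gpickle is the pre-networkx-3.0 API for pickling a graph to disk
nx.write_gpickle(graph, 'connectome_83.gpickle')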
Example #21
class CrossCoherenceViewerTest(TransactionalTestCase):
    """
    Unit-tests for Cross Coherence Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

                
    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from CrossCoherenceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        cross_coherence = self.datatypeFactory.create_crosscoherence(time_series)
        viewer = CrossCoherenceVisualizer()
        result = viewer.launch(cross_coherence)
        expected_keys = ['matrix_data', 'matrix_shape', 'matrix_strides', 'frequency']
        for key in expected_keys:
            self.assertTrue(key in result)
Example #22
class SensorsImporterTest(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'EEG_unit_vectors_BrainProducts_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'meg_channels_reg13.txt.bz2')

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer',
                               'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type)[0]
        self.assertEqual(
            1, len(data_types),
            "Project should contain only one data type = Sensors.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "Sensors instance should not be none")

        return time_series

    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS,
                                   SensorsEEG())

        expected_size = 62
        self.assertTrue(eeg_sensors.labels is not None)
        self.assertEqual(expected_size, len(eeg_sensors.labels))
        self.assertEqual(expected_size, len(eeg_sensors.locations))
        self.assertEqual((expected_size, 3), eeg_sensors.locations.shape)
        self.assertEqual(expected_size, eeg_sensors.number_of_sensors)

    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS,
                                   SensorsMEG())

        expected_size = 151
        self.assertTrue(meg_sensors.labels is not None)
        self.assertEqual(expected_size, len(meg_sensors.labels))
        self.assertEqual(expected_size, len(meg_sensors.locations))
        self.assertEqual((expected_size, 3), meg_sensors.locations.shape)
        self.assertEqual(expected_size, meg_sensors.number_of_sensors)
        self.assertTrue(meg_sensors.has_orientation)
        self.assertEqual(expected_size, len(meg_sensors.orientations))
        self.assertEqual((expected_size, 3), meg_sensors.orientations.shape)

    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS,
                         SensorsMEG())
            self.fail(
                "Import should fail in case of a MEG import without orientation."
            )
        except OperationException:
            # Expected exception
            pass

    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE,
                                        self.importer.INTERNAL_SENSORS,
                                        SensorsInternal())

        expected_size = 62
        self.assertTrue(internal_sensors.labels is not None)
        self.assertEqual(expected_size, len(internal_sensors.labels))
        self.assertEqual(expected_size, len(internal_sensors.locations))
        self.assertEqual((expected_size, 3), internal_sensors.locations.shape)
        self.assertEqual(expected_size, internal_sensors.number_of_sensors)
Example #23
class TestSensorsImporter(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'eeg_unitvector_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'meg_151.txt.bz2')

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type)[0]
        assert 1 == len(
            data_types), "Project should contain only one data type = Sensors."

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert time_series is not None, "Sensors instance should not be none"

        return time_series

    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS,
                                   SensorsEEG())

        expected_size = 62
        assert eeg_sensors.labels is not None
        assert expected_size == len(eeg_sensors.labels)
        assert expected_size == len(eeg_sensors.locations)
        assert (expected_size, 3) == eeg_sensors.locations.shape
        assert expected_size == eeg_sensors.number_of_sensors

    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS,
                                   SensorsMEG())

        expected_size = 151
        assert meg_sensors.labels is not None
        assert expected_size == len(meg_sensors.labels)
        assert expected_size == len(meg_sensors.locations)
        assert (expected_size, 3) == meg_sensors.locations.shape
        assert expected_size == meg_sensors.number_of_sensors
        assert meg_sensors.has_orientation
        assert expected_size == len(meg_sensors.orientations)
        assert (expected_size, 3) == meg_sensors.orientations.shape

    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS,
                         SensorsMEG())
            raise AssertionError(
                "Import should fail in case of a MEG import without orientation."
            )
        except OperationException:
            # Expected exception
            pass
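
        # A more idiomatic pytest spelling of the same expectation would be
        # (sketch, assuming pytest is importable in this test module):
        #     with pytest.raises(OperationException):
        #         self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())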

    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE,
                                        self.importer.INTERNAL_SENSORS,
                                        SensorsInternal())

        expected_size = 62
        assert internal_sensors.labels is not None
        assert expected_size == len(internal_sensors.labels)
        assert expected_size == len(internal_sensors.locations)
        assert (expected_size, 3) == internal_sensors.locations.shape
        assert expected_size == internal_sensors.number_of_sensors
Example #24
class SensorViewersTest(TransactionalTestCase):
    """
    Unit-tests for Sensors viewers.
    """

    EXPECTED_KEYS_INTERNAL = {'urlMeasurePoints': None, 'urlMeasurePointsLabels': None, 'noOfMeasurePoints': 103,
                              'minMeasure': 0, 'maxMeasure': 103, 'urlMeasure': None, 'shelfObject': None}

    EXPECTED_KEYS_EEG = EXPECTED_KEYS_INTERNAL.copy()
    EXPECTED_KEYS_EEG.update({'urlVertices': None, 'urlTriangles': None, 'urlLines': None, 'urlNormals': None,
                              'boundaryURL': '', 'urlAlphas': '', 'urlAlphasIndices': '',
                              'noOfMeasurePoints': 62, 'maxMeasure': 62})

    EXPECTED_KEYS_MEG = EXPECTED_KEYS_EEG.copy()
    EXPECTED_KEYS_MEG.update({'noOfMeasurePoints': 151, 'maxMeasure': 151})


    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user and a test project;
        imports a face surface to serve as the shelf object
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        ## Import Shelf Face Object
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'face_surface_old.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, FACE, True)


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_launch_EEG(self):
        """
        Check that all required keys are present in output from EegSensorViewer launch.
        """
        ## Import Sensors
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.EEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())

        ## Import EEGCap
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, EEG_CAP, True)
        eeg_cap_surface = TestFactory.get_entity(self.test_project, EEGCap())

        viewer = EegSensorViewer()
        viewer.current_project_id = self.test_project.id

        ## Launch with EEG Cap selected
        result = viewer.launch(sensors, eeg_cap_surface)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertIsNotNone(result[key], "Value at key %s should not be None" % key)

        ## Launch without EEG Cap
        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertTrue(not result[key] or result[key] == "[]",
                            "Value at key %s should be None or empty, but is %s" % (key, result[key]))


    def test_launch_MEG(self):
        """
        Check that all required keys are present in output from MEGSensorViewer launch.
        """

        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'meg_channels_reg13.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.MEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsMEG())

        viewer = MEGSensorViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_MEG, result)


    def test_launch_internal(self):
        """
        Check that all required keys are present in output from InternalSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'internal_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.INTERNAL_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = InternalSensorViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, result)
Example #25
class SimulatorAdapterTest(TransactionalTestCase):
    """
    Basic testing that Simulator is still working from UI.
    """
    CONNECTIVITY_NODES = 74

    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(
            self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE,
                                                SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(
            algorithm, self.test_user, self.test_project, model.STATUS_STARTED,
            json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
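        # SIMULATOR_PARAMETERS is reused across the tests below, so the GID
        # set here is picked up by each launch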

    def test_happy_flow_launch(self):
        """
        Test that launching a simulation from UI works.
        """
        OperationService().initiate_prelaunch(self.operation,
                                              self.simulator_adapter, {},
                                              **SIMULATOR_PARAMETERS)
        sim_result = dao.get_generic_entity(TimeSeriesRegion,
                                            'TimeSeriesRegion', 'type')[0]
        self.assertEqual(sim_result.read_data_shape(),
                         (32, 1, self.CONNECTIVITY_NODES, 1))

    def _estimate_hdd(self, new_parameters_dict):
        """ Private method, to return HDD estimation for a given set of input parameters"""
        filtered_params = self.simulator_adapter.prepare_ui_inputs(
            new_parameters_dict)
        self.simulator_adapter.configure(**filtered_params)
        return self.simulator_adapter.get_required_disk_size(**filtered_params)

    def test_estimate_hdd(self):
        """
        Test that occupied HDD estimation for simulation results considers simulation length.
        """
        factor = 5
        simulation_parameters = copy(SIMULATOR_PARAMETERS)
        ## Estimate HDD with default simulation parameters
        estimate1 = self._estimate_hdd(simulation_parameters)
        self.assertTrue(estimate1 > 1)

        ## Change simulation length and monitor period; each scales the estimated HDD proportionally
        simulation_parameters['simulation_length'] = float(
            simulation_parameters['simulation_length']) * factor
        period = float(simulation_parameters[
            'monitors_parameters_option_TemporalAverage_period'])
        simulation_parameters[
            'monitors_parameters_option_TemporalAverage_period'] = period / factor
        estimate2 = self._estimate_hdd(simulation_parameters)
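        # both the longer length and the finer monitor period scale the stored
        # sample count linearly, hence the factor ** 2 relation asserted below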
        self.assertEqual(estimate1, estimate2 / factor / factor)

        ## Change number of nodes in connectivity. Expect HDD estimation increase.
        large_conn_gid = self.datatypes_factory.create_connectivity(
            self.CONNECTIVITY_NODES * factor)[1].gid
        simulation_parameters['connectivity'] = large_conn_gid
        estimate3 = self._estimate_hdd(simulation_parameters)
        self.assertEqual(estimate2, estimate3 / factor)

    def test_estimate_execution_time(self):
        """
        Test that get_execution_time_approximation considers the correct params
        """
        ## Compute reference estimation
        params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
        estimation1 = self.simulator_adapter.get_execution_time_approximation(
            **params)

        ## Estimation when the surface input parameter is set
        params['surface'] = "GID_surface"
        estimation2 = self.simulator_adapter.get_execution_time_approximation(
            **params)
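        # per the assertion below, enabling a surface is expected to multiply
        # the execution-time estimate by 500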

        self.assertEqual(estimation1, estimation2 / 500)
        params['surface'] = ""

        ## Modify integration step and simulation length:
        initial_simulation_length = float(params['simulation_length'])
        initial_integration_step = float(params['integrator_parameters']['dt'])

        for factor in (2, 4, 10):
            params['simulation_length'] = initial_simulation_length * factor
            params['integrator_parameters'][
                'dt'] = initial_integration_step / factor

            estimation3 = self.simulator_adapter.get_execution_time_approximation(
                **params)

            self.assertEqual(estimation1, estimation3 / factor / factor)

        ## Check that no division by zero happens
        params['integrator_parameters']['dt'] = 0
        estimation4 = self.simulator_adapter.get_execution_time_approximation(
            **params)
        self.assertTrue(estimation4 > 0)

        ## Even with zero simulation length, a positive estimation should still be returned
        params['simulation_length'] = 0
        estimation5 = self.simulator_adapter.get_execution_time_approximation(
            **params)
        self.assertTrue(estimation5 > 0)

    def test_noise_2d_bad_shape(self):
        """
        Test a simulation with noise. Pass a wrong shape and expect an exception to be raised.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params['integrator'] = u'HeunStochastic'
        noise_4d_config = [[1 for _ in range(self.CONNECTIVITY_NODES)]
                           for _ in range(4)]
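        # 4 noise rows cannot match the 2 state variables used in the happy-flow
        # test below, so this launch is expected to fail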
        params[
            'integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
        params[
            'integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
        params[
            'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(
                noise_4d_config)
        params[
            'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
        params[
            'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
        params[
            'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)
        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual(
                (4, 74),
                self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")
        self.assertRaises(Exception,
                          OperationService().initiate_prelaunch,
                          self.operation, self.simulator_adapter, {}, **params)

    def test_noise_2d_happy_flow(self):
        """
        Test a simulation with noise.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params['integrator'] = u'HeunStochastic'
        noise_2d_config = [[1 for _ in range(self.CONNECTIVITY_NODES)]
                           for _ in range(2)]
        params['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
        params['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_2d_config)
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'

        self._launch_and_check_noise(params, (2, 74))

        sim_result = dao.get_generic_entity(TimeSeriesRegion,
                                            'TimeSeriesRegion', 'type')[0]
        self.assertEquals(sim_result.read_data_shape(),
                          (32, 1, self.CONNECTIVITY_NODES, 1))

        params[
            'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = '[1]'
        self._launch_and_check_noise(params, (1, ))

    def _launch_and_check_noise(self, params, expected_noise_shape):

        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)

        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual(
                expected_noise_shape,
                self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")

        OperationService().initiate_prelaunch(self.operation,
                                              self.simulator_adapter, {},
                                              **params)

    def test_simulation_with_stimulus(self):
        """
        Test a simulation with a stimulus.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params["stimulus"] = self.datatypes_factory.create_stimulus(
            self.connectivity).gid

        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)
        OperationService().initiate_prelaunch(self.operation,
                                              self.simulator_adapter, {},
                                              **params)
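
The estimation tests in this example encode simple proportionality assumptions: the disk estimate scales linearly with simulation length and with the number of connectivity nodes, and inversely with the monitor period, while the execution-time estimate scales with simulation length divided by the integration step. A minimal standalone sketch of the disk-size relationship the assertions check (estimated_disk_size is a hypothetical illustration, not TVB's actual estimator):

# Hypothetical helper mirroring the scaling asserted above; not TVB code.
def estimated_disk_size(n_nodes, simulation_length, period, bytes_per_sample=8):
    n_time_points = simulation_length / period  # samples recorded per node
    return n_nodes * n_time_points * bytes_per_sample

base = estimated_disk_size(74, 1000.0, 1.0)
factor = 4
# Mirrors assertEqual(estimate1, estimate2 / factor / factor):
assert estimated_disk_size(74, 1000.0 * factor, 1.0 / factor) == base * factor * factor
# Mirrors assertEqual(estimate2, estimate3 / factor) for a larger connectivity:
assert estimated_disk_size(74 * factor, 1000.0 * factor, 1.0 / factor) == base * factor ** 3
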
Example #26
0
class NIFTIImporterTest(TransactionalTestCase):
    """
    Unit-tests for NIFTI importer.
    """

    NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii')
    GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz')
    TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz')
    WRONG_NII_FILE = os.path.abspath(__file__)

    DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]]
    UNKNOWN_STR = "unknown"


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import(self, import_file_path=None, expected_result_class=StructuralMRI, connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': True, 'connectivity': connectivity}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        self.assertEqual(1, count, "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result


    def test_import_demo_ts(self):
        """
        This method tests import of a NIFTI file containing a time series.
        """
        time_series = self._import(self.TIMESERIES_NII_FILE, TimeSeriesVolume)

        # Since self.assertAlmostEquals is not available on all machines,
        # we compare floats as follows
        self.assertTrue(abs(1.0 - time_series.sample_period) <= 0.001)
        self.assertEqual("sec", str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title.startswith("NIFTI"))

        data_shape = time_series.read_data_shape()
        self.assertEquals(4, len(data_shape))
        # We have 5 time points
        self.assertEqual(5, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEquals(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertEquals("mm", volume.voxel_unit)


    def test_import_nii_without_time_dimension(self):
        """
        This method tests import of a NIFTI file without a time dimension.
        """
        structure = self._import(self.NII_FILE)
        self.assertEqual("T1", structure.weighting)

        data_shape = structure.array_data.shape
        self.assertEquals(3, len(data_shape))
        self.assertEqual(64, data_shape[0])
        self.assertEqual(64, data_shape[1])
        self.assertEqual(10, data_shape[2])

        volume = structure.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEquals(self.UNKNOWN_STR, volume.voxel_unit)


    def test_import_nifti_compressed(self):
        """
        This method tests import of a NIFTI file compressed in GZ format.
        """
        structure = self._import(self.GZ_NII_FILE)
        self.assertEqual("T1", structure.weighting)


    def test_import_region_mapping(self):
        """
        This method tests import of a NIFTI file as a region volume mapping, linked to an existing connectivity.
        """
        to_link_conn = self.datatypeFactory.create_connectivity()[1]
        mapping = self._import(self.GZ_NII_FILE, RegionVolumeMapping, to_link_conn.gid)

        self.assertTrue(-1 <= mapping.array_data.min())
        self.assertTrue(mapping.array_data.max() < to_link_conn.number_of_regions)

        conn = mapping.connectivity
        self.assertTrue(conn is not None)
        self.assertEquals(to_link_conn.number_of_regions, conn.number_of_regions)

        volume = mapping.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEquals(self.UNKNOWN_STR, volume.voxel_unit)



    def test_import_wrong_nii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._import(self.WRONG_NII_FILE)
            self.fail("Import should fail in case of a wrong NIFTI format.")
        except OperationException:
            # Expected exception
            pass
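
The bounds asserted in test_import_region_mapping suggest the invariant that a region volume mapping stores one region index per voxel, in the range [-1, number_of_regions), with -1 presumably marking voxels outside the parcellation. A small standalone sketch of that invariant (is_valid_region_mapping is illustrative, not a TVB helper):

import numpy

def is_valid_region_mapping(array_data, number_of_regions):
    # Every voxel must hold an index in [-1, number_of_regions).
    return bool((array_data >= -1).all() and (array_data < number_of_regions).all())

assert is_valid_region_mapping(numpy.array([[-1, 0], [3, 5]]), 6)
assert not is_valid_region_mapping(numpy.array([7]), 6)
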
Example #27
0
class BrainViewerTest(TransactionalTestCase):
    """
    Unit-tests for BrainViewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports the default project data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_user = self.datatypeFactory.get_user()
        self.test_project = TestFactory.import_default_project(self.test_user)
        self.datatypeFactory.project = self.test_project

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.face_surface = TestFactory.get_entity(self.test_project, FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(TestFactory.get_entity(self.test_project, EEGCap()) is not None)


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series=time_series)
        expected_keys = ['urlVertices', 'urlNormals', 'urlTriangles', 'urlMeasurePointsLabels', 'title', 
                         'time_series', 'shelfObject', 'pageSize', 'labelsStateVar', 'labelsModes',
                         'minActivityLabels', 'minActivity', 'measure_points', 'maxActivity', 'isOneToOneMapping',
                         'isAdapter', 'extended_view', 'base_activity_url', 'alphas_indices']
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertFalse(result['extended_view'])

    
    def test_get_required_memory(self):
        """
        BrainViewer should know its required memory, so we expect a positive number and not -1.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        self.assertTrue(BrainViewer().get_required_memory_size(time_series) > 0)
        
        
    def test_generate_preview(self):
        """
        Check that all required keys are present in the preview generated by BrainViewer.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.generate_preview(time_series, (500, 200))
        expected_keys = ['urlVertices', 'urlNormals', 'urlTriangles', 'minActivity', 'maxActivity',
                         'isOneToOneMapping', 'isAdapter', 'base_activity_url', 'alphas_indices']
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        
        
    def test_launch_eeg(self):
        """
        Tests successful launch of a BrainEEG and that all required keys are present in the returned template dictionary.
        """
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        time_series.configure()
        viewer = BrainEEG()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series)
        expected_keys = ['urlVertices', 'urlNormals', 'urlTriangles', 'urlMeasurePointsLabels', 'title', 
                         'time_series', 'shelfObject', 'pageSize', 'labelsStateVar', 'labelsModes',
                         'minActivityLabels', 'minActivity', 'measure_points', 'maxActivity', 'isOneToOneMapping',
                         'isAdapter', 'extended_view', 'base_activity_url', 'alphas_indices']
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertTrue(result['extended_view'])
class SimulatorAdapterTest(TransactionalTestCase):
    """
    Basic testing that Simulator is still working from UI.
    """
    CONNECTIVITY_NODES = 74

    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid


    def test_happy_flow_launch(self):
        """
        Test that launching a simulation from UI works.
        """
        OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
        sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
        self.assertEquals(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))


    def _estimate_hdd(self, new_parameters_dict):
        """ Private method, to return HDD estimation for a given set of input parameters"""
        filtered_params = self.simulator_adapter.prepare_ui_inputs(new_parameters_dict)
        self.simulator_adapter.configure(**filtered_params)
        return self.simulator_adapter.get_required_disk_size(**filtered_params)


    def test_estimate_hdd(self):
        """
        Test that occupied HDD estimation for simulation results considers simulation length.
        """
        factor = 5
        simulation_parameters = copy(SIMULATOR_PARAMETERS)
        ## Estimate HDD with default simulation parameters
        estimate1 = self._estimate_hdd(simulation_parameters)
        self.assertTrue(estimate1 > 1)

        ## Change simulation length and monitor period; we expect a directly proportional increase in estimated HDD
        simulation_parameters['simulation_length'] = float(simulation_parameters['simulation_length']) * factor
        period = float(simulation_parameters['monitors_parameters_option_TemporalAverage_period'])
        simulation_parameters['monitors_parameters_option_TemporalAverage_period'] = period / factor
        estimate2 = self._estimate_hdd(simulation_parameters)
        self.assertEqual(estimate1, estimate2 / factor / factor)

        ## Change number of nodes in connectivity. Expect HDD estimation increase.
        large_conn_gid = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES * factor)[1].gid
        simulation_parameters['connectivity'] = large_conn_gid
        estimate3 = self._estimate_hdd(simulation_parameters)
        self.assertEqual(estimate2, estimate3 / factor)


    def test_estimate_execution_time(self):
        """
        Test that get_execution_time_approximation considers the correct params
        """
        ## Compute reference estimation
        params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
        estimation1 = self.simulator_adapter.get_execution_time_approximation(**params)

        ## Estimation when the surface input parameter is set
        params['surface'] = "GID_surface"
        estimation2 = self.simulator_adapter.get_execution_time_approximation(**params)

        self.assertEqual(estimation1, estimation2 / 500)
        params['surface'] = ""

        ## Modify integration step and simulation length:
        initial_simulation_length = float(params['simulation_length'])
        initial_integration_step = float(params['integrator_parameters']['dt'])

        for factor in (2, 4, 10):
            params['simulation_length'] = initial_simulation_length * factor
            params['integrator_parameters']['dt'] = initial_integration_step / factor

            estimation3 = self.simulator_adapter.get_execution_time_approximation(**params)

            self.assertEqual(estimation1, estimation3 / factor / factor)

        ## Check that no division by zero happens
        params['integrator_parameters']['dt'] = 0
        estimation4 = self.simulator_adapter.get_execution_time_approximation(**params)
        self.assertTrue(estimation4 > 0)

        ## Even with zero simulation length, a positive estimation should still be returned
        params['simulation_length'] = 0
        estimation5 = self.simulator_adapter.get_execution_time_approximation(**params)
        self.assertTrue(estimation5 > 0)


    def test_noise_2d_bad_shape(self):
        """
        Test a simulation with noise. Pass a wrong shape and expect exception to be raised.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params['integrator'] = u'HeunStochastic'
        noise_4d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(4)]
        params['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
        params['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_4d_config)
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)
        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual((4, 74), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")
        self.assertRaises(Exception, OperationService().initiate_prelaunch, self.operation,
                          self.simulator_adapter, {}, **params)


    def test_noise_2d_happy_flow(self):
        """
        Test a simulation with noise.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params['integrator'] = u'HeunStochastic'
        noise_2d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(2)]
        params['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
        params['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_2d_config)
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'

        self._launch_and_check_noise(params, (2, 74))

        sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
        self.assertEquals(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))

        params['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = '[1]'
        self._launch_and_check_noise(params, (1,))


    def _launch_and_check_noise(self, params, expected_noise_shape):

        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)

        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual(expected_noise_shape, self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")

        OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **params)


    def test_simulation_with_stimulus(self):
        """
        Test a simulation with a stimulus.
        """
        params = copy(SIMULATOR_PARAMETERS)
        params["stimulus"] = self.datatypes_factory.create_stimulus(self.connectivity).gid

        filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
        self.simulator_adapter.configure(**filtered_params)
        OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **params)
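
The long flat keys in the noise tests follow a recursive naming convention for nested UI options: each level appends _parameters_option_<SelectedType>_<sub_field> to the key. A short sketch of how such keys can be assembled (option_key is a hypothetical illustration, not part of the TVB API):

def option_key(field, *levels):
    # e.g. option_key('integrator', ('HeunStochastic', 'noise'), ('Additive', 'nsig'))
    # -> 'integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'
    key = field
    for selected_type, sub_field in levels:
        key += '_parameters_option_%s_%s' % (selected_type, sub_field)
    return key

assert option_key('integrator', ('HeunStochastic', 'dt')) == \
    'integrator_parameters_option_HeunStochastic_dt'
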
Example #29
0
class TestBrainViewer(TransactionalTestCase):
    """
    Unit-tests for BrainViewer.
    """

    EXPECTED_KEYS = [
        'urlVertices', 'urlNormals', 'urlTriangles', 'urlLines',
        'urlRegionMap', 'base_activity_url', 'isOneToOneMapping',
        'minActivity', 'maxActivity', 'noOfMeasurePoints', 'isAdapter'
    ]
    EXPECTED_EXTRA_KEYS = [
        'urlMeasurePointsLabels', 'urlMeasurePoints', 'time_series',
        'pageSize', 'shelfObject', 'extended_view', 'legendLabels',
        'labelsStateVar', 'labelsModes', 'title'
    ]

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports the default project data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_user = self.datatypeFactory.get_user()
        self.test_project = TestFactory.import_default_project(self.test_user)
        self.datatypeFactory.project = self.test_project

        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        assert self.connectivity is not None
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        assert self.face_surface is not None
        assert TestFactory.get_entity(self.test_project, EEGCap()) is not None

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series=time_series)

        for key in TestBrainViewer.EXPECTED_KEYS + TestBrainViewer.EXPECTED_EXTRA_KEYS:
            assert key in result and result[key] is not None
        assert not result['extended_view']

    def test_get_required_memory(self):
        """
        BrainViewer should know its required memory, so we expect a positive number and not -1.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        assert BrainViewer().get_required_memory_size(time_series) > 0

    def test_generate_preview(self):
        """
        Check that all required keys are present in the preview generated by BrainViewer.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.generate_preview(time_series, figure_size=(500, 200))
        for key in TestBrainViewer.EXPECTED_KEYS:
            assert key in result and result[key] is not None, key

    def test_launch_eeg(self):
        """
        Tests successful launch of a DualBrainViewer and that all required keys are present in the returned template dictionary.
        """
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        time_series.configure()
        viewer = DualBrainViewer()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series)
        for key in TestBrainViewer.EXPECTED_KEYS + TestBrainViewer.EXPECTED_EXTRA_KEYS:
            assert key in result and result[key] is not None
        assert result['extended_view']
Example #30
0
class TestSensorViewers(TransactionalTestCase):
    """
    Unit-tests for Sensors viewers.
    """

    EXPECTED_KEYS_INTERNAL = {'urlMeasurePoints': None, 'urlMeasurePointsLabels': None, 'noOfMeasurePoints': 103,
                              'minMeasure': 0, 'maxMeasure': 103, 'urlMeasure': None, 'shelfObject': None}

    EXPECTED_KEYS_EEG = EXPECTED_KEYS_INTERNAL.copy()
    EXPECTED_KEYS_EEG.update({'urlVertices': None, 'urlTriangles': None, 'urlLines': None, 'urlNormals': None,
                              'noOfMeasurePoints': 62, 'maxMeasure': 62})

    EXPECTED_KEYS_MEG = EXPECTED_KEYS_EEG.copy()
    EXPECTED_KEYS_MEG.update({'noOfMeasurePoints': 151, 'maxMeasure': 151})

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user and a test project;
        imports a face surface
        """
        self.factory = DatatypesFactory()
        self.test_project = self.factory.get_project()
        self.test_user = self.factory.get_user()

        ## Import Shelf Face Object
        face_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project, face_path, FACE)

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch_eeg(self):
        """
        Check that all required keys are present in output from EegSensorViewer launch.
        """
        ## Import Sensors
        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.EEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())

        ## Import EEGCap
        cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project, cap_path, EEG_CAP)
        eeg_cap_surface = TestFactory.get_entity(self.test_project, EEGCap())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        ## Launch with EEG Cap selected
        result = viewer.launch(sensors, eeg_cap_surface)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            assert result[key] is not None, "Value at key %s should not be None" % key

        ## Launch without EEG Cap
        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            assert not result[key] or result[key] == "[]", "Value at key %s should be None or empty, " \
                                                           "but is %s" % (key, result[key])

    def test_launch_meg(self):
        """
        Check that all required keys are present in output from MEGSensorViewer launch.
        """

        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.MEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsMEG())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_MEG, result)

    def test_launch_internal(self):
        """
        Check that all required keys are present in output from InternalSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.INTERNAL_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, result)
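
assert_compliant_dictionary comes from the TVB test base class; judging by the expected-keys dictionaries above, None values appear to mean "key must be present" while concrete values must match exactly. A minimal sketch of that assumed contract (not the real implementation):

def assert_compliant_dictionary(expected, actual):
    for key, value in expected.items():
        assert key in actual, 'missing key: %s' % key
        if value is not None:
            assert actual[key] == value, 'unexpected value for key %s' % key
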
Example #31
0
class SensorViewersTest(TransactionalTestCase):
    """
    Unit-tests for Sensors viewers.
    """

    EXPECTED_KEYS_INTERNAL = {
        'urlMeasurePoints': None,
        'urlMeasurePointsLabels': None,
        'noOfMeasurePoints': 103,
        'minMeasure': 0,
        'maxMeasure': 103,
        'urlMeasure': None,
        'shelfObject': None
    }

    EXPECTED_KEYS_EEG = EXPECTED_KEYS_INTERNAL.copy()
    EXPECTED_KEYS_EEG.update({
        'urlVertices': None,
        'urlTriangles': None,
        'urlLines': None,
        'urlNormals': None,
        'noOfMeasurePoints': 62,
        'maxMeasure': 62
    })

    EXPECTED_KEYS_MEG = EXPECTED_KEYS_EEG.copy()
    EXPECTED_KEYS_MEG.update({'noOfMeasurePoints': 151, 'maxMeasure': 151})

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user and a test project;
        imports a face surface
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        ## Import Shelf Face Object
        face_path = os.path.join(os.path.dirname(tvb_data.obj.__file__),
                                 'face_surface.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project,
                                       face_path, FACE)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch_EEG(self):
        """
        Check that all required keys are present in output from EegSensorViewer launch.
        """
        ## Import Sensors
        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   Sensors_Importer.EEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())

        ## Import EEGCap
        cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__),
                                'eeg_cap.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project,
                                       cap_path, EEG_CAP)
        eeg_cap_surface = TestFactory.get_entity(self.test_project, EEGCap())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        ## Launch with EEG Cap selected
        result = viewer.launch(sensors, eeg_cap_surface)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertIsNotNone(result[key],
                                 "Value at key %s should not be None" % key)

        ## Launch without EEG Cap
        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertTrue(
                not result[key] or result[key] == "[]",
                "Value at key %s should be None or empty, but is %s" %
                (key, result[key]))

    def test_launch_MEG(self):
        """
        Check that all required keys are present in output from MEGSensorViewer launch.
        """

        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__),
                                'meg_channels_reg13.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   Sensors_Importer.MEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsMEG())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_MEG, result)

    def test_launch_internal(self):
        """
        Check that all required keys are present in output from InternalSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__),
                                'internal_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   Sensors_Importer.INTERNAL_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, result)
Example #32
0
class TestBrainViewer(TransactionalTestCase):
    """
    Unit-tests for BrainViewer.
    """

    EXPECTED_KEYS = ['urlVertices', 'urlNormals', 'urlTriangles', 'urlLines', 'urlRegionMap',
                     'base_activity_url', 'isOneToOneMapping', 'minActivity', 'maxActivity',
                     'noOfMeasurePoints', 'isAdapter']
    EXPECTED_EXTRA_KEYS = ['urlMeasurePointsLabels', 'urlMeasurePoints', 'time_series', 'pageSize', 'shelfObject',
                           'extended_view', 'legendLabels', 'labelsStateVar', 'labelsModes', 'title']


    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_user = self.datatypeFactory.get_user()
        self.test_project = TestFactory.import_default_project(self.test_user)
        self.datatypeFactory.project = self.test_project

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None
        self.face_surface = TestFactory.get_entity(self.test_project, FaceSurface())
        assert self.face_surface is not None
        assert TestFactory.get_entity(self.test_project, EEGCap()) is not None


    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series=time_series)

        for key in TestBrainViewer.EXPECTED_KEYS + TestBrainViewer.EXPECTED_EXTRA_KEYS:
            assert key in result and result[key] is not None
        assert not result['extended_view']

    
    def test_get_required_memory(self):
        """
        BrainViewer should know its required memory, so we expect a positive number and not -1.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        assert BrainViewer().get_required_memory_size(time_series) > 0
        
        
    def test_generate_preview(self):
        """
        Check that all required keys are present in the preview generated by BrainViewer.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.generate_preview(time_series, figure_size=(500, 200))
        for key in TestBrainViewer.EXPECTED_KEYS:
            assert key in result and result[key] is not None, key
        
        
    def test_launch_eeg(self):
        """
        Tests successful launch of a DualBrainViewer and that all required keys are present in the returned template dictionary.
        """
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        time_series.configure()
        viewer = DualBrainViewer()
        viewer.current_project_id = self.test_project.id
        result = viewer.launch(time_series)
        for key in TestBrainViewer.EXPECTED_KEYS + TestBrainViewer.EXPECTED_EXTRA_KEYS:
            assert key in result and result[key] is not None
        assert result['extended_view']
Example #33
0
class NIFTIImporterTest(TransactionalTestCase):
    """
    Unit-tests for NIFTI importer.
    """

    NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii')
    GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz')
    TVB_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz')
    WRONG_NII_FILE = os.path.abspath(__file__)

    DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]]
    UNKNOWN_STR = "unknown"


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': False, 'connectivity': None}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           time_series.module + "." + time_series.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "TimeSeries should not be none")

        return time_series


    def test_import_demo_nii_data(self):
        """
        This method tests import of a NIFTI file.
        """
        time_series = self._import(self.TVB_NII_FILE)

        # Since self.assertAlmostEquals is not available on all machines,
        # we compare floats as follows
        self.assertTrue(abs(1.0 - time_series.sample_period) <= 0.001)
        self.assertEqual("sec", str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title.startswith("NIFTI"))

        data_shape = time_series.read_data_shape()
        self.assertEquals(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEquals(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertEquals("mm", volume.voxel_unit)


    def test_import_nii_without_time_dimension(self):
        """
        This method tests import of a NIFTI file without a time dimension.
        """
        time_series = self._import(self.NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEquals(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEquals(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEquals(self.UNKNOWN_STR, volume.voxel_unit)


    def test_import_nifti_compressed(self):
        """
        This method tests import of a NIFTI file compressed in GZ format.
        """
        time_series = self._import(self.GZ_NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEquals(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEquals(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEquals(self.UNKNOWN_STR, volume.voxel_unit)


    def test_import_wrong_nii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._import(self.WRONG_NII_FILE)
            self.fail("Import should fail in case of a wrong NIFTI format.")
        except OperationException:
            # Expected exception
            pass
Example #34
0
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EEG Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a connectivity;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = [
            'tsNames', 'groupedLabels', 'tsModes', 'tsStateVars',
            'longestChannelLength', 'label_x', 'entities', 'page_size',
            'number_of_visible_points', 'extended_view', 'initialSelection',
            'ag_settings'
        ]

        for key in expected_keys:
            self.assertTrue(key in result, "key not found %s" % key)

        expected_ag_settings = [
            'channelsPerSet', 'channelLabels', 'noOfChannels',
            'translationStep', 'normalizedSteps', 'nan_value_found',
            'baseURLS', 'pageSize', 'nrOfPages', 'timeSetPaths', 'totalLength',
            'number_of_visible_points', 'extended_view',
            'measurePointsSelectionGIDs'
        ]

        ag_settings = json.loads(result['ag_settings'])

        for key in expected_ag_settings:
            self.assertTrue(key in ag_settings,
                            "ag_settings should have the key %s" % key)
Example #35
0
class TestGIFTISurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for GIFTI Surface importer.
    """

    GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii')
    GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii')
    WRONG_GII_FILE = os.path.abspath(__file__)


    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')

        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: ""}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           surface.module + "." + surface.type)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"

        return surface


    def test_import_surface_gifti_data(self):
        """
            This method tests import of a surface from GIFTI file.
            !!! Important: We changed this test to execute only GIFTI parse
                because storing surface it takes too long (~ 9min) since
                normals needs to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        surface = parser.parse(self.GIFTI_SURFACE_FILE)

        assert 131342 == len(surface.vertices)
        assert 262680 == len(surface.triangles)


    def test_import_timeseries_gifti_data(self):
        """
        This method tests import of a time series from GIFTI file.
        !!! Important: We changed this test to execute only GIFTI parse
            because storing surface it takes too long (~ 9min) since
            normals needs to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        time_series = parser.parse(self.GIFTI_TIME_SERIES_FILE)

        data_shape = time_series.read_data_shape()

        assert 135 == data_shape[0]
        assert 143479 == data_shape[1]


    def test_import_wrong_gii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._importSurface(self.WRONG_GII_FILE)
            raise AssertionError("Import should fail in case of a wrong GIFTI format.")
        except OperationException:
            # Expected exception
            pass
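
The try/fail/except idiom in test_import_wrong_gii_file can be written more compactly in a pytest-based suite, which TestGIFTISurfaceImporter appears to be. A sketch of the equivalent check (the OperationException import path is assumed):

import pytest
from tvb.core.adapters.exceptions import OperationException  # assumed path

def check_wrong_gifti_import(test_case):
    # test_case stands in for the TestGIFTISurfaceImporter instance above.
    with pytest.raises(OperationException):
        test_case._importSurface(test_case.WRONG_GII_FILE)
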
class SensorsImporterTest(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'EEG_unit_vectors_BrainProducts_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_channels_reg13.txt.bz2')


    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "Sensors instance should not be none")

        return time_series


    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS, SensorsEEG())

        expected_size = 62
        self.assertTrue(eeg_sensors.labels is not None)
        self.assertEqual(expected_size, len(eeg_sensors.labels))
        self.assertEqual(expected_size, len(eeg_sensors.locations))
        self.assertEqual((expected_size, 3), eeg_sensors.locations.shape)
        self.assertEqual(expected_size, eeg_sensors.number_of_sensors)


    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())

        expected_size = 151
        self.assertTrue(meg_sensors.labels is not None)
        self.assertEqual(expected_size, len(meg_sensors.labels))
        self.assertEqual(expected_size, len(meg_sensors.locations))
        self.assertEqual((expected_size, 3), meg_sensors.locations.shape)
        self.assertEqual(expected_size, meg_sensors.number_of_sensors)
        self.assertTrue(meg_sensors.has_orientation)
        self.assertEqual(expected_size, len(meg_sensors.orientations))
        self.assertEqual((expected_size, 3), meg_sensors.orientations.shape)


    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
            self.fail("Import should fail in case of a MEG import without orientation.")
        except OperationException:
            # Expected exception
            pass


    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE, self.importer.INTERNAL_SENSORS, SensorsInternal())

        expected_size = 62
        self.assertTrue(internal_sensors.labels is not None)
        self.assertEqual(expected_size, len(internal_sensors.labels))
        self.assertEqual(expected_size, len(internal_sensors.locations))
        self.assertEqual((expected_size, 3), internal_sensors.locations.shape)
        self.assertEqual(expected_size, internal_sensors.number_of_sensors)
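
The shapes asserted in these sensor tests imply a simple file layout: one row per sensor with a label and three location coordinates, plus three orientation components for MEG (which is why importing an EEG file as MEG fails). A hedged sketch of a parser for that assumed layout, not the importer's actual format:

def parse_sensor_row(line, with_orientation=False):
    parts = line.split()
    label = parts[0]
    location = [float(v) for v in parts[1:4]]
    if with_orientation:
        if len(parts) < 7:
            raise ValueError('MEG sensors require an orientation per row')
        orientation = [float(v) for v in parts[4:7]]
    else:
        orientation = None
    return label, location, orientation

label, location, orientation = parse_sensor_row('Fp1 80.8 26.1 -4.0')
assert label == 'Fp1' and len(location) == 3 and orientation is None
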
Example #37
0
class TestSensorsImporter(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2')

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        assert 1 == len(data_types), "Project should contain only one data type = Sensors."

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert time_series is not None, "Sensors instance should not be none"

        return time_series

    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS, SensorsEEG())

        expected_size = 62
        assert eeg_sensors.labels is not None
        assert expected_size == len(eeg_sensors.labels)
        assert expected_size == len(eeg_sensors.locations)
        assert (expected_size, 3) == eeg_sensors.locations.shape
        assert expected_size == eeg_sensors.number_of_sensors

    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())

        expected_size = 151
        assert meg_sensors.labels is not None
        assert expected_size == len(meg_sensors.labels)
        assert expected_size == len(meg_sensors.locations)
        assert (expected_size, 3) == meg_sensors.locations.shape
        assert expected_size == meg_sensors.number_of_sensors
        assert meg_sensors.has_orientation
        assert expected_size == len(meg_sensors.orientations)
        assert (expected_size, 3) == meg_sensors.orientations.shape

    def test_import_meg_without_orientation(self):
        """
        This method tests that a MEG import fails when the file contains no orientations.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
            raise AssertionError("Import should fail in case of a MEG import without orientation.")
        except OperationException:
            # Expected exception
            pass

    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE, self.importer.INTERNAL_SENSORS, SensorsInternal())

        expected_size = 62
        assert internal_sensors.labels is not None
        assert expected_size == len(internal_sensors.labels)
        assert expected_size == len(internal_sensors.locations)
        assert (expected_size, 3) == internal_sensors.locations.shape
        assert expected_size == internal_sensors.number_of_sensors
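Since this class already uses bare assert statements (the pytest style), the expected-failure check in test_import_meg_without_orientation could be written more compactly with pytest.raises. A minimal sketch, assuming OperationException and the _import helper are in scope exactly as above:

import pytest  # would live at module level with the other imports

    def test_import_meg_without_orientation(self):
        """Importing an EEG file (no orientations) as MEG sensors should fail."""
        # pytest.raises fails the test automatically if the block raises nothing,
        # or raises something other than OperationException.
        with pytest.raises(OperationException):
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())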
Example #38
0
class NIFTIImporterTest(TransactionalTestCase):
    """
    Unit-tests for NIFTI importer.
    """

    NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii')
    GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz')
    TVB_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'avg152T1_LR_nifti.nii.gz')
    WRONG_NII_FILE = os.path.abspath(__file__)

    DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]]
    UNKNOWN_STR = "unknown"


    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           time_series.module + "." + time_series.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "TimeSeries should not be None")

        return time_series


    def test_import_demo_nii_data(self):
        """
        This method tests import of the demo NIFTI file.
        """
        time_series = self._import(self.TVB_NII_FILE)

        # self.assertAlmostEqual is not available on all machines,
        # so we compare the floats manually instead
        self.assertTrue(abs(1.0 - time_series.sample_period) <= 0.001)
        self.assertEqual("sec", str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title.startswith("NIFTI"))

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertEqual("mm", volume.voxel_unit)


    def test_import_nii_without_time_dimension(self):
        """
        This method tests import of a NIFTI file that has no time dimension.
        """
        time_series = self._import(self.NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)


    def test_import_nifti_compressed(self):
        """
        This method tests import of a NIFTI file compressed in GZ format.
        """
        time_series = self._import(self.GZ_NII_FILE)

        self.assertEqual(1.0, time_series.sample_period)
        self.assertEqual(self.UNKNOWN_STR, str(time_series.sample_period_unit))
        self.assertEqual(0.0, time_series.start_time)
        self.assertTrue(time_series.title is not None)

        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        # We have only one entry for the time dimension
        self.assertEqual(1, data_shape[0])
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
        self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)


    def test_import_wrong_nii_file(self):
        """ 
        This method tests import of a file in a wrong format
        """
        try:
            self._import(self.WRONG_NII_FILE)
            self.fail("Import should fail in case of a wrong NIFTI format.")
        except OperationException:
            # Expected exception
            pass
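The three import tests above repeat the same block of shape and volume assertions. A hedged sketch of a shared helper that would remove the duplication (the name _assert_shape_and_volume is hypothetical, not part of the original class):

    def _assert_shape_and_volume(self, time_series, expected_voxel_unit, expected_voxel_size=None):
        """Shared checks: 4-D data with a single time point, plus volume metadata."""
        data_shape = time_series.read_data_shape()
        self.assertEqual(4, len(data_shape))
        self.assertEqual(1, data_shape[0])  # only one entry on the time dimension
        dimension_labels = time_series.labels_ordering
        self.assertTrue(dimension_labels is not None)
        self.assertEqual(4, len(dimension_labels))

        volume = time_series.volume
        self.assertTrue(volume is not None)
        self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
        if expected_voxel_size is not None:
            self.assertTrue(numpy.equal(expected_voxel_size, volume.voxel_size).all())
        self.assertEqual(expected_voxel_unit, volume.voxel_unit)

Each test body would then end with a single call, e.g. self._assert_shape_and_volume(time_series, self.UNKNOWN_STR, [3.0, 3.0, 3.0]).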
Example #39
0
class TVBImporterTest(TransactionalTestCase):
    """
    Unit-tests for TVB importer.
    """
    TVB_EXPORTER = "TVBExporter"

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(
            self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy the H5 file to another location, since the exported original
        # will be deleted together with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER,
                                         h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path),
                        "Simple data type was not exported correctly")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(
            self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path),
                        "Data type group was not exported correctly")

        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate a clean project into which to import the data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

    def test_zip_import(self):
        """
        This method tests import of TVB data in ZIP format (which implies multiple data types
        in the same ZIP file, exported from a group).
        """
        self._import(self.zip_file_path)
        count = FlowService().get_available_datatypes(
            self.test_project.id,
            self.datatype.module + "." + self.datatype.type)[1]
        self.assertEqual(9, count,
                         "9 datatypes should have been imported from group.")

    def test_h5_import(self):
        """
        This method tests import of TVB data in H5 format: a single data type per import.
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            self.datatype.module + "." + self.datatype.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None,
                        "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid,
                         "Imported datatype should have the same gid")

    def test_import_invalid_file(self):
        """
        This method tests import of a file which does not exist or does not
        have a supported format.
        """
        try:
            self._import("invalid_path")
            self.fail(
                "System should throw an exception if trying to import an invalid file"
            )
        except OperationException:
            # Expected
            pass

        # Now try to generate a file on disk with wrong format and import that
        file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER,
                                 "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")

        try:
            self._import(file_path)
            self.fail(
                "System should throw an exception if trying to import a file with wrong format"
            )
        except OperationException:
            # Expected
            pass
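Because this class is unittest-style, the two try/except/fail blocks in test_import_invalid_file could instead use assertRaises as a context manager. A minimal sketch under the same assumptions (same OperationException, same _import helper):

    def test_import_invalid_file(self):
        """Importing a missing path or a non-TVB file should raise OperationException."""
        # A path that does not exist on disk
        with self.assertRaises(OperationException):
            self._import("invalid_path")

        # An existing file whose content is not a supported TVB format
        file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")
        with self.assertRaises(OperationException):
            self._import(file_path)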