Пример #1
0
 def transactional_setup_method(self):
     """
     Prepare the test environment: build a datatypes factory and the
     datatype group the tests operate on.
     """
     factory = DatatypesFactory()
     self.datatypeFactory = factory
     self.group = factory.create_datatype_group()
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        exports them to disk, then wipes the project so the import tests
        start against a clean database with only the exported files left.
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted together with the project below
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correct")

        # Drop the project's on-disk structure and DB entries (keeping the
        # exported files, which live in TVB_TEMP_FOLDER / zip path).
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
Пример #3
0
 def setUp(self):
     """
     Prepare the test environment: create the factory and the datatype
     group used by the tests.
     """
     factory = DatatypesFactory()
     self.datatypeFactory = factory
     self.group = factory.create_datatype_group()
Пример #4
0
class ObjSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torrus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')

    def setUp(self):
        # Factory provides the project and user the import operations run under.
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

    def tearDown(self):
        # Drop everything the test project created on disk.
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        """Fire the OBJ importer on the given file and return the imported surface."""
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer',
                                              'ObjSurfaceImporter')

        launch_args = {'data_file': import_file_path,
                       "surface_type": FACE,
                       DataTypeMetaData.KEY_SUBJECT: "John"}
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **launch_args)

        # The project starts empty, so exactly one FaceSurface must now exist.
        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        imported = self._importSurface(self.face)
        self.assertEqual(8614, len(imported.vertices))
        self.assertEqual(8614, len(imported.vertex_normals))
        self.assertEqual(17224, len(imported.triangles))

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which included normals
        """
        imported = self._importSurface(self.torrus)
        self.assertEqual(441, imported.number_of_vertices)
        self.assertEqual(441, len(imported.vertex_normals))
        self.assertEqual(800, imported.number_of_triangles)
Пример #5
0
    def test_get_launchable_algorithms(self):
        """A time series datatype should expose both analyzers and viewers."""
        factory = DatatypesFactory()
        connectivity = factory.create_connectivity(4)[1]
        time_series = factory.create_timeseries(connectivity)
        launchable = self.flow_service.get_launchable_algorithms(time_series.gid)
        assert 'Analyze' in launchable
        assert 'View' in launchable
Пример #6
0
    def test_get_launchable_algorithms(self):
        """A time series datatype should expose both analyzers and viewers."""
        factory = DatatypesFactory()
        connectivity = factory.create_connectivity(4)[1]
        time_series = factory.create_timeseries(connectivity)
        launchable = self.flow_service.get_launchable_algorithms(time_series.gid)
        self.assertTrue('Analyze' in launchable)
        self.assertTrue('View' in launchable)
Пример #7
0
 def transactional_setup_method(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project and a `Sensors_Importer`
     """
     factory = DatatypesFactory()
     self.datatypeFactory = factory
     self.test_project = factory.get_project()
     self.test_user = factory.get_user()
     self.importer = Sensors_Importer()
class ObjSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    # OBJ fixtures shipped with the tvb_data package.
    torrus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')


    def setUp(self):
        # Factory provides the project and user the import operations run under.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        """
        Fire the OBJ surface importer on `import_file_path` and return the
        single FaceSurface entity it produced.
        """
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        # The project starts empty, so exactly one FaceSurface must now exist.
        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface


    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        surface = self._importSurface(self.face)
        self.assertEqual(8614, len(surface.vertices))
        self.assertEqual(8614, len(surface.vertex_normals))
        self.assertEqual(17224, len(surface.triangles))


    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which included normals
        """
        surface = self._importSurface(self.torrus)
        self.assertEqual(441, surface.number_of_vertices)
        self.assertEqual(441, len(surface.vertex_normals))
        self.assertEqual(800, surface.number_of_triangles)
Пример #9
0
class MatTimeSeriesImporterTest(TransactionalTestCase):
    """
    Unit-tests for the MAT time-series importer: imports a connectivity,
    then a BOLD time-course bound to it.
    """

    # Fixture paths inside the tvb_data berlinSubjects data-set.
    base_pth = os.path.join(os.path.dirname(tvb_data.__file__),
                            'berlinSubjects', 'QL_20120814')
    bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat')
    connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip')

    def setUp(self):
        # Project/user context plus the connectivity the time series references.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self._import_connectivity()

    def tearDown(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_connectivity(self):
        """Import the ZIP connectivity fixture and keep the resulting entity."""
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')

        ### Launch Operation
        FlowService().fire_operation(importer,
                                     self.test_user,
                                     self.test_project.id,
                                     uploaded=self.connectivity_path,
                                     Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())

    def test_import_bold(self):
        """Import the BOLD .mat file as a region time series and check its shape."""
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.mat_timeseries_importer',
            'MatTimeSeriesImporter')

        args = dict(
            data_file=self.bold_path,
            dataset_name='QL_20120824_DK_BOLD_timecourse',
            structure_path='',
            transpose=False,
            slice=None,
            sampling_rate=1000,
            start_time=0,
            tstype='region',
            tstype_parameters_option_region_connectivity=self.connectivity.gid,
            Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        # Expected shape: (time points, state variables, regions, modes).
        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
Пример #10
0
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EEG Viewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

                
    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    
    
    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        # Sensors fixture shipped with the demo data package.
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = ['tsNames', 'groupedLabels', 'tsModes', 'tsStateVars', 'longestChannelLength',
                         'label_x', 'entities', 'page_size', 'number_of_visible_points',
                         'extended_view', 'initialSelection', 'ag_settings', 'ag_settings']

        for key in expected_keys:
            self.assertTrue(key in result, "key not found %s" % key)

        # 'ag_settings' is itself a JSON-encoded dict; check its keys too.
        expected_ag_settings = ['channelsPerSet', 'channelLabels', 'noOfChannels', 'translationStep',
                                'normalizedSteps', 'nan_value_found', 'baseURLS', 'pageSize',
                                'nrOfPages', 'timeSetPaths', 'totalLength', 'number_of_visible_points',
                                'extended_view', 'measurePointsSelectionGIDs']

        ag_settings = json.loads(result['ag_settings'])

        for key in expected_ag_settings:
            self.assertTrue(key in ag_settings, "ag_settings should have the key %s" % key)
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)
Пример #12
0
class TestObjSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')

    def transactional_setup_method(self):
        # Factory provides the project and user the import operations run under.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_surface(self, import_file_path=None):
        """
        Fire the OBJ surface importer on `import_file_path` and return the
        single FaceSurface entity it produced.
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        # BUG FIX: was `assert 1, len(data_types) == "..."` — that asserts the
        # constant 1 (always true) and never actually checked the count.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        surface = self._import_surface(self.face)
        assert 8614 == len(surface.vertices)
        assert 8614 == len(surface.vertex_normals)
        assert 17224 == len(surface.triangles)

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which included normals
        """
        surface = self._import_surface(self.torus)
        assert 441 == surface.number_of_vertices
        assert 441 == len(surface.vertex_normals)
        assert 800 == surface.number_of_triangles
Пример #13
0
 def transactional_setup_method(self):
     """
     Prepare a burst configuration carrying a fresh connectivity and
     surface, so the surface-model-parameters controller has data to use.
     """
     self.init()
     self.surface_m_p_c = SurfaceModelParametersController()
     BurstController().index()
     stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]
     datatypes_factory = DatatypesFactory()
     _, self.connectivity = datatypes_factory.create_connectivity()
     _, self.surface = datatypes_factory.create_surface()
     # Wrap each simulator parameter in the {'value': ...} shape the burst expects.
     new_params = dict((key, {'value': val})
                       for key, val in SIMULATOR_PARAMETERS.iteritems())
     new_params['connectivity'] = {'value': self.connectivity.gid}
     new_params['surface'] = {'value': self.surface.gid}
     stored_burst.simulator_configuration = new_params
 def transactional_setup_method(self):
     """
     Prepare a burst configuration carrying a fresh connectivity and
     surface, so the surface-model-parameters controller has data to use.
     """
     self.init()
     self.surface_m_p_c = SurfaceModelParametersController()
     BurstController().index()
     stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]
     datatypes_factory = DatatypesFactory()
     _, self.connectivity = datatypes_factory.create_connectivity()
     _, self.surface = datatypes_factory.create_surface()
     # Wrap each simulator parameter in the {'value': ...} shape the burst expects.
     new_params = {key: {'value': val}
                   for key, val in SIMULATOR_PARAMETERS.iteritems()}
     new_params['connectivity'] = {'value': self.connectivity.gid}
     new_params['surface'] = {'value': self.surface.gid}
     stored_burst.simulator_configuration = new_params
class TestZIPSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                              'outer_skull_4096.zip')

    def transactional_setup_method(self):
        # Factory provides the project and user the import operations run under.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        """
        Fire the ZIP surface importer on `import_file_path` and return the
        single SkullSkin entity it produced.
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')
        args = {
            'uploaded': import_file_path,
            'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, SkullSkin)[0]
        # BUG FIX: was `assert 1, len(data_types) == "..."` — that asserts the
        # constant 1 (always true) and never actually checked the count.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_surf_zip(self):
        """Importing the skull fixture yields a surface with the known geometry."""
        surface = self._importSurface(self.surf_skull)
        assert 4096 == len(surface.vertices)
        assert 4096 == surface.number_of_vertices
        assert 8188 == len(surface.triangles)
        assert 8188 == surface.number_of_triangles
        assert '' == surface.user_tag_3
        assert surface.valid_for_simulations
Пример #16
0
class ZIPSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    # ZIP surface fixture shipped with the tvb_data package.
    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                              'outer_skull_4096.zip')

    def setUp(self):
        # Factory provides the project and user the import operations run under.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        """
        Fire the ZIP surface importer on `import_file_path` and return the
        single SkullSkin entity it produced.
        """
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer',
                               'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'uploaded': import_file_path,
            'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # The project starts empty, so exactly one SkullSkin must now exist.
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_surf_zip(self):
        """Importing the skull fixture yields a surface with the known geometry."""
        surface = self._importSurface(self.surf_skull)
        self.assertEqual(4096, len(surface.vertices))
        self.assertEqual(4096, surface.number_of_vertices)
        self.assertEqual(8188, len(surface.triangles))
        self.assertEqual(8188, surface.number_of_triangles)
        self.assertEqual('', surface.user_tag_3)
        self.assertTrue(surface.valid_for_simulations)
Пример #17
0
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        ## Import Shelf Face Object
        face_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project, face_path, FACE)
Пример #18
0
 def setUp(self):
     """
     Create the datatypes factory and the datatype group under test.
     """
     factory = DatatypesFactory()
     self.group = factory.create_datatype_group()
     self.datatypeFactory = factory
Пример #19
0
    def setUp(self):
        """
        Reset the database before each test; build the simulator adapter
        and a started operation carrying the default simulator parameters.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        # NOTE(review): mutates the module-level SIMULATOR_PARAMETERS dict
        # *after* it was serialized into the operation above — later tests see
        # this connectivity gid; verify this ordering is intentional.
        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
Пример #20
0
 def transactional_setup_method(self):
     """
     Build the datatypes factory and the datatype group used by the tests.
     """
     factory = DatatypesFactory()
     self.datatypeFactory = factory
     self.group = factory.create_datatype_group()
    def setUp(self):
        """
        Sets up the environment for testing
        creates a `NoiseConfigurationController`
        """
        self.init()
        self.noise_c = NoiseConfigurationController()
        _, self.connectivity = DatatypesFactory().create_connectivity()
        BurstController().index()

        stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]

        # Wrap each simulator parameter in the {'value': ...} shape the
        # burst configuration expects.
        new_params = {}
        for key, val in SIMULATOR_PARAMETERS.iteritems():
            new_params[key] = {'value': val}
        new_params['connectivity'] = {'value': self.connectivity.gid}

        # Simulate selection of a specific integration  from the ui
        new_params[PARAM_INTEGRATOR] = {'value': EulerStochastic.__name__}
        new_params[PARAM_MODEL] = {'value': Generic2dOscillator.__name__}
        new_params[INTEGRATOR_PARAMETERS + '_option_EulerStochastic_noise'] = {
            'value': Additive.__name__
        }
        stored_burst.simulator_configuration = new_params

        self.noise_c.index()
class ZIPSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    # ZIP surface fixture shipped with the tvb_data package.
    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')


    def setUp(self):
        # Factory provides the project and user the import operations run under.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def tearDown(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        """
        Fire the ZIP surface importer on `import_file_path` and return the
        single SkullSkin entity it produced.
        """
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        # The project starts empty, so exactly one SkullSkin must now exist.
        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface


    def test_import_surf_zip(self):
        """Importing the skull fixture yields a surface with the known geometry."""
        surface = self._importSurface(self.surf_skull)
        self.assertEqual(4096, len(surface.vertices))
        self.assertEqual(4096, surface.number_of_vertices)
        self.assertEqual(8188, len(surface.triangles))
        self.assertEqual(8188, surface.number_of_triangles)
        self.assertEqual('', surface.user_tag_3)
        self.assertTrue(surface.valid_for_simulations)
Пример #23
0
 def test_read_datatype_attribute(self):
     """
     Read an attribute from a datatype.
     """
     stored_words = 'this is the stored data'.split()
     dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", stored_words)
     returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
     self.assertEqual('["this", "is", "the", "stored", "data"]', returned_data)
Пример #24
0
class TimeSeriesTest(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        # Keys the viewer template depends on.
        expected_keys = [
            "t0",
            "shape",
            "preview",
            "labelsStateVar",
            "labelsModes",
            "mainContent",
            "labels",
            "labels_json",
            "figsize",
            "dt",
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Пример #25
0
 def test_get_linkable_projects(self):
     """
     Test get linkable project, no projects linked so should just return none.
     """
     stored_datatype = DatatypesFactory().create_datatype_with_storage()
     linkable_info = self.project_c.get_linkable_projects(stored_datatype.id, False, False)
     assert linkable_info['projectslinked'] is None
     assert linkable_info['datatype_id'] == stored_datatype.id
 def setUp(self):
     """
     Sets up the environment for testing;
     creates a datatype group and a Parameter Exploration Controller
     """
     self.init()
     factory = DatatypesFactory()
     self.dt_group = factory.create_datatype_group()
     self.controller = ParameterExplorationController()
class MatTimeSeriesImporterTest(TransactionalTestCase):
    """
    Unit-tests for the MAT time-series importer: imports a connectivity,
    then a BOLD time-course bound to it.
    """

    # Fixture paths inside the tvb_data berlinSubjects data-set.
    base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814')
    bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat')
    connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip')

    def setUp(self):
        # Project/user context plus the connectivity the time series references.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self._import_connectivity()


    def tearDown(self):
        # Remove the on-disk structure of the test project.
        FilesHelper().remove_project_structure(self.test_project.name)


    def _import_connectivity(self):
        """Import the ZIP connectivity fixture and keep the resulting entity."""
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())


    def test_import_bold(self):
        """Import the BOLD .mat file as a region time series and check its shape."""
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.mat_timeseries_importer', 'MatTimeSeriesImporter')
        importer = ABCAdapter.build_adapter(group)

        args = dict(data_file=self.bold_path, dataset_name='QL_20120824_DK_BOLD_timecourse', structure_path='',
                    transpose=False, slice=None, sampling_rate=1000, start_time=0,
                    tstype='region',
                    tstype_parameters_option_region_connectivity=self.connectivity.gid,
                    Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        # Expected shape: (time points, state variables, regions, modes).
        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
Пример #28
0
 def transactional_setup_method(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project and a `Sensors_Importer`
     """
     self.importer = Sensors_Importer()
     self.datatypeFactory = DatatypesFactory()
     self.test_project = self.datatypeFactory.get_project()
     self.test_user = self.datatypeFactory.get_user()
Пример #29
0
 def test_read_datatype_attribute_method_call(self):
     """
     Call method on given datatype.
     """
     stored_words = 'this is the stored data'.split()
     dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", stored_words)
     returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', length=101)
     self.assertTrue(returned_data == str(range(101)))
Пример #30
0
    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports a CFF data-set
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_user = factory.get_user()
        self.test_project = TestFactory.import_default_project(self.test_user)
        factory.project = self.test_project

        # The default project must already contain these entities.
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None
        self.face_surface = TestFactory.get_entity(self.test_project, FaceSurface())
        assert self.face_surface is not None
        assert TestFactory.get_entity(self.test_project, EEGCap()) is not None
class TestZIPSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')


    def transactional_setup_method(self):
        """Create a test user and project to import into."""
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def transactional_teardown_method(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)


    def _importSurface(self, import_file_path=None):
        """
        Import a ZIP surface file and return the resulting SkullSkin entity.

        :param import_file_path: absolute path of the ZIP file to import
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        args = {'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
                'zero_based_triangles': True,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        # BUG FIX: original read `assert 1, len(data_types) == "..."`, which asserts
        # the constant 1 (always true) and never checked the count at all.
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface


    def test_import_surf_zip(self):
        """Check vertex/triangle counts and flags of the imported skull surface."""
        surface = self._importSurface(self.surf_skull)
        assert 4096 == len(surface.vertices)
        assert 4096 == surface.number_of_vertices
        assert 8188 == len(surface.triangles)
        assert 8188 == surface.number_of_triangles
        assert '' == surface.user_tag_3
        assert surface.valid_for_simulations
Пример #32
0
class _BaseLinksTest(TransactionalTestCase):

    GEORGE1st = "george the grey"
    GEORGE2nd = "george"


    def _initialize_two_projects(self):
        """
        Build a source project seeded with one red (simple) and one blue
        (with storage) datatype, plus an empty destination project, and
        instantiate the flow and project services used by the tests.
        """
        src_factory = DatatypesFactory()
        self.datatype_factory_src = src_factory
        self.src_project = src_factory.project
        self.src_usr_id = src_factory.user.id
        self.red_datatype = src_factory.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = src_factory.create_datatype_with_storage(subject=self.GEORGE2nd)

        # The destination project starts out empty
        dest_factory = DatatypesFactory()
        self.datatype_factory_dest = dest_factory
        self.dest_project = dest_factory.project
        self.dest_usr_id = dest_factory.user.id

        self.flow_service = FlowService()
        self.project_service = ProjectService()


    def setUp(self):
        """Start from a clean database, then create the two projects."""
        self.clean_database(delete_folders=True)
        self._initialize_two_projects()


    def tearDown(self):
        """Drop all test data, including on-disk folders."""
        self.clean_database(delete_folders=True)


    def red_datatypes_in(self, project_id):
        """Count of Datatype1 entities visible in the given project."""
        return self.flow_service.get_available_datatypes(project_id, Datatype1)[1]


    def blue_datatypes_in(self, project_id):
        """Count of Datatype2 entities visible in the given project."""
        return self.flow_service.get_available_datatypes(project_id, Datatype2)[1]
Пример #33
0
    def setUpTVB(self):
        """
        Create a user and two projects: a source project seeded with a red
        (simple) and a blue (with storage) datatype, and an empty destination
        project; also instantiate the flow and project services.
        """
        factory = DatatypesFactory()
        self.user = factory.user
        self.src_project = factory.project
        self.red_datatype = factory.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = factory.create_datatype_with_storage(subject=self.GEORGE2nd)

        # Empty project that link/import operations will target
        self.dest_project = TestFactory.create_project(admin=factory.user, name="destination")

        self.flow_service = FlowService()
        self.project_service = ProjectService()
Пример #34
0
 def test_get_datatype_details(self):
     """
     The details dictionary must echo the datatype's id/gid, flag it as a
     relevant non-group entity, and carry consistent overlay metadata.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     details = self.project_c.get_datatype_details(datatype.gid)
     assert details['datatype_id'] == datatype.id
     assert details['entity_gid'] == datatype.gid
     assert not details['isGroup']
     assert details['isRelevant']
     assert len(details['overlay_indexes']) == len(details['overlay_tabs_horizontal'])
Пример #35
0
    def _initialize_two_projects(self):
        """
        Build a source project seeded with one red (simple) and one blue
        (with storage) datatype, plus an empty destination project, and
        instantiate the flow and project services used by the tests.
        """
        src_factory = DatatypesFactory()
        self.datatype_factory_src = src_factory
        self.src_project = src_factory.project
        self.src_usr_id = src_factory.user.id
        self.red_datatype = src_factory.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = src_factory.create_datatype_with_storage(subject=self.GEORGE2nd)

        # The destination project starts out empty
        dest_factory = DatatypesFactory()
        self.datatype_factory_dest = dest_factory
        self.dest_project = dest_factory.project
        self.dest_usr_id = dest_factory.user.id

        self.flow_service = FlowService()
        self.project_service = ProjectService()
 def test_set_visibility_datatype(self):
     """
     Toggling datatype visibility off and back on must be persisted.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     self.assertTrue(datatype.visible)
     # Flip the flag both ways and re-read the entity after each change
     for flag, check in (('False', self.assertFalse), ('True', self.assertTrue)):
         self.project_c.set_visibility('datatype', datatype.gid, flag)
         datatype = dao.get_datatype_by_gid(datatype.gid)
         check(datatype.visible)
    def setUp(self):
        """
        Build a burst configuration from the stochastic Hopfield/Heun ranges
        fixture, wire in a test connectivity, and create serialization
        managers for both a populated and an empty configuration.
        """
        _, self.connectivity = DatatypesFactory().create_connectivity()
        self.test_user = TestFactory.create_user(username="******")
        self.test_project = TestFactory.create_project(self.test_user, "Test")

        burst = BurstConfiguration(self.test_project.id)
        burst._simulator_configuration = self.CONF_HOPFIELD_HEUN_STOCH_RANGES
        burst.prepare_after_load()
        burst.simulator_configuration['connectivity'] = {'value': self.connectivity.gid}

        self.s_manager = SerializationManager(burst)
        self.empty_manager = SerializationManager(BurstConfiguration(None))
Пример #38
0
class ICATest(TransactionalTestCase):
    """
    Unit-tests for ICA Viewer.
    """

    def setUp(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        All expected keys must be present in the ICA viewer launch result.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        conn_measure = self.datatypeFactory.create_ICA(time_series)
        result = ICA().launch(conn_measure)
        for key in ('matrix_strides', 'matrix_shape', 'matrix_data',
                    'mainContent', 'isAdapter'):
            self.assertTrue(key in result)
Пример #39
0
 def _long_burst_launch(self, is_range=False):
     """
     Launch a new burst with a long simulation_length and return the
     persisted burst entity.

     :param is_range: when True, launch a PSE range over simulation_length
     """
     self.burst_c.index()
     connectivity = DatatypesFactory().create_connectivity()[1]
     launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
     launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
     if is_range:
         launch_params['simulation_length'] = '[10000,10001,10002]'
         launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
     else:
         launch_params['simulation_length'] = '10000'
     launch_params = {"simulator_parameters": json.dumps(launch_params)}
     burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
     return dao.get_burst_by_id(burst_id)
Пример #40
0
    def setUp(self):
        """
        Create a test user/project pair and import the legacy face
        surface ZIP data-set into it.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        # Import shelf face object
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'face_surface_old.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, FACE, True)
 def setUp(self):
     """
     Create a test user/project pair, import the CFF data-set and
     fetch the resulting connectivity.
     """
     factory = DatatypesFactory()
     self.datatypeFactory = factory
     self.test_project = factory.get_project()
     self.test_user = factory.get_user()

     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
     self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
     self.assertTrue(self.connectivity is not None)
Пример #42
0
class NetworkxImporterTest(TransactionalTestCase):
    """
    Unit-tests for the networkx connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data",
                               'connectome_83.gpickle')

    def setUp(self):
        """Create a test user and project to import into."""
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_import(self):
        """Importing the gpickle fixture must yield exactly one 83-region connectivity."""
        self.assertEqual(0, self.count_all_entities(Connectivity))

        ### Retrieve Adapter instance and launch the import operation
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.networkx_importer',
            'NetworkxConnectivityImporter')
        args = {
            'data_file': self.upload_file,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        self.assertEqual(1, self.count_all_entities(Connectivity))
        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
Пример #43
0
class TestCrossCorrelationViewer(TransactionalTestCase):
    """
    Unit-tests for Cross Correlation Viewer.
    """

    def transactional_setup_method(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None

    def transactional_teardown_method(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        All expected keys must be present in the cross-correlation
        viewer launch result.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        ccorr = self.datatypeFactory.create_crosscorrelation(time_series)
        result = CrossCorrelationVisualizer().launch(ccorr)
        for key in ('matrix_shape', 'matrix_data', 'mainContent', 'isAdapter'):
            assert key in result
Пример #44
0
    def transactional_setup_method(self):
        """
        Create a test user/project pair and import the face surface
        OBJ data-set into it.
        """
        factory = DatatypesFactory()
        self.factory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        ## Import Shelf Face Object
        face_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project, face_path, FACE)
class ConnectivityViewerTest(TransactionalTestCase):
    """
    Unit-tests for Connectivity Viewer.
    """

    def setUp(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        All expected keys must be present in the connectivity viewer
        launch result.
        """
        result = ConnectivityViewer().launch(self.connectivity)
        for key in ('weightsMin', 'weightsMax', 'urlWeights', 'urlVertices',
                    'urlTriangles', 'urlTracts', 'urlPositions', 'urlNormals',
                    'rightHemisphereJson', 'raysArray', 'rayMin', 'rayMax', 'positions',
                    'leftHemisphereJson', 'connectivity_entity', 'bothHemisphereJson'):
            self.assertTrue(key in result)
Пример #46
0
class TestTimeSeries(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """


    def transactional_setup_method(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None


    def transactional_teardown_method(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_launch(self):
        """
        All expected keys must be present in the time-series viewer
        launch result.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        result = TimeSeries().launch(timeseries)
        for key in ('t0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
                    'mainContent', 'labels', 'labels_json', 'figsize', 'dt'):
            assert key in result
Пример #47
0
class TimeSeriesTest(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """

    def setUp(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        All expected keys must be present in the time-series viewer
        launch result.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        result = TimeSeries().launch(timeseries)
        for key in ('t0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
                    'mainContent', 'labels', 'labels_json', 'figsize', 'dt'):
            self.assertTrue(key in result)
Пример #48
0
class TestCovarianceViewer(TransactionalTestCase):
    """
    Unit-tests for Covariance Viewer.
    """


    def transactional_setup_method(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None


    def transactional_teardown_method(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_launch(self):
        """
        All expected keys must be present in the covariance viewer
        launch result.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        covariance = self.datatypeFactory.create_covariance(time_series)
        result = CovarianceVisualizer().launch(covariance)
        for key in ('matrix_shape', 'matrix_data', 'mainContent', 'isAdapter'):
            assert key in result
class NetworkxImporterTest(TransactionalTestCase):
    """
    Unit-tests for the networkx connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')


    def setUp(self):
        """Create a test user and project to import into."""
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()


    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_import(self):
        """Importing the gpickle fixture must yield exactly one 83-region connectivity."""
        self.assertEqual(0, self.count_all_entities(Connectivity))

        ### Retrieve Adapter instance and launch the import operation
        group = dao.find_group('tvb.adapters.uploaders.networkx_importer', 'NetworkxConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        self.assertEqual(1, self.count_all_entities(Connectivity))
        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
Пример #50
0
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        exports both to files, then wipes the project/database and recreates
        a clean project into which the exported files can be imported.
        NOTE: statement order matters — exports must complete and be copied
        away before the project structure and database are removed.
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(
            self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER,
                                         h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path),
                        "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        # (the ZIP lives in the temp folder, so no copy is needed here)
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(
            self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path),
                        "Data type group was not exported correct")

        # Destroy the source project so the import tests start from scratch
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
Пример #51
0
class TestNetworkxImporter(TransactionalTestCase):
    """
    Unit-tests for the networkx connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')


    def transactional_setup_method(self):
        """Create a test user and project to import into."""
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()


    def transactional_teardown_method(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)


    def test_import(self):
        """Importing the gpickle fixture must yield exactly one 83-region connectivity."""
        assert 0 == self.count_all_entities(Connectivity)

        ### Retrieve Adapter instance and launch the import operation
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.networkx_importer',
                                              'NetworkxConnectivityImporter')
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        assert 1 == self.count_all_entities(Connectivity)
        conn = self.get_all_entities(Connectivity)[0]
        assert 83 == conn.number_of_regions
class CrossCoherenceViewerTest(TransactionalTestCase):
    """
    Unit-tests for Cross Coherence Viewer.
    """

    def setUp(self):
        """
        Create a test user/project pair, import the CFF data-set and
        fetch the resulting connectivity.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """Remove the test project's file structure."""
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        All expected keys must be present in the cross-coherence viewer
        launch result.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        cross_coherence = self.datatypeFactory.create_crosscoherence(time_series)
        result = CrossCoherenceVisualizer().launch(cross_coherence)
        for key in ('matrix_data', 'matrix_shape', 'matrix_strides', 'frequency'):
            self.assertTrue(key in result)
    def setUp(self):
        """
        Reset storage, create a user/project/connectivity, build the
        simulator adapter and a started simulation operation, then wire
        the connectivity gid into the shared SIMULATOR_PARAMETERS.
        """
        initialize_storage()
        factory = DatatypesFactory()
        self.datatypes_factory = factory
        self.test_user = factory.get_user()
        self.test_project = factory.get_project()
        self.connectivity = factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        # NOTE: mutates the module-level parameter dict shared by the tests
        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
Пример #54
0
    def setUp(self):
        """
        Import the default project for a fresh test user and fetch the
        connectivity, face surface and EEG cap it is expected to contain.
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_user = factory.get_user()
        self.test_project = TestFactory.import_default_project(self.test_user)
        factory.project = self.test_project

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.face_surface = TestFactory.get_entity(self.test_project, FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(TestFactory.get_entity(self.test_project, EEGCap()) is not None)
Пример #55
0
class PSETest(TransactionalTestCase):
    """
    Unit-tests for the discrete and isocline PSE adapters.
    """


    def setUp(self):
        """Create a datatype group for the PSE viewers to consume."""
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()


    def test_launch_discrete(self):
        """
        All expected keys must be present in the discrete PSE adapter
        launch result, and the group gid / started-ops flag must match.
        """
        result = DiscretePSEAdapter().launch(self.group)

        for key in ('status', 'size_metric', 'series_array', 'min_shape_size', 'min_color', 'data',
                    'max_shape_size', 'max_color', 'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                    'has_started_ops', 'datatype_group_gid', 'datatypes_dict', 'color_metric'):
            self.assertTrue(key in result)
        self.assertEqual(self.group.gid, result["datatype_group_gid"])
        self.assertEqual('false', result["has_started_ops"])



    def test_launch_isocline(self):
        """
        The isocline PSE adapter launch result must expose the viewer
        title, MPLH5 server URL, and a single figure and metric.
        """
        viewer = IsoclinePSEAdapter()
        result = viewer.launch(self.group)
        self.assertEqual(viewer._ui_name, result["title"])
        self.assertEqual(TvbProfile.current.web.MPLH5_SERVER_URL, result["mplh5ServerURL"])
        self.assertEqual(1, len(result["figureNumbers"]))
        self.assertEqual(1, len(result["metrics"]))
Пример #56
0
class TestPSE(TransactionalTestCase):
    """
    Unit-tests for the discrete and isocline PSE adapters.
    """

    def transactional_setup_method(self):
        """Create a datatype group for the PSE viewers to consume."""
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()

    def test_launch_discrete(self):
        """
        All expected keys must be present in the discrete PSE adapter
        launch result, and the group gid / started-ops flag must match.
        """
        result = DiscretePSEAdapter().launch(self.group)

        for key in ('status', 'size_metric', 'series_array', 'min_shape_size', 'min_color', 'd3_data',
                    'max_shape_size', 'max_color', 'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                    'has_started_ops', 'datatype_group_gid', 'datatypes_dict', 'color_metric'):
            assert key in result
        assert self.group.gid == result["datatype_group_gid"]
        assert 'false' == result["has_started_ops"]

    def test_launch_isocline(self):
        """
        The isocline PSE adapter launch result must expose the viewer
        title and a single available metric.
        """
        viewer = IsoclinePSEAdapter()
        result = viewer.launch(self.group)
        assert viewer._ui_name == result["title"]
        assert 1 == len(result["available_metrics"])
Пример #57
0
class TestSensorsImporter(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """
    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2')

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        factory = DatatypesFactory()
        self.datatypeFactory = factory
        self.test_project = factory.get_project()
        self.test_user = factory.get_user()
        self.importer = Sensors_Importer()

    def transactional_teardown_method(self):
        """
        Clean-up: remove the on-disk structure of the test project.
        """
        project_name = self.test_project.name
        FilesHelper().remove_project_structure(project_name)

    def _import(self, import_file_path, sensors_type, expected_data):
        """
        Fire a sensors import operation and return the resulting entity.
        :param import_file_path: absolute path of the file to be imported
        :param sensors_type: sensors type identifier understood by the importer
        :param expected_data: instance whose module/type name the expected result class
        """
        # Retrieve adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        launch_args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        # Launch import operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **launch_args)

        expected_type = expected_data.module + "." + expected_data.type
        data_types = FlowService().get_available_datatypes(self.test_project.id, expected_type)[0]
        assert 1 == len(data_types), "Project should contain only one data type = Sensors."

        # Load the stored entity back by its GID (third column of the result row).
        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert sensors is not None, "Sensors instance should not be none"

        return sensors

    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS, SensorsEEG())

        # The bundled EEG fixture carries 62 sensors with 3-D locations.
        nr_sensors = 62
        assert eeg_sensors.labels is not None
        assert len(eeg_sensors.labels) == nr_sensors
        assert len(eeg_sensors.locations) == nr_sensors
        assert eeg_sensors.locations.shape == (nr_sensors, 3)
        assert eeg_sensors.number_of_sensors == nr_sensors

    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())

        # The bundled MEG fixture carries 151 sensors, each with location and orientation.
        nr_sensors = 151
        assert meg_sensors.labels is not None
        assert len(meg_sensors.labels) == nr_sensors
        assert len(meg_sensors.locations) == nr_sensors
        assert meg_sensors.locations.shape == (nr_sensors, 3)
        assert meg_sensors.number_of_sensors == nr_sensors
        assert meg_sensors.has_orientation
        assert len(meg_sensors.orientations) == nr_sensors
        assert meg_sensors.orientations.shape == (nr_sensors, 3)

    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        # The EEG fixture lacks orientation data, so a MEG import from it must fail.
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
        except OperationException:
            return  # expected failure path
        raise AssertionError("Import should fail in case of a MEG import without orientation.")

    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE, self.importer.INTERNAL_SENSORS, SensorsInternal())

        # Internal sensors are read from the same 62-sensor EEG fixture.
        nr_sensors = 62
        assert internal_sensors.labels is not None
        assert len(internal_sensors.labels) == nr_sensors
        assert len(internal_sensors.locations) == nr_sensors
        assert internal_sensors.locations.shape == (nr_sensors, 3)
        assert internal_sensors.number_of_sensors == nr_sensors