Example #1
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a datatype group
     """
     self.datatypeFactory = DatatypesFactory()
     self.group = self.datatypeFactory.create_datatype_group()
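For context, a group created this way is what the discrete PSE viewer consumes; a minimal hedged sketch of that usage, reusing only the calls shown in Example #26 below (the test name here is hypothetical):

 def test_launch_group(self):
     # hypothetical test; the adapter, launch call and result key come from Example #26
     viewer = DiscretePSEAdapter()
     result = viewer.launch(self.group)
     self.assertEqual(self.group.gid, result["datatype_group_gid"])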
Example #2
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
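        # Import the 'Face' and 'EEG Cap' surfaces from ZIP files located next to the surface_dataset module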
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)
Example #3
    def setUp(self):
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy the exported H5 file to another location, since the original
        # will be deleted together with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, cfg.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(cfg.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correctly")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correctly")

        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate the project, but as a clean one into which to import the data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
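A matching tearDown is not shown in this snippet; a minimal sketch, assuming the same cleanup helpers already used above and in the other examples of this listing:

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)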
Example #4
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)
Example #5
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project and a `Sensors_Importer`
     """
     self.datatypeFactory = DatatypesFactory()
     self.test_project = self.datatypeFactory.get_project()
     self.test_user = self.datatypeFactory.get_user()
     self.importer = Sensors_Importer()
Example #6
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())
Example #7
 def test_readprojectsforlink(self):
     """
     Check that the dictionary of linkable projects is returned properly.
     """
     dt_factory = DatatypesFactory()
     cherrypy.session[b_c.KEY_USER] = dt_factory.user
     datatype = dt_factory.create_datatype_with_storage()
     result = self.project_c.readprojectsforlink(datatype.id)
     self.assertTrue(result is None)     # No projects to link into
     new_project = TestFactory.create_project(dt_factory.user)
     result = self.project_c.readprojectsforlink(datatype.id)
     self.assertEqual(result, '{"%s": "%s"}' % (new_project.id, new_project.name))
Example #8
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
Example #9
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EegMonitor.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = [
            'tsStateVars', 'tsModes', 'translationStep', 'total_length',
            'title', 'timeSetPaths', 'number_of_visible_points',
            'normalizedSteps', 'noOfChannels', 'labelsForCheckBoxes',
            'label_x', 'graphLabels', 'entities', 'channelsPage'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #10
 def setUp(self):
     BaseControllersTest.init(self)
     self.surface_m_p_c = SurfaceModelParametersController()
     BurstController().index()
     stored_burst = cherrypy.session[b_c.KEY_BURST_CONFIG]
     datatypes_factory = DatatypesFactory()
     _, self.connectivity = datatypes_factory.create_connectivity()
     _, self.surface = datatypes_factory.create_surface()
     new_params = {}
     for key, val in SIMULATOR_PARAMETERS.iteritems():
         new_params[key] = {'value': val}
     new_params['connectivity'] = {'value': self.connectivity.gid}
     new_params['surface'] = {'value': self.surface.gid}
     stored_burst.simulator_configuration = new_params
Example #11
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())
Example #12
 def test_read_datatype_attribute(self):
     """
     Read an attribute from a datatype.
     """
     dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", 'this is the stored data'.split())
     returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
     self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')
Example #13
 def test_get_linkable_projects(self):
     """
     Test get_linkable_projects; with no projects linked it should return None.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     result_dict = self.project_c.get_linkable_projects(datatype.id, False, False)
     self.assertTrue(result_dict['projectslinked'] is None)
     self.assertEqual(result_dict['datatype_id'], datatype.id)
Example #14
 def test_read_datatype_attribute_method_call(self):
     """
     Call a method on the given datatype.
     """
     dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", 'this is the stored data'.split())
     args = {'length' : 101}
     returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
     self.assertTrue(returned_data == str(range(101)))
Example #15
 def setUp(self):
     """
     Sets up the environment for testing;
     creates a datatype group and a Parameter Exploration Controller
     """
     BaseControllersTest.init(self)
     self.dt_group = DatatypesFactory().create_datatype_group()
     self.controller = ParameterExplorationController()
Example #16
class ConnectivityViewerTest(TransactionalTestCase):
    """
    Unit-tests for ConnectivityViewer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from ConnectivityViewer launch.
        """
        viewer = ConnectivityViewer()
        result = viewer.launch(self.connectivity)
        expected_keys = [
            'weightsMin', 'weightsMax', 'weights', 'urlWeights', 'urlVertices',
            'urlTriangles', 'urlTracts', 'urlPositions', 'urlNormals',
            'rightHemisphereJson', 'raysArray', 'rayMin', 'rayMax',
            'positions', 'leftHemisphereJson', 'connectivity_entity',
            'bothHemisphereJson'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #17
class CovarianceViewerTest(TransactionalTestCase):
    """
    Unit-tests for CovarianceVisualizer.
    """
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CovarianceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        covariance = self.datatypeFactory.create_covariance(time_series)
        viewer = CovarianceVisualizer()
        result = viewer.launch(covariance)
        expected_keys = [
            'matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent',
            'isAdapter'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #18
 def test_get_datatype_details(self):
     """
     Check various fields in the datatype details dictionary.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     dt_details = self.project_c.get_datatype_details(datatype.gid)
     self.assertEqual(dt_details['datatype_id'], datatype.id)
     self.assertEqual(dt_details['entity_gid'], datatype.gid)
     self.assertFalse(dt_details['isGroup'])
     self.assertTrue(dt_details['isRelevant'])
     self.assertEqual(len(dt_details['overlay_indexes']), len(dt_details['overlay_tabs']))
Example #19
 def test_set_visibility_datatype(self):
     """
     Set the datatype visibility to False and then back to True, and check that the changes are persisted.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     self.assertTrue(datatype.visible)
     self.project_c.set_visibility('datatype', datatype.gid, 'False')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     self.assertFalse(datatype.visible)
     self.project_c.set_visibility('datatype', datatype.gid, 'True')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     self.assertTrue(datatype.visible)
Example #20
 def _long_burst_launch(self, is_range=False):
     self.burst_c.index()
     connectivity = DatatypesFactory().create_connectivity()[1]
     launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
     launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
     if not is_range:
         launch_params['simulation_length'] = '10000'
     else:
         launch_params['simulation_length'] = '[10000,10001,10002]'
         launch_params['first_range'] = 'simulation_length'
     burst_id, _ = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))
     return dao.get_burst_by_id(burst_id)
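A hedged usage sketch for this helper; the calls below just exercise its two branches, and the assertions are illustrative only:

     burst = self._long_burst_launch()                      # single simulation_length value
     ranged_burst = self._long_burst_launch(is_range=True)  # ranged simulation_length
     self.assertTrue(burst is not None)
     self.assertTrue(ranged_burst is not None)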
Example #21
class CrossCoherenceViewerTest(TransactionalTestCase):
    """
    Unit-tests for CrossCoherenceVisualizer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CrossCoherenceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        ccoherence = self.datatypeFactory.create_crosscoherence(time_series)
        viewer = CrossCoherenceVisualizer()
        result = viewer.launch(ccoherence)
        expected_keys = [
            'strides', 'shape', 'mainContent', 'isAdapter', 'frequency',
            'coherence'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #22
 def test_set_visibility_operation(self):
     """
     Same flow of operations as in test_set_visibility_datatype, just for the
     operation entity.
     """
     dt_factory = DatatypesFactory()
     operation = dt_factory.operation
     self.assertTrue(operation.visible)
     self.project_c.set_visibility('operation', operation.gid, 'False')
     operation = dao.get_operation_by_gid(operation.gid)
     self.assertFalse(operation.visible)
     self.project_c.set_visibility('operation', operation.gid, 'True')
     operation = dao.get_operation_by_gid(operation.gid)
     self.assertTrue(operation.visible)
Example #23
class PSETest(TransactionalTestCase):
    """
    Unit-tests for the TimeSeries viewer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from the TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = [
            't0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
            'mainContent', 'labels', 'labels_json', 'figsize', 'dt'
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example #24
 def setUp(self):
     """
     Sets up the environment for testing;
     creates a `RegionsModelParametersController` and a connectivity
     """
     BaseControllersTest.init(self)
     self.region_m_p_c = RegionsModelParametersController()
     BurstController().index()
     stored_burst = cherrypy.session[b_c.KEY_BURST_CONFIG]
     _, self.connectivity = DatatypesFactory().create_connectivity()
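     # Wrap the default simulator parameters in the {'value': ...} form stored on the burst configuration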
     new_params = {}
     for key, val in SIMULATOR_PARAMETERS.iteritems():
         new_params[key] = {'value': val}
     new_params['connectivity'] = {'value': self.connectivity.gid}
     stored_burst.simulator_configuration = new_params
Example #25
 def test_step_2(self):
     _, surface = DatatypesFactory().create_surface()
     self.surface_s_c.step_1_submit(1, 1)
     context = b_c.get_from_session(KEY_SURFACE_CONTEXT)
     context.equation_kwargs[SURFACE_PARAMETER] = surface.gid
     result_dict = self.surface_s_c.step_2()
     expected_keys = [
         'urlVerticesPick', 'urlVertices', 'urlTrianglesPick',
         'urlTriangles', 'urlNormalsPick', 'urlNormals', 'surfaceGID',
         'mainContent', 'loadExistentEntityUrl',
         'existentEntitiesInputList', 'definedFocalPoints'
     ]
     map(lambda x: self.assertTrue(x in result_dict), expected_keys)
     self.assertEqual(result_dict['next_step_url'],
                      '/spatial/stimulus/surface/step_2_submit')
     self.assertEqual(result_dict['mainContent'],
                      'spatial/stimulus_surface_step2_main')
     self.assertEqual(result_dict['loadExistentEntityUrl'],
                      '/spatial/stimulus/surface/load_surface_stimulus')
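The `map(lambda ...)` call above relies on Python 2's eager `map`; the equivalent explicit loop used by the other examples in this listing would be:

     for key in expected_keys:
         self.assertTrue(key in result_dict)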
Example #26
class PSETest(TransactionalTestCase):
    """
    Unit-tests for the PSE adapters.
    """


    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a datatype group
        """
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()


    def test_launch_discrete(self):
        """
        Check that all required keys are present in output from PSE Discrete Adapter launch.
        """
        viewer = DiscretePSEAdapter()
        result = viewer.launch(self.group)

        expected_keys = ['status', 'size_metric', 'series_array', 'min_shape_size_weight', 'min_color',
                         'max_shape_size_weight', 'max_color', 'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                         'has_started_ops', 'datatype_group_gid', 'datatypes_dict', 'data', 'color_metric']
        for key in expected_keys:
            self.assertTrue(key in result)
        self.assertEqual(self.group.gid, result["datatype_group_gid"])
        self.assertEqual(False, result["has_started_ops"])



    def test_launch_isocline(self):
        """
        Check that all required keys are present in output from PSE Isocline Adapter launch.
        """
        viewer = IsoclinePSEAdapter()
        result = viewer.launch(self.group)
        self.assertEqual(viewer._ui_name, result["title"])
        self.assertEqual(config.SERVER_IP, result["serverIp"])
        self.assertEqual(config.MPLH5_SERVER_PORT, result["serverPort"])
        self.assertEqual(0, len(result["figureNumbers"]))
        self.assertEqual(0, len(result["metrics"]))
Example #27
 def setUp(self):
     BaseControllersTest.init(self)
     self.dt_group = DatatypesFactory().create_datatype_group()
     self.controller = ParameterExplorationController()
Example #28
class RegionMappingImporterTest(TransactionalTestCase):
    """
    Unit-tests for RegionMapping importer.
    """

    TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.txt')
    ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.zip')
    BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__),
                            'original_region_mapping.bz2')

    # Wrong data
    WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_1.txt')
    WRONG_FILE_2 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_2.txt')
    WRONG_FILE_3 = os.path.join(os.path.dirname(test_data.__file__),
                                'region_mapping_wrong_3.txt')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _get_entity(self, expected_data, filters=None):
        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type, filters)
        self.assertEqual(
            1, len(data_types), "Project should contain only one data type:" +
            str(expected_data.type))

        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(entity is not None, "Instance should not be none")

        return entity

    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings.
        :param import_file_path: absolute path of the file to be imported
        :param surface_gid: GID of the surface the mapping refers to
        :param connectivity_gid: GID of the connectivity the mapping refers to
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "test",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setUp we import a CFF, which creates an additional RegionMapping,
        # so here we have to find the mapping we have just imported
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping

    def test_import_no_surface_or_connectivity(self):
        """
            This method tests import of region mapping without providing a surface or connectivity
        """
        try:
            self._import(self.TXT_FILE, None, self.connectivity.gid)
            self.fail("Import should fail in case Surface is missing")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.TXT_FILE, self.surface.gid, None)
            self.fail("Import should fail in case Connectivity is missing")
        except OperationException:
            # Expected exception
            pass

    def test_import_from_txt(self):
        """
            This method tests import of region mapping from TXT file
        """
        self._import_from_file(self.TXT_FILE)

    def test_import_from_zip(self):
        """
            This method tests import of region mapping from a ZIP file
        """
        self._import_from_file(self.ZIP_FILE)

    def test_import_from_bz2(self):
        """
            This method tests import of region mapping from a BZ2 file
        """
        self._import_from_file(self.BZ2_FILE)

    def _import_from_file(self, import_file):
        """
            Imports a region mapping from the given file and validates the result
        """
        region_mapping = self._import(import_file, self.surface.gid,
                                      self.connectivity.gid)

        self.assertTrue(region_mapping.surface is not None)
        self.assertTrue(region_mapping.connectivity is not None)

        array_data = region_mapping.array_data
        self.assertTrue(array_data is not None)
        self.assertEqual(16384, len(array_data))

    def test_import_wrong_file_content(self):
        """
            This method tests import of region mapping with:
            - a wrong region number
            - wrong number of regions
            - negative region number
        """
        try:
            self._import(self.WRONG_FILE_1, self.surface.gid,
                         self.connectivity.gid)
            self.fail("Import should fail in case of invalid region number")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.WRONG_FILE_2, self.surface.gid,
                         self.connectivity.gid)
            self.fail("Import should fail in case of invalid regions number")
        except OperationException:
            # Expected exception
            pass

        try:
            self._import(self.WRONG_FILE_3, self.surface.gid,
                         self.connectivity.gid)
            self.fail(
                "Import should fail in case of invalid region number (negative number)"
            )
        except OperationException:
            # Expected exception
            pass
Example #29
class BrainViewerTest(TransactionalTestCase):
    """
    Unit-tests for BrainViewer.
    """
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)

    def tearDown(self):
        """
        Clean up test data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.launch(time_series=time_series)
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles',
            'urlMeasurePointsLabels', 'title', 'time_series', 'shelfObject',
            'pageSize', 'labelsStateVar', 'nrOfPages', 'labelsModes',
            'minActivityLabels', 'minActivity', 'measure_points',
            'maxActivity', 'isOneToOneMapping', 'isAdapter', 'extended_view',
            'base_activity_url', 'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertFalse(result['extended_view'])

    def test_get_required_memory(self):
        """
        BrainViewer should report its required memory size, so we expect a positive number (not -1).
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        self.assertTrue(
            BrainViewer().get_required_memory_size(time_series) > 0)

    def test_generate_preview(self):
        """
        Check that all required keys are present in the preview generated by BrainViewer.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = BrainViewer()
        result = viewer.generate_preview(time_series, (500, 200))
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles', 'pageSize',
            'nrOfPages', 'minActivityLabels', 'minActivity', 'maxActivity',
            'isOneToOneMapping', 'isAdapter', 'base_activity_url',
            'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)

    def test_launch_eeg(self):
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(
            self.connectivity, 'EEG', sensors)
        time_series.configure()
        viewer = BrainEEG()
        result = viewer.launch(time_series)
        expected_keys = [
            'urlVertices', 'urlNormals', 'urlTriangles',
            'urlMeasurePointsLabels', 'title', 'time_series', 'shelfObject',
            'pageSize', 'labelsStateVar', 'nrOfPages', 'labelsModes',
            'minActivityLabels', 'minActivity', 'measure_points',
            'maxActivity', 'isOneToOneMapping', 'isAdapter', 'extended_view',
            'base_activity_url', 'alphas_indices'
        ]
        for key in expected_keys:
            self.assertTrue(key in result and result[key] is not None)
        self.assertTrue(result['extended_view'])
Example #30
class TVBImporterTest(TransactionalTestCase):
    """
    Unit-tests for TVB importer.
    """
    TVB_EXPORTER = "TVBExporter"


    def setUp(self):
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy the exported H5 file to another location, since the original
        # will be deleted together with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, cfg.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(cfg.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correctly")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correctly")

        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate the project, but as a clean one into which to import the data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()


    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)


    def test_zip_import(self):
        """
            This method tests import of TVB data in ZIP format (which implies multiple data types
            in the same ZIP file, exported from a group)
        """
        self._import(self.zip_file_path)
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)
        self.assertEqual(3, len(data_types), "3 datatypes should have been imported from group.")


    def test_h5_import(self):
        """
            This method tests import of TVB data in H5 format: a single data type per import
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None, "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid, "Imported datatype should have the same gid")


    def test_import_invalid_file(self):
        """
        This method tests import of a file which does not exist or does not
        have a supported format.
        """
        try:
            self._import("invalid_path")
            self.fail("System should throw an exception if trying to import an invalid file")
        except OperationException:
            # Expected
            pass

        # Now try to generate a file on disk with wrong format and import that
        file_path = os.path.join(cfg.TVB_TEMP_FOLDER, "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")

        try:
            self._import(file_path)
            self.fail("System should throw an exception if trying to import a file with wrong format")
        except OperationException:
            # Expected
            pass