Example #1
    def test_happy_flow_import(self):
        """
        Test that importing a connectivity ZIP creates at least one DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       Connectivity())
        group = dao.find_group(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'connectivity_regions_96.zip')
        args = {'uploaded': zip_path}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      Connectivity())
        self.assertEqual(dt_count_before + 1, dt_count_after)
Example #2
    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
        once that is done.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
        return connectivity
Example #3
    def test_happy_flow_import(self):
        """
        Test that importing a CSV connectivity (against a reference ZIP connectivity) creates a new DataType in DB.
        """
        TestConnectivityZip.import_test_connectivity96(self.test_user,
                                                       self.test_project,
                                                       subject=TEST_SUBJECT_A)

        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())

        self._import_csv_test_connectivity(reference_connectivity.gid, TEST_SUBJECT_B)

        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        assert dt_count_before + 1 == dt_count_after

        filters = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        # check relationship between the imported connectivity and the reference
        assert (reference_connectivity.centres == imported_connectivity.centres).all()
        assert (reference_connectivity.orientations == imported_connectivity.orientations).all()

        assert reference_connectivity.number_of_regions == imported_connectivity.number_of_regions
        assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()

        assert not (reference_connectivity.weights == imported_connectivity.weights).all()
        assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
Example #4
    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }

        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         self.sim_algorithm.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        self.operation_service.initiate_prelaunch(self.operation,
                                                  adapter_instance, {})
        return connectivity
Example #5
    def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(weights, weights_delimiter)
        tract_matrix = self._read_csv_file(tracts, tracts_delimiter)

        FilesHelper.remove_files([weights, tracts])

        if weights_matrix.shape[0] != input_data.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.number_of_regions))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
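`_read_csv_file` is defined elsewhere in the importer; a minimal stand-in, assuming it simply wraps numpy.loadtxt with the user-supplied delimiter:

import io
import numpy

def read_csv_matrix(source, delimiter=','):
    # Hypothetical stand-in for self._read_csv_file: parse a delimited
    # text file into a 2-D numpy array.
    return numpy.loadtxt(source, delimiter=delimiter)

weights = read_csv_matrix(io.StringIO("0,1.5\n1.5,0"))
assert weights.shape == (2, 2)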
Example #6
    def parse(self, network):
        """
        Populate Connectivity DataType from NetworkX object.
        Tested with results from Connectome Mapper Toolkit.

        :param network: NetworkX graph
        :return: Connectivity object
        """
        graph_size = len(network.nodes())

        weights_matrix = numpy.zeros((graph_size, graph_size))
        tract_matrix = numpy.zeros((graph_size, graph_size))
        labels_vector, positions, cortical, hemisphere = [], [], [], []

        try:
            for node in range(1, graph_size + 1):
                node_data = network.nodes[node]

                pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
                positions.append(list(pos))

                label = self._find_value(node_data, self.KEY_NODE_LABEL)
                labels_vector.append(str(label))

                if self.REGION_CORTICAL == self._find_value(
                        node_data, self.KEY_NODE_REGION):
                    cortical.append(True)
                else:
                    cortical.append(False)

                if self.HEMISPHERE_RIGHT == self._find_value(
                        node_data, self.KEY_NODE_HEMISPHERE):
                    hemisphere.append(True)
                else:
                    hemisphere.append(False)

            # Iterate over edges:
            for start, end in network.edges():
                weights_matrix[start - 1][end - 1] = self._find_value(
                    network.adj[start][end], self.KEY_EDGE_WEIGHT)
                tract_matrix[start - 1][end - 1] = self._find_value(
                    network.adj[start][end], self.KEY_EDGE_TRACT)

            result = Connectivity()
            result.region_labels = numpy.array(labels_vector)
            result.centres = numpy.array(positions)
            # result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
            result.hemispheres = numpy.array(hemisphere)
            result.cortical = numpy.array(cortical)
            result.weights = weights_matrix
            result.tract_lengths = tract_matrix
            result.configure()
            return result

        except KeyError as err:
            self.logger.exception("Could not parse Connectivity")
            raise ParseException(err)
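A minimal graph the parser above could consume; the attribute names here are hypothetical stand-ins for the KEY_NODE_* and KEY_EDGE_* constants, and node ids are 1-based to match the range(1, graph_size + 1) loop:

import networkx
import numpy

g = networkx.Graph()
g.add_node(1, position=(0.0, 0.0, 0.0), label="A", region="cortical", hemisphere="right")
g.add_node(2, position=(10.0, 0.0, 0.0), label="B", region="subcortical", hemisphere="left")
g.add_edge(1, 2, weight=2.5, tract=40.0)

n = len(g.nodes())
weights = numpy.zeros((n, n))
for start, end in g.edges():
    # 1-based node ids map to 0-based matrix indices, exactly as above.
    weights[start - 1][end - 1] = g.adj[start][end]["weight"]
assert weights[0][1] == 2.5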
Example #7
    def configure(self,
                  dt=2**-3,
                  model=models.Generic2dOscillator,
                  speed=4.0,
                  coupling_strength=0.00042,
                  method="HeunDeterministic",
                  surface_sim=False,
                  default_connectivity=True):
        """
        Create an instance of the Simulator class, by default using the
        generic 2D oscillator local dynamic model and the deterministic
        version of Heun's method for the numerical integration.
        """
        self.method = method

        if default_connectivity:
            white_matter = Connectivity(load_default=True)
            region_mapping = RegionMapping.from_file(
                source_file="regionMapping_16k_76.txt")
        else:
            white_matter = Connectivity.from_file(
                source_file="connectivity_192.zip")
            region_mapping = RegionMapping.from_file(
                source_file="regionMapping_16k_192.txt")

        white_matter_coupling = coupling.Linear(a=coupling_strength)
        white_matter.speed = speed

        dynamics = model()

        if method[-10:] == "Stochastic":
            hisss = noise.Additive(nsig=numpy.array([2**-11]))
            integrator = eval("integrators." + method + "(dt=dt, noise=hisss)")
        else:
            integrator = eval("integrators." + method + "(dt=dt)")

        if surface_sim:
            local_coupling_strength = numpy.array([2**-10])
            default_cortex = Cortex(load_default=True,
                                    region_mapping_data=region_mapping)
            default_cortex.coupling_strength = local_coupling_strength
            default_cortex.local_connectivity = LocalConnectivity(
                load_default=default_connectivity, surface=default_cortex)
        else:
            default_cortex = None

        # Order of monitors determines order of returned values.
        self.sim = simulator.Simulator(model=dynamics,
                                       connectivity=white_matter,
                                       coupling=white_matter_coupling,
                                       integrator=integrator,
                                       monitors=self.monitors,
                                       surface=default_cortex)
        self.sim.configure()
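The eval()-based integrator dispatch above also works as a plain attribute lookup; a sketch, assuming the integrator classes live on tvb.simulator.integrators as the original imports suggest:

import numpy
from tvb.simulator import integrators, noise

def build_integrator(method, dt):
    # Resolve the integrator class by name instead of eval()-ing a string.
    cls = getattr(integrators, method)
    if method.endswith("Stochastic"):
        hisss = noise.Additive(nsig=numpy.array([2 ** -11]))
        return cls(dt=dt, noise=hisss)
    return cls(dt=dt)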
Example #8
    def create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        operation, algo_id, storage_path = self.__create_operation()
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, connectivity
Example #9
    def create_connectivity(self, nodes=74):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        operation, algo_id, storage_path = self.__create_operation()
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((nodes, nodes))
        connectivity.centres = numpy.ones((nodes, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, connectivity
Example #10
    def parse(self, network):
        """
        Populate Connectivity DataType from NetworkX object.
        Tested with results from Connectome Mapper Toolkit.

        :param network: NetworkX graph
        :return: Connectivity object
        """
        graph_size = len(network.nodes())

        weights_matrix = numpy.zeros((graph_size, graph_size))
        tract_matrix = numpy.zeros((graph_size, graph_size))
        labels_vector, positions, cortical, hemisphere = [], [], [], []

        try:
            for node in network.nodes():
                node_data = network.node[node]

                pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
                positions.append(list(pos))

                label = self._find_value(node_data, self.KEY_NODE_LABEL)
                labels_vector.append(str(label))

                if self.REGION_CORTICAL == self._find_value(node_data, self.KEY_NODE_REGION):
                    cortical.append(1)
                else:
                    cortical.append(0)

                if self.HEMISPHERE_RIGHT == self._find_value(node_data, self.KEY_NODE_HEMISPHERE):
                    hemisphere.append(True)
                else:
                    hemisphere.append(False)

            # Iterate over edges:
            for start, end in network.edges():
                weights_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_WEIGHT)
                tract_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_TRACT)

            result = Connectivity()
            result.storage_path = self.storage_path
            result.region_labels = labels_vector
            result.centres = positions
            result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
            result.hemispheres = hemisphere
            result.cortical = cortical
            result.weights = weights_matrix
            result.tract_lengths = tract_matrix
            return result

        except KeyError as err:
            self.logger.exception("Could not parse Connectivity")
            raise ParseException(err)
Example #11
    def _create_connectivity(self, nodes_number):
        """
        Create a connectivity entity and return its GID
        """
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((nodes_number, nodes_number))
        connectivity.centres = numpy.ones((nodes_number, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})

        return dao.get_datatype_by_id(connectivity.id).gid
Example #12
    def _create_connectivity(self, nodes_number):
        """
        Create a connectivity entity and return its GID
        """
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((nodes_number, nodes_number))
        connectivity.centres = numpy.ones((nodes_number, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})

        return dao.get_datatype_by_id(connectivity.id).gid
Example #13
    def test_happy_flow_import(self):
        """
        Test that importing a connectivity ZIP creates at least one DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       Connectivity())

        ConnectivityZipTest.import_test_connectivity96(self.test_user,
                                                       self.test_project)

        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      Connectivity())
        self.assertEqual(dt_count_before + 1, dt_count_after)
Example #14
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)
Example #15
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'face-surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'Face', 1)
        zip_path = os.path.join(os.path.dirname(surface_dataset.__file__),
                                'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, 'EEG Cap', 1)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.face_surface = TestFactory.get_entity(self.test_project,
                                                   FaceSurface())
        self.assertTrue(self.face_surface is not None)
        self.assertTrue(
            TestFactory.get_entity(self.test_project, EEGCap()) is not None)
Example #16
    def build_simulator(self, n=4):

        self.conn = numpy.zeros((n, n))  # , numpy.int32)
        for i in range(self.conn.shape[0] - 1):
            self.conn[i, i + 1] = 1

        self.dist = numpy.r_[:n * n].reshape((n, n))
        self.dist = numpy.array(self.dist, dtype=float)

        self.sim = Simulator(
            conduction_speed=1.0,
            coupling=IdCoupling(),
            surface=None,
            stimulus=None,
            integrator=Identity(dt=1.0),
            initial_conditions=numpy.ones((n * n, 1, n, 1)),
            simulation_length=10.0,
            connectivity=Connectivity(region_labels=numpy.array(['']),
                                      weights=self.conn,
                                      tract_lengths=self.dist,
                                      speed=numpy.array([1.0]),
                                      centres=numpy.array([0.0])),
            model=Sum(),
            monitors=(Raw(), ),
        )
        self.sim.configure()
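The loop above wires each node to its successor only; for n = 4 the weights matrix is a single superdiagonal (whether rows act as sources or targets depends on the simulator's convention):

import numpy

n = 4
conn = numpy.zeros((n, n))
for i in range(n - 1):
    conn[i, i + 1] = 1  # one link per node, forming a feed-forward chain

# conn is now:
# [[0. 1. 0. 0.]
#  [0. 0. 1. 0.]
#  [0. 0. 0. 1.]
#  [0. 0. 0. 0.]]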
Example #17
    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}

        self.operation = model.Operation(self.test_user.id, self.test_project.id, self.sim_algorithm.id,
                                         json.dumps(''), meta=json.dumps(meta), status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        self.operation_service.initiate_prelaunch(self.operation, adapter_instance, {})
        return connectivity
Example #18
    def _create_sim(self, integrator=None, inhom_mmpr=False, delays=False,
                    run_sim=True):
        mpr = MontbrioPazoRoxin()
        conn = Connectivity.from_file()
        if inhom_mmpr:
            dispersion = 1 + np.random.randn(conn.weights.shape[0]) * 0.1
            mpr = MontbrioPazoRoxin(eta=mpr.eta * dispersion)
        conn.speed = np.r_[3.0 if delays else np.inf]
        if integrator is None:
            dt = 0.01
            integrator = EulerDeterministic(dt=dt)
        else:
            dt = integrator.dt
        sim = Simulator(connectivity=conn, model=mpr, integrator=integrator,
                        monitors=[Raw()],
                        simulation_length=0.1)  # 10 steps
        sim.configure()
        if not delays:
            self.assertTrue((conn.idelays == 0).all())
        buf = sim.history.buffer[..., 0]
        # kernel has history in reverse order except 1st element 🤕
        rbuf = np.concatenate((buf[0:1], buf[1:][::-1]), axis=0)
        state = np.transpose(rbuf, (1, 0, 2)).astype('f')
        self.assertEqual(state.shape[0], 2)
        self.assertEqual(state.shape[2], conn.weights.shape[0])
        if isinstance(sim.integrator, IntegratorStochastic):
            sim.integrator.noise.reset_random_stream()
        if run_sim:
            (t, y), = sim.run()
            return sim, state, t, y
        else:
            return sim
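The buffer reordering in the middle of the test, shown in isolation on a 1-D stand-in: the first element stays in place and the remainder is reversed.

import numpy as np

buf = np.arange(5)  # stand-in for sim.history.buffer[..., 0] along the time axis
rbuf = np.concatenate((buf[0:1], buf[1:][::-1]), axis=0)
assert rbuf.tolist() == [0, 4, 3, 2, 1]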
Example #19
def test_store_load_complete_region_mapping(tmph5factory, connectivity_factory, surface_factory, region_mapping_factory):
    connectivity = connectivity_factory(2)
    surface = surface_factory(5)
    region_mapping = region_mapping_factory(surface, connectivity)

    with ConnectivityH5(tmph5factory('Connectivity_{}.h5'.format(connectivity.gid))) as conn_h5:
        conn_h5.store(connectivity)
        conn_stored = Connectivity()
        conn_h5.load_into(conn_stored)

    with SurfaceH5(tmph5factory('Surface_{}.h5'.format(surface.gid))) as surf_h5:
        surf_h5.store(surface)
        surf_stored = Surface()
        surf_h5.load_into(surf_stored)

    with RegionMappingH5(tmph5factory('RegionMapping_{}.h5'.format(region_mapping.gid))) as rm_h5:
        rm_h5.store(region_mapping)
        rm_stored = RegionMapping()
        rm_h5.load_into(rm_stored)

    # load_into will not load dependent datatypes. connectivity and surface are undefined
    with pytest.raises(TraitAttributeError):
        rm_stored.connectivity
    with pytest.raises(TraitAttributeError):
        rm_stored.surface

    rm_stored.connectivity = conn_stored
    rm_stored.surface = surf_stored
    assert rm_stored.connectivity is not None
    assert rm_stored.surface is not None
Example #20
    def from_file(filepath, **kwargs):
        result = TVBConnectivity.from_file(filepath)
        if isinstance(result, TVBConnectivity):
            raise NotImplementedError
            # return Connectivity.from_tvb_instance(result, **kwargs)
        else:
            result.centres = result.centers
            return Connectivity.from_instance(result, **kwargs)
Example #21
    def from_tvb_file(self, filepath):
        self._tvb = TVBConnectivity.from_file(filepath, self._tvb)
        if len(self.normalized_weights) == 0:
            self.normalized_weights = normalize_weights(self._tvb.weights,
                                                        remove_diagonal=True,
                                                        ceil=1.0)
        self.file_path = filepath
        return self
Example #22
    def test_bad_reference(self):
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!='])
        bad_reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        with pytest.raises(OperationException):
            self._import_csv_test_connectivity(bad_reference_connectivity.gid, TEST_SUBJECT_A)
Example #23
    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
        once that is done.
        """
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        algorithm = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.operation = model.Operation(self.test_user.id, self.test_project.id, algorithm.id,
                                         json.dumps(''), meta=json.dumps(meta), status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
        return connectivity
Example #24
    def _import_connectivity(self):
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_connectivity_importer',
                                              'ZIPConnectivityImporter')

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
Example #25
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())
Example #26
    def transactional_setup_method(self):
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        self.test_user = TestFactory.create_user('Test_User')
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "Test_Project")
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            "John", zip_path)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
Example #27
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("UserRM")
        self.test_project = TestFactory.import_default_project(self.test_user)
        self.connectivity = self._get_entity(Connectivity())
        self.surface = self._get_entity(CorticalSurface())
Example #28
def test_store_load_connectivity(tmph5factory, connectivity_factory):
    connectivity = connectivity_factory(2)
    conn_h5 = ConnectivityH5(tmph5factory())
    conn_h5.store(connectivity)
    conn_h5.close()

    conn_stored = Connectivity()
    with pytest.raises(TraitAttributeError):
        conn_stored.region_labels
    conn_h5.load_into(conn_stored)
    assert conn_stored.region_labels.shape[0] == 2
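The connectivity_factory fixture lives in the project's conftest and is not shown; a plausible sketch, modeled on the partial-connectivity construction in Example #31 below (the exact attributes and import path are assumptions):

import numpy
import pytest
from tvb.datatypes.connectivity import Connectivity  # assumed import path

@pytest.fixture
def connectivity_factory():
    def build(n):
        # Build a small but structurally valid Connectivity with n regions.
        return Connectivity(region_labels=numpy.array(["r%d" % i for i in range(n)]),
                            weights=numpy.zeros((n, n)),
                            tract_lengths=numpy.zeros((n, n)),
                            centres=numpy.zeros((n, 3)),
                            number_of_regions=n)
    return build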
Example #29
    def test_cortexdata(self):
        dt = Cortex.from_file(
            local_connectivity_file="local_connectivity_16384.mat")
        dt.region_mapping_data.connectivity = Connectivity.from_file()
        assert isinstance(dt, Cortex)
        assert dt.region_mapping is not None

        dt.configure()
        assert dt.vertices.shape == (16384, 3)
        assert dt.vertex_normals.shape == (16384, 3)
        assert dt.triangles.shape == (32760, 3)
Example #30
    def test_cortex_reg_map_without_subcorticals(self):
        dt = Cortex.from_file()
        dt.region_mapping_data.connectivity = Connectivity.from_file()
        self.add_subcorticals_to_conn(dt.region_mapping_data.connectivity)
        dt.region_mapping_data.connectivity.configure()

        assert isinstance(dt, Cortex)
        assert dt.region_mapping is not None
        assert numpy.unique(
            dt.region_mapping
        ).size == dt.region_mapping_data.connectivity.number_of_regions
Example #31
def test_store_partial_connectivity(tmph5factory):
    partial_conn = Connectivity(
        region_labels=numpy.array(["a", "b"]),
        weights=numpy.zeros((2, 2)),
        tract_lengths=numpy.zeros((2, 2)),
        centres=numpy.zeros((2, 2)),
        number_of_regions=int(2),
        number_of_connections=int(4),
    )
    conn_h5 = ConnectivityH5(tmph5factory())
    conn_h5.store(partial_conn)
    conn_h5.close()
Example #32
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)
Example #33
    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project,
                                                   Connectivity())
        self.assertTrue(self.connectivity is not None)
        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
Example #34
def _prepare_connectivity():
    connectivity = Connectivity(weights=numpy.array([[0.0, 2.0, 3.0, 4.0],
                                                     [2.0, 0.0, 2.0, 3.0],
                                                     [3.0, 2.0, 0.0, 1.0],
                                                     [4.0, 3.0, 1.0, 0.0]]),
                                tract_lengths=numpy.array([[0.0, 2.0, 3.0, 4.0],
                                                           [2.0, 0.0, 2.0, 3.0],
                                                           [3.0, 2.0, 0.0, 1.0],
                                                           [4.0, 3.0, 1.0, 0.0]]),
                                region_labels=numpy.array(["a", "b", "c", "d"]),
                                centres=numpy.array([1.0, 2.0, 3.0, 4.0]),
                                areas=numpy.array([1.0, 2.0, 3.0, 4.0]))
    return connectivity
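The literal matrices above are symmetric with a zero diagonal, so the fixture yields an undirected connectivity without self-connections; a quick check:

import numpy

conn = _prepare_connectivity()
assert (conn.weights == conn.weights.T).all()   # symmetric coupling
assert (numpy.diag(conn.weights) == 0).all()    # no self-connections
assert conn.tract_lengths.shape == conn.weights.shape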
Example #35
    def launch(self, view_model):
        # type: (RegionStimulusCreatorModel) -> [StimuliRegionIndex]
        """
        Used for creating a `StimuliRegion` instance
        """
        stimuli_region = StimuliRegion()
        stimuli_region.connectivity = Connectivity()
        stimuli_region.connectivity.gid = view_model.connectivity
        stimuli_region.weight = view_model.weight
        stimuli_region.temporal = view_model.temporal
        self.generic_attributes.user_tag_1 = view_model.display_name

        stimuli_region_idx = self.store_complete(stimuli_region)
        return stimuli_region_idx
Example #36
    def build(nr_regions=4):
        return Connectivity(region_labels=numpy.array(["a"] * nr_regions),
                            weights=numpy.zeros((nr_regions, nr_regions)),
                            undirected=True,
                            tract_lengths=numpy.zeros((nr_regions, nr_regions)),
                            centres=numpy.zeros((nr_regions, nr_regions)),
                            cortical=numpy.array([True] * nr_regions),
                            hemispheres=numpy.array([True] * nr_regions),
                            orientations=numpy.zeros((nr_regions, nr_regions)),
                            areas=numpy.zeros((nr_regions * nr_regions,)),
                            number_of_regions=nr_regions,
                            number_of_connections=nr_regions * nr_regions,
                            saved_selection=[1, 2, 3])
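A usage sketch for the factory above, assuming it is exposed as a plain callable:

conn = build(nr_regions=3)
assert conn.weights.shape == (3, 3)
assert conn.saved_selection == [1, 2, 3]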
Example #37
    def launch(self, resolution, weighting, inj_f_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inj_f_thresh = float(inj_f_thresh)/100.
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, 'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume, the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh, resolution)
        
        # the method excludes from the experimental dataset those experiments where the
        # injected fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates file order and keyword that will be the link between the SC order and the
        # id key in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parent and the grandparents to the child in the selected parcellation with
        # the biggest volume
        [unique_parents, unique_grandparents] = parents_and_grandparents_finder(cache, order, key_ord, structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N=tot brain areas in the parcellation.
        # -1=background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord, unique_parents, unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [result_connectivity, result_volume, result_rvm, result_template]
Example #38
    def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.

        :param uploaded: an archive containing the Connectivity data to be imported

        :returns: `Connectivity`

        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the vectors orientation, areas, cortical or hemisphere has a \
                      length different from the expected number of nodes
        """
        if uploaded is None:
            raise LaunchException("Please select ZIP file which contains data to import")
        
        files = FilesHelper().unpack_zip(uploaded, self.storage_path)
        
        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None
        
        for file_name in files:
            if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
                weights_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.POSITION_TOKEN) >= 0:
                centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
                labels_vector = read_list_data(file_name, dtype=str, skiprows=1, usecols=[0])
                continue
            if file_name.lower().find(self.TRACT_TOKEN) >= 0:
                tract_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
                orientation = read_list_data(file_name)
                continue
            if file_name.lower().find(self.AREA_TOKEN) >= 0:
                areas = read_list_data(file_name)
                continue
            if file_name.lower().find(self.CORTICAL_INFO) >= 0:
                cortical_vector = read_list_data(file_name, dtype=bool)
                continue
            if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
                hemisphere_vector = read_list_data(file_name, dtype=bool)
                continue
        ### Clean remaining text-files.
        FilesHelper.remove_files(files, True)
        
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = [rotate_x, rotate_y, rotate_z]
        
        ### Fill positions
        if centres is None:
            raise Exception("Positions for Connectivity Regions are required! "
                            "We expect a file *position* inside the uploaded ZIP.")
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector
            
        ### Fill and check weights
        if weights_matrix is not None:
            if numpy.any(weights_matrix < 0):
                raise Exception("Negative values are not accepted in weights matrix! "
                                "Please check your file, and use values >= 0")
            if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for weights matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            
        ### Fill and check tracts    
        if tract_matrix is not None:
            if numpy.any(tract_matrix < 0):
                raise Exception("Negative values are not accepted in tracts matrix! "
                                "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for tracts matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix
        
        
        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception("Invalid size for vector orientation. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.orientations = orientation
            
        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception("Invalid size for vector areas. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.areas = areas
            
        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector cortical. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.cortical = cortical_vector
            
        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector hemispheres. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.hemispheres = hemisphere_vector
        return result
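The token matching above only requires that file names inside the archive contain the right substrings; a sketch of a minimal in-memory archive for testing, with file names chosen to contain plausible token values (the real WEIGHT_TOKEN, POSITION_TOKEN and TRACT_TOKEN constants are class attributes not shown here):

import io
import zipfile
import numpy

def make_connectivity_zip(n=4):
    matrix = "\n".join(" ".join("%.1f" % v for v in row) for row in numpy.eye(n))
    centres = "label X Y Z\n" + "\n".join("r%d %d.0 0.0 0.0" % (i, i) for i in range(n))
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("weights.txt", matrix)        # picked up via the weights token
        zf.writestr("tract_lengths.txt", matrix)  # picked up via the tracts token
        zf.writestr("centres.txt", centres)       # header row skipped, labels in column 0
    buf.seek(0)
    return buf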
Example #39
def networkx2connectivity(network_obj, storage_path):
    """
    Populate Connectivity DataType from NetworkX object.
    """
    network_obj.load()
    weights_matrix, tract_matrix, labels_vector = [], [], []
    positions, areas, orientation = [], [], []
    # Read all nodes
    graph_data = network_obj.data
    graph_size = len(graph_data.nodes())
    for node in graph_data.nodes():
        positions.append([graph_data.node[node][ct.KEY_POS_X], 
                          graph_data.node[node][ct.KEY_POS_Y],
                          graph_data.node[node][ct.KEY_POS_Z]])
        labels_vector.append(graph_data.node[node][ct.KEY_POS_LABEL])
        if ct.KEY_AREA in graph_data.node[node]:
            areas.append(graph_data.node[node][ct.KEY_AREA])
        if ct.KEY_ORIENTATION_AVG in graph_data.node[node]:
            orientation.append(graph_data.node[node][ct.KEY_ORIENTATION_AVG])
        weights_matrix.append([0.0] * graph_size)
        tract_matrix.append([0.0] * graph_size)
    # Read all edges
    for edge in network_obj.data.edges():
        start = edge[0]
        end = edge[1]
        weights_matrix[start][end] = graph_data.adj[start][end][ct.KEY_WEIGHT]
        tract_matrix[start][end] = graph_data.adj[start][end][ct.KEY_TRACT]

    meta = network_obj.get_metadata_as_dict()
    
    result = Connectivity()
    result.storage_path = storage_path
    result.nose_correction = meta[ct.KEY_NOSE] if ct.KEY_NOSE in meta else None
    result.weights = weights_matrix
    result.centres = positions
    result.region_labels = labels_vector
    result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
    result.orientations = orientation
    result.areas = areas
    result.tract_lengths = tract_matrix
    return result, (meta[ct.KEY_UID] if ct.KEY_UID in meta else None)
Example #40
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, "mouse_connectivity_manifest.json")
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(projmaps, vol_thresh, resolution, Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection
        # experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol, ontology)

        # the method creates file order and key_ord that will be the link between the SC order
        # and the id key in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(ontology, order, key_ord, Vol)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parent and the grandparents to the child in the selected
        # parcellation with the biggest volume
        [unique_parents, unique_grandparents] = ParentsAndGrandParentsFinder(order, key_ord, Vol, ontology)

        # the method returns a volume indexed between 0 and N-1, with N = tot brain areas in the
        # parcellation. -1 = background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents, unique_grandparents, ontology, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        return [result_connectivity, result_rvm, result_volume]
Example #41
    def launch(self, weights, tracts, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        dti_service = DTIPipelineService()
        dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
        dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
        weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
        tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
        FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE), 
                                  os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])

        if weights_matrix.shape[0] != input_data.orientations.shape[0]:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.orientations.shape[0]))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = input_data.nose_correction
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result