Example #1
    def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param weights_delimiter: delimiter used in the weights csv file
        :param tracts:  csv file containing the tracts measures
        :param tracts_delimiter: delimiter used in the tracts csv file
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(weights, weights_delimiter)
        tract_matrix = self._read_csv_file(tracts, tracts_delimiter)

        FilesHelper.remove_files([weights, tracts])

        if weights_matrix.shape[0] != input_data.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.number_of_regions))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
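The snippet above delegates the actual parsing to a `_read_csv_file` helper that is not shown. Below is a minimal sketch of what such a helper could look like, assuming plain numeric CSV input and using `numpy.loadtxt`; the function name and the square-shape check are illustrative, not TVB's actual implementation.

import numpy

def read_square_csv(path, delimiter=','):
    # Hypothetical stand-in for the adapter's _read_csv_file: load a
    # delimiter-separated matrix of floats and insist that it is square.
    matrix = numpy.loadtxt(path, delimiter=delimiter, dtype=numpy.float64)
    if matrix.ndim != 2 or matrix.shape[0] != matrix.shape[1]:
        raise ValueError("Expected a square connectivity matrix, got shape %s" % (matrix.shape,))
    return matrix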
Example #2
    def launch(self, view_model):
        # type: (CSVConnectivityImporterModel) -> ConnectivityIndex
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(view_model.weights, view_model.weights_delimiter)
        tract_matrix = self._read_csv_file(view_model.tracts, view_model.tracts_delimiter)
        self.storage_interface.remove_files([view_model.weights, view_model.tracts])
        conn_index = self.load_entity_by_gid(view_model.input_data)
        if weights_matrix.shape[0] != conn_index.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], conn_index.number_of_regions))

        input_connectivity = h5.load_from_index(conn_index)

        result = Connectivity()
        result.centres = input_connectivity.centres
        result.region_labels = input_connectivity.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_connectivity.orientations
        result.areas = input_connectivity.areas
        result.cortical = input_connectivity.cortical
        result.hemispheres = input_connectivity.hemispheres
        result.configure()

        return h5.store_complete(result, self.storage_path)
Example #3
    def _branch_connectivity(self, original_conn, new_weights, interest_areas,
                             new_tracts=None):
        # type: (Connectivity, numpy.array, numpy.array, numpy.array) -> Connectivity
        """
        Generate new Connectivity based on a previous one, by changing weights (e.g. simulate lesion).
        The returned connectivity has the same number of nodes. The edges of unselected nodes will have weight 0.
        :param original_conn: Original Connectivity, to copy from
        :param new_weights: weights matrix for the new connectivity
        :param interest_areas: ndarray of the selected node id's
        :param new_tracts: tracts matrix for the new connectivity
        """

        new_weights, interest_areas, new_tracts = self._reorder_arrays(original_conn, new_weights,
                                                                       interest_areas, new_tracts)
        if new_tracts is None:
            new_tracts = original_conn.tract_lengths

        for i in range(len(original_conn.weights)):
            for j in range(len(original_conn.weights)):
                if i not in interest_areas or j not in interest_areas:
                    new_weights[i][j] = 0

        final_conn = Connectivity()
        final_conn.parent_connectivity = original_conn.gid.hex
        final_conn.saved_selection = interest_areas.tolist()
        final_conn.weights = new_weights
        final_conn.centres = original_conn.centres
        final_conn.region_labels = original_conn.region_labels
        final_conn.orientations = original_conn.orientations
        final_conn.cortical = original_conn.cortical
        final_conn.hemispheres = original_conn.hemispheres
        final_conn.areas = original_conn.areas
        final_conn.tract_lengths = new_tracts
        final_conn.configure()
        return final_conn
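For reference, the nested loop that zeroes the edges of unselected nodes can be expressed as a single vectorized NumPy operation. A small sketch (the helper name is illustrative):

import numpy

def zero_unselected_edges(weights, interest_areas):
    # Keep an edge (i, j) only when both i and j are selected nodes,
    # which matches the nested-loop logic in _branch_connectivity.
    selected = numpy.zeros(weights.shape[0], dtype=bool)
    selected[numpy.asarray(interest_areas)] = True
    return numpy.where(numpy.outer(selected, selected), weights, 0.0)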
Example #4
    def launch(self, weights, tracts, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        dti_service = DTIPipelineService()
        dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
        dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
        weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
        tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
        FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE), 
                                  os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])

        if weights_matrix.shape[0] != input_data.orientations.shape[0]:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.orientations.shape[0]))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = input_data.nose_correction
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
Example #5
    def launch(self, weights, tracts, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.
        """
        dti_service = dtipipelineservice.DTIPipelineService()
        dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
        dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
        weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
        tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
        FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE),
                                  os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])
        if weights_matrix.shape[0] != input_data.orientations.shape[0]:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.orientations.shape[0]))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = input_data.nose_correction
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
Example #6
    def parse(self, network):
        """
        Populate Connectivity DataType from NetworkX object.
        Tested with results from Connectome Mapper Toolkit.

        :param network: NetworkX graph
        :return: Connectivity object
        """
        graph_size = len(network.nodes())

        weights_matrix = numpy.zeros((graph_size, graph_size))
        tract_matrix = numpy.zeros((graph_size, graph_size))
        labels_vector, positions, cortical, hemisphere = [], [], [], []

        try:
            for node in range(1, graph_size + 1):
                node_data = network.nodes[node]

                pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
                positions.append(list(pos))

                label = self._find_value(node_data, self.KEY_NODE_LABEL)
                labels_vector.append(str(label))

                if self.REGION_CORTICAL == self._find_value(
                        node_data, self.KEY_NODE_REGION):
                    cortical.append(1)
                else:
                    cortical.append(0)

                if self.HEMISPHERE_RIGHT == self._find_value(
                        node_data, self.KEY_NODE_HEMISPHERE):
                    hemisphere.append(True)
                else:
                    hemisphere.append(False)

            # Iterate over edges:
            for start, end in network.edges():
                weights_matrix[start - 1][end - 1] = self._find_value(
                    network.adj[start][end], self.KEY_EDGE_WEIGHT)
                tract_matrix[start - 1][end - 1] = self._find_value(
                    network.adj[start][end], self.KEY_EDGE_TRACT)

            result = Connectivity()
            result.storage_path = self.storage_path
            result.region_labels = labels_vector
            result.centres = positions
            result.set_metadata(
                {'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
            result.hemispheres = hemisphere
            result.cortical = cortical
            result.weights = weights_matrix
            result.tract_lengths = tract_matrix
            return result

        except KeyError as err:
            self.logger.exception("Could not parse Connectivity")
            raise ParseException(err)
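The parser above expects every node to carry coordinate, label, region and hemisphere attributes, and every edge to carry weight and tract-length attributes. A hedged sketch of building a NetworkX graph in that spirit; the attribute key names below are illustrative placeholders, since the parser resolves the real ones through its KEY_NODE_* / KEY_EDGE_* constants.

import networkx

# Illustrative attribute keys only; actual Connectome Mapper output may use different names.
KEY_COORD, KEY_LABEL, KEY_REGION, KEY_HEMI = "dn_position", "dn_name", "dn_region", "dn_hemisphere"
KEY_WEIGHT, KEY_TRACT = "weight", "tract_length"

graph = networkx.Graph()
graph.add_node(1, **{KEY_COORD: (1.0, 2.0, 3.0), KEY_LABEL: "rh_region_a",
                     KEY_REGION: "cortical", KEY_HEMI: "right"})
graph.add_node(2, **{KEY_COORD: (4.0, 5.0, 6.0), KEY_LABEL: "lh_region_b",
                     KEY_REGION: "cortical", KEY_HEMI: "left"})
graph.add_edge(1, 2, **{KEY_WEIGHT: 3.0, KEY_TRACT: 42.5})
# Such a graph would then be handed to parse(network=graph).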
Example #7
    def parse(self, network):
        """
        Populate Connectivity DataType from NetworkX object.
        Tested with results from Connectome Mapper Toolkit.

        :param network: NetworkX graph
        :return: Connectivity object
        """
        graph_size = len(network.nodes())

        weights_matrix = numpy.zeros((graph_size, graph_size))
        tract_matrix = numpy.zeros((graph_size, graph_size))
        labels_vector, positions, cortical, hemisphere = [], [], [], []

        try:
            for node in network.nodes():
                node_data = network.node[node]

                pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
                positions.append(list(pos))

                label = self._find_value(node_data, self.KEY_NODE_LABEL)
                labels_vector.append(str(label))

                if self.REGION_CORTICAL == self._find_value(node_data, self.KEY_NODE_REGION):
                    cortical.append(1)
                else:
                    cortical.append(0)

                if self.HEMISPHERE_RIGHT == self._find_value(node_data, self.KEY_NODE_HEMISPHERE):
                    hemisphere.append(True)
                else:
                    hemisphere.append(False)

            # Iterate over edges:
            for start, end in network.edges():
                weights_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_WEIGHT)
                tract_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_TRACT)

            result = Connectivity()
            result.storage_path = self.storage_path
            result.region_labels = labels_vector
            result.centres = positions
            result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
            result.hemispheres = hemisphere
            result.cortical = cortical
            result.weights = weights_matrix
            result.tract_lengths = tract_matrix
            return result

        except KeyError as err:
            self.logger.exception("Could not parse Connectivity")
            raise ParseException(err)
Example #8
    def _cut_connectivity(self,
                          original_conn,
                          new_weights,
                          interest_areas,
                          new_tracts=None):
        # type: (Connectivity, numpy.array, numpy.array, numpy.array) -> Connectivity
        """
        Generate new Connectivity object based on current one, by removing nodes (e.g. simulate lesion).
        Only the selected nodes will get used in the result. The order of the indices in interest_areas matters.
        If indices are not sorted then the nodes will be permuted accordingly.

        :param original_conn: Original Connectivity(HasTraits), to cut nodes from
        :param new_weights: weights matrix for the new connectivity
        :param interest_areas: ndarray with the selected node id's.
        :param new_tracts: tracts matrix for the new connectivity
        """
        new_weights, interest_areas, new_tracts = self._reorder_arrays(
            original_conn, new_weights, interest_areas, new_tracts)
        if new_tracts is None:
            new_tracts = original_conn.tract_lengths[
                interest_areas, :][:, interest_areas]
        else:
            new_tracts = new_tracts[interest_areas, :][:, interest_areas]
        new_weights = new_weights[interest_areas, :][:, interest_areas]

        final_conn = Connectivity()
        final_conn.parent_connectivity = None
        final_conn.weights = new_weights
        final_conn.centres = original_conn.centres[interest_areas, :]
        final_conn.region_labels = original_conn.region_labels[interest_areas]
        if original_conn.orientations is not None and len(
                original_conn.orientations):
            final_conn.orientations = original_conn.orientations[
                interest_areas, :]
        if original_conn.cortical is not None and len(original_conn.cortical):
            final_conn.cortical = original_conn.cortical[interest_areas]
        if original_conn.hemispheres is not None and len(
                original_conn.hemispheres):
            final_conn.hemispheres = original_conn.hemispheres[interest_areas]
        if original_conn.areas is not None and len(original_conn.areas):
            final_conn.areas = original_conn.areas[interest_areas]
        final_conn.tract_lengths = new_tracts
        final_conn.saved_selection = []
        final_conn.configure()
        return final_conn
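The repeated `matrix[interest_areas, :][:, interest_areas]` pattern selects the rows and then the columns of the chosen nodes; `numpy.ix_` does the same in one indexing step. A small sketch (the helper name is illustrative):

import numpy

def cut_submatrix(matrix, interest_areas):
    # Rows and columns restricted to the selected nodes; an unsorted
    # interest_areas permutes the result in the order given.
    idx = numpy.asarray(interest_areas)
    return matrix[numpy.ix_(idx, idx)]

full = numpy.arange(16.0).reshape(4, 4)
print(cut_submatrix(full, [2, 0]))  # 2x2 block with rows/cols ordered [2, 0]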
Example #9
    def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.

        :param uploaded: an archive containing the Connectivity data to be imported

        :returns: `Connectivity`

        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the vector orientation, areas, cortical or hemisphere is \
                      different from the expected number of nodes
        """
        if uploaded is None:
            raise LaunchException("Please select ZIP file which contains data to import")
        
        files = FilesHelper().unpack_zip(uploaded, self.storage_path)
        
        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None
        
        for file_name in files:
            if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
                weights_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.POSITION_TOKEN) >= 0:
                centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
                labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
                continue
            if file_name.lower().find(self.TRACT_TOKEN) >= 0:
                tract_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
                orientation = read_list_data(file_name)
                continue
            if file_name.lower().find(self.AREA_TOKEN) >= 0:
                areas = read_list_data(file_name)
                continue
            if file_name.lower().find(self.CORTICAL_INFO) >= 0:
                cortical_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
            if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
                hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
        ### Clean remaining text-files.
        FilesHelper.remove_files(files, True)
        
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = [rotate_x, rotate_y, rotate_z]
        
        ### Fill positions
        if centres is None:
            raise Exception("Positions for Connectivity Regions are required! "
                            "We expect a file *position* inside the uploaded ZIP.")
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector
            
        ### Fill and check weights
        if weights_matrix is not None:
            if numpy.any([x < 0 for x in weights_matrix.flatten()]):
                raise Exception("Negative values are not accepted in weights matrix! "
                                "Please check your file, and use values >= 0")
            if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for weights matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            
        ### Fill and check tracts    
        if tract_matrix is not None:
            if numpy.any([x < 0 for x in tract_matrix.flatten()]):
                raise Exception("Negative values are not accepted in tracts matrix! "
                                "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for tracts matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix
        
        
        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception("Invalid size for vector orientation. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.orientations = orientation
            
        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception("Invalid size for vector areas. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.areas = areas
            
        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector cortical. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.cortical = cortical_vector
            
        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector hemispheres. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.hemispheres = hemisphere_vector
        return result
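To exercise an importer like the one above, the uploaded ZIP must contain text files whose names include the expected tokens (weights, tracts, positions/centres, optionally orientations, areas, cortical and hemisphere flags). A rough sketch of assembling such an archive with plain numpy text output; the file names and token values are assumptions, since the real ones come from the importer's *_TOKEN constants.

import io
import zipfile
import numpy

def matrix_to_text(matrix):
    buffer = io.StringIO()
    numpy.savetxt(buffer, matrix)
    return buffer.getvalue()

def build_minimal_connectivity_zip(zip_path, n_nodes=4):
    rng = numpy.random.default_rng(42)
    weights = numpy.abs(rng.normal(size=(n_nodes, n_nodes)))
    tracts = numpy.abs(rng.normal(size=(n_nodes, n_nodes))) * 10.0
    # First column: label; columns 1-3: X, Y, Z (the header row is skipped by the importer).
    centres = ["label x y z"] + ["region_%d %.1f 0.0 0.0" % (i, float(i)) for i in range(n_nodes)]
    with zipfile.ZipFile(zip_path, "w") as archive:
        archive.writestr("weights.txt", matrix_to_text(weights))
        archive.writestr("tract_lengths.txt", matrix_to_text(tracts))
        archive.writestr("positions.txt", "\n".join(centres))

build_minimal_connectivity_zip("connectivity_regions.zip")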
Example #10
    def launch(self, view_model):
        # type: (ZIPConnectivityImporterModel) -> [ConnectivityIndex]
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.
        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the vector orientation, areas, cortical or hemisphere is \
                      different from the expected number of nodes
        """
        if view_model.uploaded is None:
            raise LaunchException(
                "Please select ZIP file which contains data to import")

        files = self.storage_interface.unpack_zip(view_model.uploaded,
                                                  self.get_storage_path())

        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None

        for file_name in files:
            file_name_low = file_name.lower()
            if self.WEIGHT_TOKEN in file_name_low:
                weights_matrix = self.read_list_data(file_name)
            elif self.CENTRES_TOKEN in file_name_low or self.CENTRES_TOKEN2 in file_name_low:
                centres = self.read_list_data(file_name, usecols=[1, 2, 3])
                labels_vector = self.read_list_data(file_name,
                                                    dtype=numpy.str,
                                                    usecols=[0])
            elif self.TRACT_TOKEN in file_name_low:
                tract_matrix = self.read_list_data(file_name)
            elif self.ORIENTATION_TOKEN in file_name_low:
                orientation = self.read_list_data(file_name)
            elif self.AREA_TOKEN in file_name_low:
                areas = self.read_list_data(file_name)
            elif self.CORTICAL_INFO in file_name_low:
                cortical_vector = self.read_list_data(file_name,
                                                      dtype=numpy.bool)
            elif self.HEMISPHERE_INFO in file_name_low:
                hemisphere_vector = self.read_list_data(file_name,
                                                        dtype=numpy.bool)

        # Clean remaining text-files.
        self.storage_interface.remove_files(files, True)

        result = Connectivity()

        # Fill positions
        if centres is None:
            raise Exception(
                "Region centres are required for Connectivity Regions! "
                "We expect a file that contains *centres* inside the uploaded ZIP."
            )
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector

        # Fill and check weights
        if weights_matrix is not None:
            if weights_matrix.shape != (expected_number_of_nodes,
                                        expected_number_of_nodes):
                raise Exception(
                    "Unexpected shape for weights matrix! "
                    "Should be %d x %d " %
                    (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            if view_model.normalization:
                result.weights = result.scaled_weights(
                    view_model.normalization)

        # Fill and check tracts. Allow empty files for tracts, they will be computed by tvb-library.
        if tract_matrix is not None:
            if tract_matrix.size != 0:
                if numpy.any([x < 0 for x in tract_matrix.flatten()]):
                    raise Exception(
                        "Negative values are not accepted in tracts matrix! "
                        "Please check your file, and use values >= 0")
                if tract_matrix.shape != (expected_number_of_nodes,
                                          expected_number_of_nodes):
                    raise Exception(
                        "Unexpected shape for tracts matrix! "
                        "Should be %d x %d " %
                        (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix

        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector orientation. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.orientations = orientation

        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector areas. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.areas = areas

        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector cortical. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.cortical = cortical_vector

        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector hemispheres. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.hemispheres = hemisphere_vector

        result.configure()
        return self.store_complete(result)
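The shape and non-negativity checks repeated above can be factored into a single helper; a compact sketch using a vectorized comparison instead of flattening the matrix into a Python list (the helper name is illustrative):

import numpy

def validate_square_non_negative(matrix, n_nodes, name):
    # Same checks the importer performs per matrix, gathered in one place.
    if matrix.shape != (n_nodes, n_nodes):
        raise ValueError("Unexpected shape for %s matrix! Should be %d x %d" % (name, n_nodes, n_nodes))
    if (matrix < 0).any():
        raise ValueError("Negative values are not accepted in the %s matrix!" % name)

validate_square_non_negative(numpy.ones((3, 3)), 3, "weights")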
Example #11
    def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.
        """
        if uploaded is None:
            raise LaunchException(
                "Please select ZIP file which contains data to import")

        files = FilesHelper().unpack_zip(uploaded, self.storage_path)

        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None

        for file_name in files:
            if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
                weights_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.POSITION_TOKEN) >= 0:
                centres = read_list_data(file_name,
                                         skiprows=1,
                                         usecols=[1, 2, 3])
                labels_vector = read_list_data(file_name,
                                               dtype=numpy.str,
                                               skiprows=1,
                                               usecols=[0])
                continue
            if file_name.lower().find(self.TRACT_TOKEN) >= 0:
                tract_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
                orientation = read_list_data(file_name)
                continue
            if file_name.lower().find(self.AREA_TOKEN) >= 0:
                areas = read_list_data(file_name)
                continue
            if file_name.lower().find(self.CORTICAL_INFO) >= 0:
                cortical_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
            if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
                hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
        ### Clean remaining text-files.
        FilesHelper.remove_files(files, True)

        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = [rotate_x, rotate_y, rotate_z]

        ### Fill positions
        if centres is None:
            raise Exception(
                "Positions for Connectivity Regions are required! "
                "We expect a file *position* inside the uploaded ZIP.")
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector

        ### Fill and check weights
        if weights_matrix is not None:
            if numpy.any([x < 0 for x in weights_matrix.flatten()]):
                raise Exception(
                    "Negative values are not accepted in weights matrix! "
                    "Please check your file, and use values >= 0")
            if weights_matrix.shape != (expected_number_of_nodes,
                                        expected_number_of_nodes):
                raise Exception(
                    "Unexpected shape for weights matrix! "
                    "Should be %d x %d " %
                    (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix

        ### Fill and check tracts
        if tract_matrix is not None:
            if numpy.any([x < 0 for x in tract_matrix.flatten()]):
                raise Exception(
                    "Negative values are not accepted in tracts matrix! "
                    "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes,
                                      expected_number_of_nodes):
                raise Exception(
                    "Unexpected shape for tracts matrix! "
                    "Should be %d x %d " %
                    (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix

        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector orientation. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.orientations = orientation

        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector areas. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.areas = areas

        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector cortical. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.cortical = cortical_vector

        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector hemispheres. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.hemispheres = hemisphere_vector
        return result
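The two-pass read of the centres file (labels from column 0, coordinates from columns 1-3) can be reproduced with plain numpy. A small sketch of the equivalent reads, assuming a whitespace-separated file with one header row as in the older importer above:

import numpy

def read_centres(path):
    # Column 0 holds region labels, columns 1-3 hold X, Y, Z coordinates.
    positions = numpy.loadtxt(path, skiprows=1, usecols=(1, 2, 3))
    labels = numpy.loadtxt(path, dtype=str, skiprows=1, usecols=(0,))
    return labels, positions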