def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
    """
    Execute import operations: process the weights and tracts csv files, then use
    the reference connectivity passed as input_data for the rest of the attributes.

    :param weights: csv file containing the weights measures
    :param weights_delimiter: delimiter used inside the weights csv file
    :param tracts: csv file containing the tracts measures
    :param tracts_delimiter: delimiter used inside the tracts csv file
    :param input_data: a reference connectivity with the additional attributes
    :raises LaunchException: when the number of nodes in CSV files doesn't match
        the one in the connectivity
    """
    parsed_weights = self._read_csv_file(weights, weights_delimiter)
    parsed_tracts = self._read_csv_file(tracts, tracts_delimiter)
    # The uploaded files are no longer needed once parsed into matrices.
    FilesHelper.remove_files([weights, tracts])

    csv_nodes = parsed_weights.shape[0]
    if csv_nodes != input_data.number_of_regions:
        raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                              "has only %s nodes." % (csv_nodes, input_data.number_of_regions))

    result = Connectivity()
    result.storage_path = self.storage_path
    result.centres = input_data.centres
    result.region_labels = input_data.region_labels
    # The measures come from the CSV files; the rest is copied from the reference.
    result.weights = parsed_weights
    result.tract_lengths = parsed_tracts
    for attr in ("orientations", "areas", "cortical", "hemispheres"):
        setattr(result, attr, getattr(input_data, attr))
    return result
def launch(self, weights, tracts, input_data):
    """
    Execute import operations: process the weights and tracts csv files, then use
    the reference connectivity passed as input_data for the rest of the attributes.

    :param weights: csv file containing the weights measures
    :param tracts: csv file containing the tracts measures
    :param input_data: a reference connectivity supplying the remaining attributes
    :raises LaunchException: when the node count in the CSV files differs from the
        reference connectivity
    """
    dti_service = dtipipelineservice.DTIPipelineService()
    # The DTI pipeline rewrites each uploaded CSV next to the original, under a fixed name.
    processed_weights = os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE)
    processed_tracts = os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)
    dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
    dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
    weights_matrix = read_list_data(processed_weights)
    tract_matrix = read_list_data(processed_tracts)
    FilesHelper.remove_files([processed_weights, processed_tracts])

    reference_nodes = input_data.orientations.shape[0]
    if weights_matrix.shape[0] != reference_nodes:
        raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference has only %s nodes." % (weights_matrix.shape[0], reference_nodes))

    result = Connectivity()
    result.storage_path = self.storage_path
    result.nose_correction = input_data.nose_correction
    result.centres = input_data.centres
    result.region_labels = input_data.region_labels
    result.weights = weights_matrix
    result.tract_lengths = tract_matrix
    for attr in ("orientations", "areas", "cortical", "hemispheres"):
        setattr(result, attr, getattr(input_data, attr))
    return result
def launch(self, weights, tracts, input_data):
    """
    Execute import operations: process the weights and tracts csv files, then use
    the reference connectivity passed as input_data for the rest of the attributes.

    :param weights: csv file containing the weights measures
    :param tracts: csv file containing the tracts measures
    :param input_data: a reference connectivity with the additional attributes
    :raises LaunchException: when the number of nodes in CSV files doesn't match
        the one in the connectivity
    """
    dti_service = DTIPipelineService()

    def _process_and_load(source_csv, target_name):
        # Convert the uploaded CSV into the DTI-pipeline format and read it back;
        # the pipeline writes the processed file next to the original.
        dti_service._process_csv_file(source_csv, target_name)
        processed_path = os.path.join(os.path.dirname(source_csv), target_name)
        return processed_path, read_list_data(processed_path)

    weights_path, weights_matrix = _process_and_load(weights, dti_service.WEIGHTS_FILE)
    tracts_path, tract_matrix = _process_and_load(tracts, dti_service.TRACT_FILE)
    FilesHelper.remove_files([weights_path, tracts_path])

    reference_nodes = input_data.orientations.shape[0]
    if weights_matrix.shape[0] != reference_nodes:
        raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                              "has only %s nodes." % (weights_matrix.shape[0], reference_nodes))

    result = Connectivity()
    result.storage_path = self.storage_path
    result.nose_correction = input_data.nose_correction
    result.centres = input_data.centres
    result.region_labels = input_data.region_labels
    result.weights = weights_matrix
    result.tract_lengths = tract_matrix
    result.orientations = input_data.orientations
    result.areas = input_data.areas
    result.cortical = input_data.cortical
    result.hemispheres = input_data.hemispheres
    return result
def parse(self, network):
    """
    Populate Connectivity DataType from NetworkX object.
    Tested with results from Connectome Mapper Toolkit.

    :param network: NetworkX graph
    :return: Connectivity object
    :raises ParseException: when a required node/edge attribute is missing
    """
    graph_size = len(network.nodes())
    weights_matrix = numpy.zeros((graph_size, graph_size))
    tract_matrix = numpy.zeros((graph_size, graph_size))
    labels_vector, positions, cortical, hemisphere = [], [], [], []
    try:
        # FIX: the original used `xrange`, which does not exist in Python 3,
        # while the rest of this block already relies on Python-3-only syntax
        # (`except ... as`) and the NetworkX 2.x `network.nodes[...]` view.
        # Nodes are addressed as 1..graph_size (Connectome Mapper convention),
        # hence the -1 offsets when filling the matrices below.
        for node in range(1, graph_size + 1):
            node_data = network.nodes[node]
            pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
            positions.append(list(pos))
            label = self._find_value(node_data, self.KEY_NODE_LABEL)
            labels_vector.append(str(label))
            if self.REGION_CORTICAL == self._find_value(
                    node_data, self.KEY_NODE_REGION):
                cortical.append(1)
            else:
                cortical.append(0)
            if self.HEMISPHERE_RIGHT == self._find_value(
                    node_data, self.KEY_NODE_HEMISPHERE):
                hemisphere.append(True)
            else:
                hemisphere.append(False)
        # Iterate over edges; matrices stay 0.0 where no edge exists.
        for start, end in network.edges():
            weights_matrix[start - 1][end - 1] = self._find_value(
                network.adj[start][end], self.KEY_EDGE_WEIGHT)
            tract_matrix[start - 1][end - 1] = self._find_value(
                network.adj[start][end], self.KEY_EDGE_TRACT)
        result = Connectivity()
        result.storage_path = self.storage_path
        result.region_labels = labels_vector
        result.centres = positions
        result.set_metadata(
            {'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
        result.hemispheres = hemisphere
        result.cortical = cortical
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        return result
    except KeyError as err:
        self.logger.exception("Could not parse Connectivity")
        raise ParseException(err)
def parse(self, network):
    """
    Populate Connectivity DataType from NetworkX object.
    Tested with results from Connectome Mapper Toolkit.

    NOTE(review): this is Python 2 code (`except KeyError, err`) targeting the
    NetworkX 1.x API (`network.node`); kept byte-identical.

    :param network: NetworkX graph
    :return: Connectivity object
    :raises ParseException: when a required node/edge attribute is missing
    """
    graph_size = len(network.nodes())
    # Dense square matrices, pre-filled with 0.0 for absent edges.
    weights_matrix = numpy.zeros((graph_size, graph_size))
    tract_matrix = numpy.zeros((graph_size, graph_size))
    labels_vector, positions, cortical, hemisphere = [], [], [], []
    try:
        for node in network.nodes():
            node_data = network.node[node]
            pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
            positions.append(list(pos))
            label = self._find_value(node_data, self.KEY_NODE_LABEL)
            labels_vector.append(str(label))
            # Cortical flag stored as 1/0 ints, hemisphere as True/False.
            if self.REGION_CORTICAL == self._find_value(node_data, self.KEY_NODE_REGION):
                cortical.append(1)
            else:
                cortical.append(0)
            if self.HEMISPHERE_RIGHT == self._find_value(node_data, self.KEY_NODE_HEMISPHERE):
                hemisphere.append(True)
            else:
                hemisphere.append(False)
        # Iterate over edges: node ids are shifted by -1 to index the matrices,
        # so node labels are presumably 1-based — TODO confirm against caller.
        for start, end in network.edges():
            weights_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_WEIGHT)
            tract_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_TRACT)
        result = Connectivity()
        result.storage_path = self.storage_path
        result.region_labels = labels_vector
        result.centres = positions
        result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
        result.hemispheres = hemisphere
        result.cortical = cortical
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        return result
    except KeyError, err:
        # Any missing required attribute surfaces as KeyError from _find_value.
        self.logger.exception("Could not parse Connectivity")
        raise ParseException(err)
def networkx2connectivity(network_obj, storage_path):
    """
    Populate Connectivity DataType from NetworkX object.

    :param network_obj: wrapper whose `.data` is a NetworkX graph
    :param storage_path: storage path for the resulting Connectivity
    :return: tuple (Connectivity instance, UID from metadata or None)
    """
    network_obj.load()
    graph_data = network_obj.data
    node_count = len(graph_data.nodes())

    # Read all nodes
    labels, centres, area_list, orientation_list = [], [], [], []
    for node in graph_data.nodes():
        attrs = graph_data.node[node]
        centres.append([attrs[ct.KEY_POS_X], attrs[ct.KEY_POS_Y], attrs[ct.KEY_POS_Z]])
        labels.append(attrs[ct.KEY_POS_LABEL])
        # Areas / orientations are optional per node.
        if ct.KEY_AREA in attrs:
            area_list.append(attrs[ct.KEY_AREA])
        if ct.KEY_ORIENTATION_AVG in attrs:
            orientation_list.append(attrs[ct.KEY_ORIENTATION_AVG])

    # Square matrices pre-filled with 0.0, then populated from the edges.
    weights = [[0.0] * node_count for _ in range(node_count)]
    tracts = [[0.0] * node_count for _ in range(node_count)]
    for start, end in graph_data.edges():
        edge_attrs = graph_data.adj[start][end]
        weights[start][end] = edge_attrs[ct.KEY_WEIGHT]
        tracts[start][end] = edge_attrs[ct.KEY_TRACT]

    meta = network_obj.get_metadata_as_dict()
    result = Connectivity()
    result.storage_path = storage_path
    result.nose_correction = meta.get(ct.KEY_NOSE)
    result.weights = weights
    result.centres = centres
    result.region_labels = labels
    result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
    result.orientations = orientation_list
    result.areas = area_list
    result.tract_lengths = tracts
    return result, meta.get(ct.KEY_UID)
def networkx2connectivity(network_obj, storage_path):
    """
    Populate Connectivity DataType from NetworkX object.

    :param network_obj: wrapper whose `.data` is a NetworkX graph
    :param storage_path: storage path for the resulting Connectivity
    :return: tuple (Connectivity instance, UID from metadata or None)
    """
    network_obj.load()
    weights_matrix, tract_matrix, labels_vector = [], [], []
    positions, areas, orientation = [], [], []

    graph_data = network_obj.data
    graph_size = len(graph_data.nodes())

    # Read all nodes; one zero-row is appended to each matrix per node.
    for node in graph_data.nodes():
        node_info = graph_data.node[node]
        x_coord = node_info[ct.KEY_POS_X]
        y_coord = node_info[ct.KEY_POS_Y]
        z_coord = node_info[ct.KEY_POS_Z]
        positions.append([x_coord, y_coord, z_coord])
        labels_vector.append(node_info[ct.KEY_POS_LABEL])
        # Areas / orientations are optional per node.
        if ct.KEY_AREA in node_info:
            areas.append(node_info[ct.KEY_AREA])
        if ct.KEY_ORIENTATION_AVG in node_info:
            orientation.append(node_info[ct.KEY_ORIENTATION_AVG])
        weights_matrix.append([0.0] * graph_size)
        tract_matrix.append([0.0] * graph_size)

    # Read all edges
    for source, target in network_obj.data.edges():
        link_info = graph_data.adj[source][target]
        weights_matrix[source][target] = link_info[ct.KEY_WEIGHT]
        tract_matrix[source][target] = link_info[ct.KEY_TRACT]

    meta = network_obj.get_metadata_as_dict()
    result = Connectivity()
    result.storage_path = storage_path
    result.nose_correction = meta[ct.KEY_NOSE] if ct.KEY_NOSE in meta else None
    result.weights = weights_matrix
    result.centres = positions
    result.region_labels = labels_vector
    result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
    result.orientations = orientation
    result.areas = areas
    result.tract_lengths = tract_matrix
    uid = meta[ct.KEY_UID] if ct.KEY_UID in meta else None
    return result, uid
def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
    """
    Execute import operations: unpack ZIP and build Connectivity object as result.

    :param uploaded: an archive containing the Connectivity data to be imported
    :returns: `Connectivity`
    :raises LaunchException: when `uploaded` is empty or nonexistent
    :raises Exception: when

        * weights or tracts matrix is invalid (negative values, wrong shape)
        * any of the vector orientation, areas, cortical or hemisphere is
          different from the expected number of nodes
    """
    if uploaded is None:
        raise LaunchException("Please select ZIP file which contains data to import")
    files = FilesHelper().unpack_zip(uploaded, self.storage_path)

    weights_matrix = None
    centres = None
    labels_vector = None
    tract_matrix = None
    orientation = None
    areas = None
    cortical_vector = None
    hemisphere_vector = None

    # Each file in the archive is recognized by a token inside its name;
    # the first matching token wins.
    for file_name in files:
        lowered = file_name.lower()
        if lowered.find(self.WEIGHT_TOKEN) >= 0:
            weights_matrix = read_list_data(file_name)
        elif lowered.find(self.POSITION_TOKEN) >= 0:
            centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
            labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
        elif lowered.find(self.TRACT_TOKEN) >= 0:
            tract_matrix = read_list_data(file_name)
        elif lowered.find(self.ORIENTATION_TOKEN) >= 0:
            orientation = read_list_data(file_name)
        elif lowered.find(self.AREA_TOKEN) >= 0:
            areas = read_list_data(file_name)
        elif lowered.find(self.CORTICAL_INFO) >= 0:
            cortical_vector = read_list_data(file_name, dtype=numpy.bool)
        elif lowered.find(self.HEMISPHERE_INFO) >= 0:
            hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)

    ### Clean remaining text-files.
    FilesHelper.remove_files(files, True)

    result = Connectivity()
    result.storage_path = self.storage_path
    result.nose_correction = [rotate_x, rotate_y, rotate_z]

    ### Fill positions
    if centres is None:
        raise Exception("Positions for Connectivity Regions are required! "
                        "We expect a file *position* inside the uploaded ZIP.")
    expected_number_of_nodes = len(centres)
    if expected_number_of_nodes < 2:
        raise Exception("A connectivity with at least 2 nodes is expected")
    result.centres = centres
    if labels_vector is not None:
        result.region_labels = labels_vector

    def _check_matrix(matrix, name):
        # Shared sanity checks for the square weights / tracts matrices.
        if numpy.any([x < 0 for x in matrix.flatten()]):
            raise Exception("Negative values are not accepted in %s matrix! "
                            "Please check your file, and use values >= 0" % name)
        if matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception("Unexpected shape for %s matrix! "
                            "Should be %d x %d " % (name, expected_number_of_nodes, expected_number_of_nodes))

    def _check_vector(vector, name):
        # Every per-node vector must have exactly one entry per region centre.
        if len(vector) != expected_number_of_nodes:
            raise Exception("Invalid size for vector %s. "
                            "Expected the same as region-centers number %d" % (name, expected_number_of_nodes))

    ### Fill and check weights
    if weights_matrix is not None:
        _check_matrix(weights_matrix, "weights")
        result.weights = weights_matrix
    ### Fill and check tracts
    if tract_matrix is not None:
        _check_matrix(tract_matrix, "tracts")
        result.tract_lengths = tract_matrix

    if orientation is not None:
        _check_vector(orientation, "orientation")
        result.orientations = orientation
    if areas is not None:
        _check_vector(areas, "areas")
        result.areas = areas
    if cortical_vector is not None:
        _check_vector(cortical_vector, "cortical")
        result.cortical = cortical_vector
    if hemisphere_vector is not None:
        _check_vector(hemisphere_vector, "hemispheres")
        result.hemispheres = hemisphere_vector
    return result
def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
    """
    Execute import operations: unpack ZIP and build Connectivity object as result.

    :param uploaded: an archive containing the Connectivity data to be imported
    :returns: `Connectivity`
    :raises LaunchException: when `uploaded` is empty or nonexistent
    :raises Exception: when a matrix/vector in the archive has negative values
        or a size inconsistent with the number of region centres
    """
    if uploaded is None:
        raise LaunchException(
            "Please select ZIP file which contains data to import")
    files = FilesHelper().unpack_zip(uploaded, self.storage_path)
    # Each optional part of the connectivity starts out missing.
    weights_matrix = None
    centres = None
    labels_vector = None
    tract_matrix = None
    orientation = None
    areas = None
    cortical_vector = None
    hemisphere_vector = None
    # Recognize each unpacked file by a token in its (lower-cased) name;
    # the first matching token wins because of the `continue`s.
    for file_name in files:
        if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
            weights_matrix = read_list_data(file_name)
            continue
        if file_name.lower().find(self.POSITION_TOKEN) >= 0:
            # Column 0 holds labels; columns 1-3 hold X, Y, Z coordinates.
            centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
            labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
            continue
        if file_name.lower().find(self.TRACT_TOKEN) >= 0:
            tract_matrix = read_list_data(file_name)
            continue
        if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
            orientation = read_list_data(file_name)
            continue
        if file_name.lower().find(self.AREA_TOKEN) >= 0:
            areas = read_list_data(file_name)
            continue
        if file_name.lower().find(self.CORTICAL_INFO) >= 0:
            cortical_vector = read_list_data(file_name, dtype=numpy.bool)
            continue
        if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
            hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)
            continue
    ### Clean remaining text-files.
    FilesHelper.remove_files(files, True)
    result = Connectivity()
    result.storage_path = self.storage_path
    result.nose_correction = [rotate_x, rotate_y, rotate_z]
    ### Fill positions
    if centres is None:
        raise Exception(
            "Positions for Connectivity Regions are required! "
            "We expect a file *position* inside the uploaded ZIP.")
    # The centres file defines the expected node count for every other part.
    expected_number_of_nodes = len(centres)
    if expected_number_of_nodes < 2:
        raise Exception("A connectivity with at least 2 nodes is expected")
    result.centres = centres
    if labels_vector is not None:
        result.region_labels = labels_vector
    ### Fill and check weights
    if weights_matrix is not None:
        if numpy.any([x < 0 for x in weights_matrix.flatten()]):
            raise Exception(
                "Negative values are not accepted in weights matrix! "
                "Please check your file, and use values >= 0")
        if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception(
                "Unexpected shape for weights matrix! "
                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.weights = weights_matrix
    ### Fill and check tracts
    if tract_matrix is not None:
        if numpy.any([x < 0 for x in tract_matrix.flatten()]):
            raise Exception(
                "Negative values are not accepted in tracts matrix! "
                "Please check your file, and use values >= 0")
        if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception(
                "Unexpected shape for tracts matrix! "
                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.tract_lengths = tract_matrix
    # Remaining optional vectors must have one entry per region centre.
    if orientation is not None:
        if len(orientation) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector orientation. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.orientations = orientation
    if areas is not None:
        if len(areas) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector areas. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.areas = areas
    if cortical_vector is not None:
        if len(cortical_vector) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector cortical. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.cortical = cortical_vector
    if hemisphere_vector is not None:
        if len(hemisphere_vector) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector hemispheres. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.hemispheres = hemisphere_vector
    return result
def launch(self, uploaded, normalization=None):
    """
    Execute import operations: unpack ZIP and build Connectivity object as result.

    :param uploaded: an archive containing the Connectivity data to be imported
    :param normalization: optional weights-scaling mode; when truthy, the imported
        weights are replaced by `result.scaled_weights(normalization)`
    :returns: `Connectivity`
    :raises LaunchException: when `uploaded` is empty or nonexistent
    :raises Exception: when

        * weights or tracts matrix is invalid (negative values, wrong shape)
        * any of the vector orientation, areas, cortical or hemisphere is
          different from the expected number of nodes
    """
    if uploaded is None:
        raise LaunchException("Please select ZIP file which contains data to import")
    files = FilesHelper().unpack_zip(uploaded, self.storage_path)

    weights_matrix = None
    centres = None
    labels_vector = None
    tract_matrix = None
    orientation = None
    areas = None
    cortical_vector = None
    hemisphere_vector = None

    # Recognize each unpacked file by a token in its name; first match wins.
    for file_name in files:
        file_name_low = file_name.lower()
        if self.WEIGHT_TOKEN in file_name_low:
            weights_matrix = self.read_list_data(file_name)
        elif self.CENTRES_TOKEN in file_name_low:
            centres = self.read_list_data(file_name, usecols=[1, 2, 3])
            # FIX: use the builtins `str` / `bool` instead of `numpy.str` /
            # `numpy.bool` — those aliases (identical in meaning) were
            # deprecated in NumPy 1.20 and removed in NumPy 1.24.
            labels_vector = self.read_list_data(file_name, dtype=str, usecols=[0])
        elif self.TRACT_TOKEN in file_name_low:
            tract_matrix = self.read_list_data(file_name)
        elif self.ORIENTATION_TOKEN in file_name_low:
            orientation = self.read_list_data(file_name)
        elif self.AREA_TOKEN in file_name_low:
            areas = self.read_list_data(file_name)
        elif self.CORTICAL_INFO in file_name_low:
            cortical_vector = self.read_list_data(file_name, dtype=bool)
        elif self.HEMISPHERE_INFO in file_name_low:
            hemisphere_vector = self.read_list_data(file_name, dtype=bool)

    ### Clean remaining text-files.
    FilesHelper.remove_files(files, True)

    result = Connectivity()
    result.storage_path = self.storage_path

    ### Fill positions
    if centres is None:
        raise Exception("Region centres are required for Connectivity Regions! "
                        "We expect a file that contains *centres* inside the uploaded ZIP.")
    # The centres file defines the expected node count for every other part.
    expected_number_of_nodes = len(centres)
    if expected_number_of_nodes < 2:
        raise Exception("A connectivity with at least 2 nodes is expected")
    result.centres = centres
    if labels_vector is not None:
        result.region_labels = labels_vector

    ### Fill and check weights
    if weights_matrix is not None:
        # Vectorized check instead of a Python-level loop over the flattened array.
        if numpy.any(weights_matrix < 0):
            raise Exception("Negative values are not accepted in weights matrix! "
                            "Please check your file, and use values >= 0")
        if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception("Unexpected shape for weights matrix! "
                            "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.weights = weights_matrix
        if normalization:
            result.weights = result.scaled_weights(normalization)

    ### Fill and check tracts
    if tract_matrix is not None:
        if numpy.any(tract_matrix < 0):
            raise Exception("Negative values are not accepted in tracts matrix! "
                            "Please check your file, and use values >= 0")
        if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception("Unexpected shape for tracts matrix! "
                            "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.tract_lengths = tract_matrix

    # Remaining optional vectors must have one entry per region centre.
    if orientation is not None:
        if len(orientation) != expected_number_of_nodes:
            raise Exception("Invalid size for vector orientation. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.orientations = orientation
    if areas is not None:
        if len(areas) != expected_number_of_nodes:
            raise Exception("Invalid size for vector areas. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.areas = areas
    if cortical_vector is not None:
        if len(cortical_vector) != expected_number_of_nodes:
            raise Exception("Invalid size for vector cortical. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.cortical = cortical_vector
    if hemisphere_vector is not None:
        if len(hemisphere_vector) != expected_number_of_nodes:
            raise Exception("Invalid size for vector hemispheres. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.hemispheres = hemisphere_vector
    return result