def _run_interface(self, runtime):
    """Build correlation/anticorrelation networks from per-node IC t-values.

    Loads per-node t-values for the chosen independent component, draws a
    weighted edge between every ROI pair whose t-values share sign
    (correlation) or differ in sign (anticorrelation), maps the matrices onto
    the resolution network, and writes three gpickled networks (combined,
    correlation, anticorrelation) plus a .mat statistics file.
    """
    key = 'congraph'
    edge_key = 'weight'
    iflogger.info('T-value Threshold file: {t}'.format(
        t=self.inputs.t_value_threshold_file))
    iflogger.info('Independent component to use: {i}'.format(
        i=self.inputs.component_index))
    path, name, ext = split_filename(self.inputs.t_value_threshold_file)
    if ext == '.mat':
        t_value_dict = sio.loadmat(self.inputs.t_value_threshold_file)
        t_values = t_value_dict['t_value_per_node']
        # component_index is 1-based on the interface; columns are 0-based.
        t_value_per_node = t_values[:, self.inputs.component_index - 1]
        number_of_ICs = np.shape(t_values)[1]
    else:
        # Fail fast: the original only logged this hint and then crashed
        # later with a NameError on t_value_per_node.
        raise ValueError(
            "Please save the t-values as a Matlab file with key "
            "'t_value_per_node'")
    functional = nb.load(self.inputs.in_file)
    functionaldata = functional.get_data()
    segmentation = nb.load(self.inputs.segmentation_file)
    segmentationdata = segmentation.get_data()
    rois = get_roi_list(self.inputs.segmentation_file)
    number_of_nodes = len(rois)
    iflogger.info(
        'Found {roi} unique region values'.format(roi=number_of_nodes))
    iflogger.info('Significance threshold: {p}'.format(
        p=self.inputs.significance_threshold))
    iflogger.info('Functional image: {img}'.format(img=self.inputs.in_file))
    iflogger.info('Segmentation image: {img}'.format(
        img=self.inputs.segmentation_file))
    # The two images must be on the same grid for voxelwise sampling below;
    # mismatches are logged but, as before, do not abort the run.
    if not get_data_dims(self.inputs.in_file) == get_data_dims(
            self.inputs.segmentation_file):
        iflogger.error(
            'Image dimensions are not the same, please reslice the images '
            'to the same dimensions')
        dx, dy, dz = get_data_dims(self.inputs.in_file)
        iflogger.error(
            'Functional image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))
        dx, dy, dz = get_data_dims(self.inputs.segmentation_file)
        iflogger.error(
            'Segmentation image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))

    stats = {}
    if self.inputs.give_nodes_values:
        # Mean functional value within each labelled region.
        func_mean = []
        for idx, roi in enumerate(rois):
            x, y, z = np.where(segmentationdata == roi)
            values = [
                functionaldata[x[index]][y[index]][z[index]]
                for index in range(len(x))
            ]
            func_mean.append(np.mean(values))
            iflogger.info('Region ID: {id}, Mean Value: {avg}'.format(
                id=roi, avg=np.mean(values)))
        stats[key] = func_mean

    connectivity_matrix = np.zeros((number_of_nodes, number_of_nodes))
    correlation_matrix = np.zeros((number_of_nodes, number_of_nodes))
    anticorrelation_matrix = np.zeros((number_of_nodes, number_of_nodes))
    iflogger.info('Drawing edges...')
    for idx_i, roi_i in enumerate(rois):
        t_i = t_value_per_node[idx_i]
        for idx_j, roi_j in enumerate(rois):
            t_j = t_value_per_node[idx_j]
            if idx_j > idx_i:  # upper triangle only; symmetrized below
                if (t_i > 0 and t_j > 0) or (t_i < 0 and t_j < 0):
                    # Same sign: correlation edge.
                    weight = abs(t_i) + abs(t_j) - abs(t_i - t_j)
                    connectivity_matrix[idx_i, idx_j] = weight
                    correlation_matrix[idx_i, idx_j] = weight
                elif (t_i < 0 and t_j > 0) or (t_i > 0 and t_j < 0):
                    # Opposite sign: anticorrelation edge (negative weight
                    # in the combined matrix).
                    weight = abs(t_i) + abs(t_j) - abs(t_i + t_j)
                    connectivity_matrix[idx_i, idx_j] = -weight
                    anticorrelation_matrix[idx_i, idx_j] = weight
    edges = len(np.nonzero(connectivity_matrix)[0])
    cor_edges = len(np.nonzero(correlation_matrix)[0])
    anticor_edges = len(np.nonzero(anticorrelation_matrix)[0])
    iflogger.info('Total edges: {e}'.format(e=edges))
    iflogger.info('Total correlation edges: {c}'.format(c=cor_edges))
    iflogger.info(
        'Total anticorrelation edges: {a}'.format(a=anticor_edges))

    # Mirror the upper triangle so the matrices are symmetric.
    connectivity_matrix = connectivity_matrix + connectivity_matrix.T
    correlation_matrix = correlation_matrix + correlation_matrix.T
    anticorrelation_matrix = (
        anticorrelation_matrix + anticorrelation_matrix.T)

    stats[edge_key] = connectivity_matrix
    stats['correlation'] = correlation_matrix
    stats['anticorrelation'] = anticorrelation_matrix

    try:
        gp = nx.read_gpickle(self.inputs.resolution_network_file)
    except IndexError:
        gp = nx.read_graphml(self.inputs.resolution_network_file)
    nodedict = gp.node[gp.nodes()[0]]
    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if 'dn_position' not in nodedict:
        iflogger.info("Creating node positions from segmentation")
        G = nx.Graph()
        for u, d in gp.nodes_iter(data=True):
            G.add_node(int(u), d)
            # Node position = centroid of the (vertically flipped)
            # segmentation label for this node.
            xyz = tuple(
                np.mean(
                    np.where(
                        np.flipud(segmentationdata) == int(
                            d["dn_correspondence_id"])),
                    axis=1))
            G.node[int(u)]['dn_position'] = xyz
        ntwkname = op.abspath('nodepositions.pck')
        nx.write_gpickle(G, ntwkname)
    else:
        ntwkname = self.inputs.resolution_network_file

    try:
        ntwkname = nx.read_gpickle(ntwkname)
    except IndexError:
        ntwkname = nx.read_graphml(ntwkname)
    newntwk = ntwkname.copy()
    newntwk = remove_all_edges(newntwk)

    if self.inputs.give_nodes_values:
        newntwk = add_node_data(stats[key], newntwk)
        corntwk = add_node_data(stats[key], newntwk)
        anticorntwk = add_node_data(stats[key], newntwk)
        newntwk = add_edge_data(stats[edge_key], newntwk)
        corntwk = add_edge_data(stats['correlation'], corntwk)
        anticorntwk = add_edge_data(stats['anticorrelation'], anticorntwk)
    else:
        newntwk = add_edge_data(stats[edge_key], ntwkname)
        corntwk = add_edge_data(stats['correlation'], ntwkname)
        anticorntwk = add_edge_data(stats['anticorrelation'], ntwkname)

    if isdefined(self.inputs.out_network_file):
        path, name, ext = split_filename(self.inputs.out_network_file)
        if not ext == '.pck':
            ext = '.pck'
        out_network_file = op.abspath(name + ext)
    else:
        if isdefined(self.inputs.subject_id):
            out_network_file = op.abspath(
                self.inputs.subject_id + '_IC_' +
                str(self.inputs.component_index) + '.pck')
        else:
            out_network_file = op.abspath(
                'IC_' + str(self.inputs.component_index) + '.pck')

    path, name, ext = split_filename(out_network_file)
    iflogger.info(
        'Saving output network as {ntwk}'.format(ntwk=out_network_file))
    nx.write_gpickle(newntwk, out_network_file)

    out_correlation_network = op.abspath(name + '_correlation' + ext)
    iflogger.info('Saving correlation network as {ntwk}'.format(
        ntwk=out_correlation_network))
    nx.write_gpickle(corntwk, out_correlation_network)

    out_anticorrelation_network = op.abspath(
        name + '_anticorrelation' + ext)
    iflogger.info('Saving anticorrelation network as {ntwk}'.format(
        ntwk=out_anticorrelation_network))
    nx.write_gpickle(anticorntwk, out_anticorrelation_network)

    if isdefined(self.inputs.subject_id):
        stats['subject_id'] = self.inputs.subject_id

    if isdefined(self.inputs.out_stats_file):
        path, name, ext = split_filename(self.inputs.out_stats_file)
        if not ext == '.mat':
            ext = '.mat'
        out_stats_file = op.abspath(name + ext)
    else:
        if isdefined(self.inputs.subject_id):
            out_stats_file = op.abspath(
                self.inputs.subject_id + '_IC_' +
                str(self.inputs.component_index) + '.mat')
        else:
            out_stats_file = op.abspath(
                'IC_' + str(self.inputs.component_index) + '.mat')
    iflogger.info(
        'Saving image statistics as {stats}'.format(stats=out_stats_file))
    sio.savemat(out_stats_file, stats)
    return runtime
def _run_interface(self, runtime):
    """Build a Pearson-correlation network from ROI time courses.

    Extracts a time course per ROI from the functional volumes, computes the
    pairwise Pearson correlation between ROI time courses, attaches the
    resulting matrix as edge data on a copy of the structural network, and
    saves the network (.pck) and the correlation matrix (.mat).

    Raises
    ------
    ValueError
        If only one functional volume is available (correlation over time
        requires more than one sample).
    """
    if len(self.inputs.in_files) > 1:
        iflogger.info('Multiple input images detected')
        iflogger.info(len(self.inputs.in_files))
        in_files = self.inputs.in_files
    elif isdefined(self.inputs.in_file4d):
        # A single 4D image is split into a list of 3D volumes.
        iflogger.info('Single four-dimensional image selected')
        in_file4d = nb.load(self.inputs.in_file4d)
        in_files = nb.four_to_three(in_file4d)
    else:
        iflogger.info('Single functional image provided')
        in_files = self.inputs.in_files

    if len(in_files) == 1:
        iflogger.error(
            "Only one functional image was input. Pearson's correlation "
            "coefficient can not be calculated")
        # BUG FIX: the bare `raise ValueError` carried no message; put the
        # explanation on the exception as well as in the log.
        raise ValueError(
            "Only one functional image was input. Pearson's correlation "
            "coefficient can not be calculated")

    rois = get_roi_list(self.inputs.segmentation_file)
    fMRI_timecourse = get_timecourse_by_region(
        in_files, self.inputs.segmentation_file, rois)
    timecourse_at_each_node = fMRI_timecourse.T
    iflogger.info(np.shape(timecourse_at_each_node))
    iflogger.info('Structural Network: {s}'.format(
        s=self.inputs.structural_network))
    structural_network = nx.read_gpickle(self.inputs.structural_network)
    # Node order comes from the structural network, not the segmentation.
    rois = structural_network.nodes()
    number_of_nodes = len(rois)
    iflogger.info(
        'Found {roi} unique region values'.format(roi=len(rois)))

    newntwk = structural_network.copy()
    newntwk = remove_all_edges(newntwk)
    simple_correlation_matrix = np.zeros(
        (number_of_nodes, number_of_nodes))
    iflogger.info('Drawing edges...')
    for idx_i, roi_i in enumerate(rois):
        for idx_j, roi_j in enumerate(rois):
            if idx_j > idx_i:  # upper triangle only; symmetrized below
                simple_correlation_matrix[idx_i, idx_j] = pearsonr(
                    fMRI_timecourse[idx_i], fMRI_timecourse[idx_j])[0]
            elif roi_i == roi_j:
                # Diagonal is set to 0.5 so it becomes 1.0 after the
                # symmetrizing addition below.
                simple_correlation_matrix[idx_i, idx_j] = 0.5
    simple_correlation_matrix = (
        simple_correlation_matrix + simple_correlation_matrix.T)
    stats = {'correlation': simple_correlation_matrix}
    newntwk = add_edge_data(simple_correlation_matrix, newntwk)

    path, name, ext = split_filename(self.inputs.out_network_file)
    if not ext == '.pck':
        ext = '.pck'
    out_network_file = op.abspath(name + ext)
    iflogger.info('Saving simple correlation network as {out}'.format(
        out=out_network_file))
    nx.write_gpickle(newntwk, out_network_file)

    path, name, ext = split_filename(self.inputs.out_stats_file)
    if not ext == '.mat':
        ext = '.mat'
    out_stats_file = op.abspath(name + ext)
    iflogger.info(
        'Saving image statistics as {stats}'.format(stats=out_stats_file))
    sio.savemat(out_stats_file, stats)
    return runtime
def _run_interface(self, runtime):
    """Build correlation/anticorrelation networks from per-node IC t-values.

    Loads per-node t-values for the chosen independent component, draws a
    weighted edge between every ROI pair whose t-values share sign
    (correlation) or differ in sign (anticorrelation), maps the matrices onto
    the resolution network, and writes three gpickled networks (combined,
    correlation, anticorrelation) plus a .mat statistics file.
    """
    key = 'congraph'
    edge_key = 'weight'
    iflogger.info('T-value Threshold file: {t}'.format(
        t=self.inputs.t_value_threshold_file))
    iflogger.info('Independent component to use: {i}'.format(
        i=self.inputs.component_index))
    path, name, ext = split_filename(self.inputs.t_value_threshold_file)
    if ext == '.mat':
        t_value_dict = sio.loadmat(self.inputs.t_value_threshold_file)
        t_values = t_value_dict['t_value_per_node']
        # component_index is 1-based on the interface; columns are 0-based.
        t_value_per_node = t_values[:, self.inputs.component_index - 1]
        number_of_ICs = np.shape(t_values)[1]
    else:
        # Fail fast: the original only logged this hint and then crashed
        # later with a NameError on t_value_per_node.
        raise ValueError(
            "Please save the t-values as a Matlab file with key "
            "'t_value_per_node'")
    functional = nb.load(self.inputs.in_file)
    functionaldata = functional.get_data()
    segmentation = nb.load(self.inputs.segmentation_file)
    segmentationdata = segmentation.get_data()
    rois = get_roi_list(self.inputs.segmentation_file)
    number_of_nodes = len(rois)
    iflogger.info(
        'Found {roi} unique region values'.format(roi=number_of_nodes))
    iflogger.info('Significance threshold: {p}'.format(
        p=self.inputs.significance_threshold))
    iflogger.info('Functional image: {img}'.format(img=self.inputs.in_file))
    iflogger.info('Segmentation image: {img}'.format(
        img=self.inputs.segmentation_file))
    # The two images must be on the same grid for voxelwise sampling below;
    # mismatches are logged but, as before, do not abort the run.
    if not get_data_dims(self.inputs.in_file) == get_data_dims(
            self.inputs.segmentation_file):
        iflogger.error(
            'Image dimensions are not the same, please reslice the images '
            'to the same dimensions')
        dx, dy, dz = get_data_dims(self.inputs.in_file)
        iflogger.error(
            'Functional image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))
        dx, dy, dz = get_data_dims(self.inputs.segmentation_file)
        iflogger.error(
            'Segmentation image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))

    stats = {}
    if self.inputs.give_nodes_values:
        # Mean functional value within each labelled region.
        func_mean = []
        for idx, roi in enumerate(rois):
            x, y, z = np.where(segmentationdata == roi)
            values = [
                functionaldata[x[index]][y[index]][z[index]]
                for index in range(len(x))
            ]
            func_mean.append(np.mean(values))
            iflogger.info('Region ID: {id}, Mean Value: {avg}'.format(
                id=roi, avg=np.mean(values)))
        stats[key] = func_mean

    connectivity_matrix = np.zeros((number_of_nodes, number_of_nodes))
    correlation_matrix = np.zeros((number_of_nodes, number_of_nodes))
    anticorrelation_matrix = np.zeros((number_of_nodes, number_of_nodes))
    iflogger.info('Drawing edges...')
    for idx_i, roi_i in enumerate(rois):
        t_i = t_value_per_node[idx_i]
        for idx_j, roi_j in enumerate(rois):
            t_j = t_value_per_node[idx_j]
            if idx_j > idx_i:  # upper triangle only; symmetrized below
                if (t_i > 0 and t_j > 0) or (t_i < 0 and t_j < 0):
                    # Same sign: correlation edge.
                    weight = abs(t_i) + abs(t_j) - abs(t_i - t_j)
                    connectivity_matrix[idx_i, idx_j] = weight
                    correlation_matrix[idx_i, idx_j] = weight
                elif (t_i < 0 and t_j > 0) or (t_i > 0 and t_j < 0):
                    # Opposite sign: anticorrelation edge (negative weight
                    # in the combined matrix).
                    weight = abs(t_i) + abs(t_j) - abs(t_i + t_j)
                    connectivity_matrix[idx_i, idx_j] = -weight
                    anticorrelation_matrix[idx_i, idx_j] = weight
    edges = len(np.nonzero(connectivity_matrix)[0])
    cor_edges = len(np.nonzero(correlation_matrix)[0])
    anticor_edges = len(np.nonzero(anticorrelation_matrix)[0])
    iflogger.info('Total edges: {e}'.format(e=edges))
    iflogger.info('Total correlation edges: {c}'.format(c=cor_edges))
    iflogger.info(
        'Total anticorrelation edges: {a}'.format(a=anticor_edges))

    # Mirror the upper triangle so the matrices are symmetric.
    connectivity_matrix = connectivity_matrix + connectivity_matrix.T
    correlation_matrix = correlation_matrix + correlation_matrix.T
    anticorrelation_matrix = (
        anticorrelation_matrix + anticorrelation_matrix.T)

    stats[edge_key] = connectivity_matrix
    stats['correlation'] = correlation_matrix
    stats['anticorrelation'] = anticorrelation_matrix

    try:
        gp = nx.read_gpickle(self.inputs.resolution_network_file)
    except IndexError:
        gp = nx.read_graphml(self.inputs.resolution_network_file)
    nodedict = gp.node[gp.nodes()[0]]
    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if 'dn_position' not in nodedict:
        iflogger.info("Creating node positions from segmentation")
        G = nx.Graph()
        for u, d in gp.nodes_iter(data=True):
            G.add_node(int(u), d)
            # Node position = centroid of the (vertically flipped)
            # segmentation label for this node.
            xyz = tuple(
                np.mean(
                    np.where(
                        np.flipud(segmentationdata) == int(
                            d["dn_correspondence_id"])),
                    axis=1))
            G.node[int(u)]['dn_position'] = xyz
        ntwkname = op.abspath('nodepositions.pck')
        nx.write_gpickle(G, ntwkname)
    else:
        ntwkname = self.inputs.resolution_network_file

    try:
        ntwkname = nx.read_gpickle(ntwkname)
    except IndexError:
        ntwkname = nx.read_graphml(ntwkname)
    newntwk = ntwkname.copy()
    newntwk = remove_all_edges(newntwk)

    if self.inputs.give_nodes_values:
        newntwk = add_node_data(stats[key], newntwk)
        corntwk = add_node_data(stats[key], newntwk)
        anticorntwk = add_node_data(stats[key], newntwk)
        newntwk = add_edge_data(stats[edge_key], newntwk)
        corntwk = add_edge_data(stats['correlation'], corntwk)
        anticorntwk = add_edge_data(stats['anticorrelation'], anticorntwk)
    else:
        newntwk = add_edge_data(stats[edge_key], ntwkname)
        corntwk = add_edge_data(stats['correlation'], ntwkname)
        anticorntwk = add_edge_data(stats['anticorrelation'], ntwkname)

    if isdefined(self.inputs.out_network_file):
        path, name, ext = split_filename(self.inputs.out_network_file)
        if not ext == '.pck':
            ext = '.pck'
        out_network_file = op.abspath(name + ext)
    else:
        if isdefined(self.inputs.subject_id):
            out_network_file = op.abspath(
                self.inputs.subject_id + '_IC_' +
                str(self.inputs.component_index) + '.pck')
        else:
            out_network_file = op.abspath(
                'IC_' + str(self.inputs.component_index) + '.pck')

    path, name, ext = split_filename(out_network_file)
    iflogger.info(
        'Saving output network as {ntwk}'.format(ntwk=out_network_file))
    nx.write_gpickle(newntwk, out_network_file)

    out_correlation_network = op.abspath(name + '_correlation' + ext)
    iflogger.info('Saving correlation network as {ntwk}'.format(
        ntwk=out_correlation_network))
    nx.write_gpickle(corntwk, out_correlation_network)

    out_anticorrelation_network = op.abspath(
        name + '_anticorrelation' + ext)
    iflogger.info('Saving anticorrelation network as {ntwk}'.format(
        ntwk=out_anticorrelation_network))
    nx.write_gpickle(anticorntwk, out_anticorrelation_network)

    if isdefined(self.inputs.subject_id):
        stats['subject_id'] = self.inputs.subject_id

    if isdefined(self.inputs.out_stats_file):
        path, name, ext = split_filename(self.inputs.out_stats_file)
        if not ext == '.mat':
            ext = '.mat'
        out_stats_file = op.abspath(name + ext)
    else:
        if isdefined(self.inputs.subject_id):
            out_stats_file = op.abspath(
                self.inputs.subject_id + '_IC_' +
                str(self.inputs.component_index) + '.mat')
        else:
            out_stats_file = op.abspath(
                'IC_' + str(self.inputs.component_index) + '.mat')
    iflogger.info(
        'Saving image statistics as {stats}'.format(stats=out_stats_file))
    sio.savemat(out_stats_file, stats)
    return runtime