def _run_interface(self, runtime):
    """Average the rs-fMRI signal over each cortical GM ROI and build a
    functional connectivity network per parcellation resolution.

    For every resolution ('scale33', ..., 'scale500', or a custom atlas):
      1. load the ROI label volume whose file name contains the
         resolution key,
      2. average the functional time series over each ROI,
      3. correlate every pair of ROI signals (restricted to the time
         points kept by FD/DVARS scrubbing when enabled),
      4. save the average time series (.npy/.mat) and the network in
         the requested output formats (gPickle / mat / graphml).

    NOTE(review): this uses the networkx 1.x API (nodes_iter,
    edges_iter, G.node, positional attr dicts) — confirm the pinned
    networkx version before modernizing.
    """
    print("Compute average rs-fMRI signal for each cortical ROI")
    print("====================================================")

    fdata = nib.load(self.inputs.func_file).get_data()

    # Number of time points (4th dimension of the functional image).
    tp = fdata.shape[3]

    if self.inputs.parcellation_scheme != "Custom":
        resolutions = get_parcellation(self.inputs.parcellation_scheme)
    else:
        resolutions = self.inputs.atlas_info

    if self.inputs.apply_scrubbing:
        # Load the framewise-displacement and DVARS series computed upstream.
        FD = np.load(self.inputs.FD)
        DVARS = np.load(self.inputs.DVARS)
        # Keep only time points whose motion metrics are below both thresholds.
        FD_th = self.inputs.FD_th
        DVARS_th = self.inputs.DVARS_th
        FD_mask = np.array(np.nonzero(FD < FD_th))[0, :]
        DVARS_mask = np.array(np.nonzero(DVARS < DVARS_th))[0, :]
        # FD/DVARS are defined between consecutive volumes, hence the +1
        # shift; the first volume has no predecessor and is always kept.
        index = np.sort(np.unique(np.concatenate((FD_mask, DVARS_mask)))) + 1
        index = np.concatenate(([0], index))
        log_scrubbing = "DISCARDED time points after scrubbing: " + str(FD.shape[0]-index.shape[0]+1) + " over " + str(FD.shape[0]+1)
        print(log_scrubbing)
        np.save(os.path.abspath('tp_after_scrubbing.npy'), index)
        sio.savemat(os.path.abspath('tp_after_scrubbing.mat'), {'index': index})
    else:
        # No scrubbing: keep every time point.
        index = np.linspace(0, tp-1, tp).astype('int')

    # Loop over all the resolutions ('scale33', ..., 'scale500').
    for parkey, parval in resolutions.items():
        print("Resolution = "+parkey)

        # Open the ROI volume whose file name matches this resolution.
        print("Open the corresponding ROI")
        roi_fname = None
        for vol in self.inputs.roi_volumes:
            if parkey in vol:
                roi_fname = vol
                print(roi_fname)  # fixed: was a Python 2 print statement
        if roi_fname is None:
            # Fail early with a clear message instead of a NameError (or
            # silently reusing the previous resolution's volume) below.
            raise ValueError("No ROI volume found for resolution %s" % parkey)
        roi = nib.load(roi_fname)
        roiData = roi.get_data().astype(np.uint32)

        # Create the graph, add node information from the parcellation
        # and record the ROI label ids in graph-node order.
        nROIs = parval['number_of_regions']
        print("Create the connection matrix (%s rois)" % nROIs)
        G = nx.Graph()
        gp = nx.read_graphml(parval['node_information_graphml'])
        ROI_idx = []
        for u, d in gp.nodes_iter(data=True):
            G.add_node(int(u), d)
            # Node position = mean voxel coordinate of the ROI in the
            # segmentation volume.
            G.node[int(u)]['dn_position'] = tuple(np.mean(np.where(roiData == int(d["dn_correspondence_id"])), axis=1))
            ROI_idx.append(int(d["dn_correspondence_id"]))

        # Matrix of average signals: number of ROIs x time points.
        odata = np.zeros((nROIs, tp), dtype=np.float32)

        # Average the functional signal over every ROI of this resolution.
        for roi_line, label in enumerate(ROI_idx):
            odata[roi_line, :] = fdata[roiData == label].mean(axis=0)

        np.save(os.path.abspath('averageTimeseries_%s.npy' % parkey), odata)
        sio.savemat(os.path.abspath('averageTimeseries_%s.mat' % parkey), {'TCS': odata})

        # Fill the connectivity matrix with pairwise Pearson correlations
        # computed on the retained (non-scrubbed) time points only.
        for i, i_signal in enumerate(odata):
            for j in range(i, nROIs):  # range: xrange is Python 2 only
                j_signal = odata[j, :]
                value = np.corrcoef(i_signal[index], j_signal[index])[0, 1]
                G.add_edge(ROI_idx[i], ROI_idx[j], corr=value)

        # Store the network in the requested formats.
        if 'gPickle' in self.inputs.output_types:
            nx.write_gpickle(G, 'connectome_%s.gpickle' % parkey)
        if 'mat' in self.inputs.output_types:
            # Edges: one dense matrix per edge attribute.
            edge_keys = G.edges(data=True)[0][2].keys()

            edge_struct = {}
            for edge_key in edge_keys:
                edge_struct[edge_key] = nx.to_numpy_matrix(G, weight=edge_key)

            # Nodes: one array per node attribute.
            size_nodes = parval['number_of_regions']
            node_keys = G.nodes(data=True)[0][1].keys()

            node_struct = {}
            for node_key in node_keys:
                if node_key == 'dn_position':
                    # np.float64 instead of the deprecated np.float alias
                    # (removed in numpy >= 1.24; same dtype).
                    node_arr = np.zeros([size_nodes, 3], dtype=np.float64)
                else:
                    node_arr = np.zeros(size_nodes, dtype=np.object_)
                node_n = 0
                for _, node_data in G.nodes(data=True):
                    node_arr[node_n] = node_data[node_key]
                    node_n += 1
                node_struct[node_key] = node_arr

            sio.savemat('connectome_%s.mat' % parkey, mdict={'sc': edge_struct, 'nodes': node_struct})
        if 'graphml' in self.inputs.output_types:
            # GraphML cannot serialize tuple attributes, so dn_position is
            # split into scalar x/y/z attributes on a copy of the graph.
            g2 = nx.Graph()
            for u_gml, v_gml, d_gml in G.edges_iter(data=True):
                g2.add_edge(u_gml, v_gml, d_gml)
            for u_gml, d_gml in G.nodes(data=True):
                g2.add_node(u_gml, {'dn_correspondence_id': d_gml['dn_correspondence_id'],
                                    'dn_fsname': d_gml['dn_fsname'],
                                    'dn_hemisphere': d_gml['dn_hemisphere'],
                                    'dn_name': d_gml['dn_name'],
                                    'dn_position_x': float(d_gml['dn_position'][0]),
                                    'dn_position_y': float(d_gml['dn_position'][1]),
                                    'dn_position_z': float(d_gml['dn_position'][2]),
                                    'dn_region': d_gml['dn_region']})
            nx.write_graphml(g2, 'connectome_%s.graphml' % parkey)

    print("[ DONE ]")
    return runtime
# --- Beispiel #2 (second example variant of the same interface) ---
 def _gen_outfilenames(self, basename):
     """Return the absolute output paths, one '<basename>_<scale>.nii.gz'
     file per scale of the selected parcellation scheme."""
     scales = get_parcellation(self.inputs.parcellation_scheme).keys()
     return [op.abspath(basename + '_' + scale + '.nii.gz') for scale in scales]
 def _run_interface(self, runtime):
     """Average the rs-fMRI signal over each cortical GM ROI and build a
     functional connectivity network per parcellation resolution.

     For every resolution ('scale33', ..., 'scale500', or a custom atlas):
       1. load the ROI label volume whose file name contains the
          resolution key,
       2. average the functional time series over each ROI,
       3. correlate every pair of ROI signals (restricted to the time
          points kept by FD/DVARS scrubbing when enabled),
       4. save the average time series (.npy/.mat) and the network in
          the requested output formats (gPickle / mat / graphml / cff).

     NOTE(review): this uses the networkx 1.x API (nodes_iter,
     edges_iter, G.node, positional attr dicts) — confirm the pinned
     networkx version before modernizing.
     """
     print("Compute average rs-fMRI signal for each cortical ROI")
     print("====================================================")

     fdata = nib.load(self.inputs.func_file).get_data()

     # Number of time points (4th dimension of the functional image).
     tp = fdata.shape[3]

     if self.inputs.parcellation_scheme != "Custom":
         resolutions = get_parcellation(self.inputs.parcellation_scheme)
     else:
         resolutions = self.inputs.atlas_info

     if self.inputs.apply_scrubbing:
         # Load the framewise-displacement and DVARS series computed upstream.
         FD = np.load(self.inputs.FD)
         DVARS = np.load(self.inputs.DVARS)
         # Keep only time points whose motion metrics are below both thresholds.
         FD_th = self.inputs.FD_th
         DVARS_th = self.inputs.DVARS_th
         FD_mask = np.array(np.nonzero(FD < FD_th))[0, :]
         DVARS_mask = np.array(np.nonzero(DVARS < DVARS_th))[0, :]
         # FD/DVARS are defined between consecutive volumes, hence the +1
         # shift; the first volume has no predecessor and is always kept.
         index = np.sort(np.unique(np.concatenate((FD_mask, DVARS_mask)))) + 1
         index = np.concatenate(([0], index))
         log_scrubbing = "DISCARDED time points after scrubbing: " + str(FD.shape[0]-index.shape[0]+1) + " over " + str(FD.shape[0]+1)
         print(log_scrubbing)
         np.save(os.path.abspath('tp_after_scrubbing.npy'), index)
         sio.savemat(os.path.abspath('tp_after_scrubbing.mat'), {'index': index})
     else:
         # No scrubbing: keep every time point.
         index = np.linspace(0, tp-1, tp).astype('int')

     # Loop over all the resolutions ('scale33', ..., 'scale500').
     for parkey, parval in resolutions.items():
         print("Resolution = "+parkey)

         # Open the ROI volume whose file name matches this resolution.
         print("Open the corresponding ROI")
         roi_fname = None
         for vol in self.inputs.roi_volumes:
             if parkey in vol:
                 roi_fname = vol
                 print(roi_fname)  # fixed: was a Python 2 print statement
         if roi_fname is None:
             # Fail early with a clear message instead of a NameError (or
             # silently reusing the previous resolution's volume) below.
             raise ValueError("No ROI volume found for resolution %s" % parkey)
         roi = nib.load(roi_fname)
         roiData = roi.get_data().astype(np.uint32)

         # Create the graph, add node information from the parcellation
         # and record the ROI label ids in graph-node order.
         nROIs = parval['number_of_regions']
         print("Create the connection matrix (%s rois)" % nROIs)
         G = nx.Graph()
         gp = nx.read_graphml(parval['node_information_graphml'])
         ROI_idx = []
         for u, d in gp.nodes_iter(data=True):
             G.add_node(int(u), d)
             # Node position = mean voxel coordinate of the ROI in the
             # segmentation volume.
             G.node[int(u)]['dn_position'] = tuple(np.mean(np.where(roiData == int(d["dn_correspondence_id"])), axis=1))
             ROI_idx.append(int(d["dn_correspondence_id"]))

         # Matrix of average signals: number of ROIs x time points.
         odata = np.zeros((nROIs, tp), dtype=np.float32)

         # Average the functional signal over every ROI of this resolution.
         for roi_line, label in enumerate(ROI_idx):
             odata[roi_line, :] = fdata[roiData == label].mean(axis=0)

         np.save(os.path.abspath('averageTimeseries_%s.npy' % parkey), odata)
         sio.savemat(os.path.abspath('averageTimeseries_%s.mat' % parkey), {'TCS': odata})

         # Fill the connectivity matrix with pairwise Pearson correlations
         # computed on the retained (non-scrubbed) time points only.
         for i, i_signal in enumerate(odata):
             for j in range(i, nROIs):  # range: xrange is Python 2 only
                 j_signal = odata[j, :]
                 value = np.corrcoef(i_signal[index], j_signal[index])[0, 1]
                 G.add_edge(ROI_idx[i], ROI_idx[j], corr=value)

         # Store the network in the requested formats.
         if 'gPickle' in self.inputs.output_types:
             nx.write_gpickle(G, 'connectome_%s.gpickle' % parkey)
         if 'mat' in self.inputs.output_types:
             # Edges: one dense matrix per edge attribute.
             edge_keys = G.edges(data=True)[0][2].keys()

             edge_struct = {}
             for edge_key in edge_keys:
                 edge_struct[edge_key] = nx.to_numpy_matrix(G, weight=edge_key)

             # Nodes: one array per node attribute.
             size_nodes = parval['number_of_regions']
             node_keys = G.nodes(data=True)[0][1].keys()

             node_struct = {}
             for node_key in node_keys:
                 if node_key == 'dn_position':
                     # np.float64 instead of the deprecated np.float alias
                     # (removed in numpy >= 1.24; same dtype).
                     node_arr = np.zeros([size_nodes, 3], dtype=np.float64)
                 else:
                     node_arr = np.zeros(size_nodes, dtype=np.object_)
                 node_n = 0
                 for _, node_data in G.nodes(data=True):
                     node_arr[node_n] = node_data[node_key]
                     node_n += 1
                 node_struct[node_key] = node_arr

             sio.savemat('connectome_%s.mat' % parkey, mdict={'sc': edge_struct, 'nodes': node_struct})
         if 'graphml' in self.inputs.output_types:
             # GraphML cannot serialize tuple attributes, so dn_position is
             # split into scalar x/y/z attributes on a copy of the graph.
             g2 = nx.Graph()
             for u_gml, d_gml in G.nodes(data=True):
                 g2.add_node(u_gml, {'dn_correspondence_id': d_gml['dn_correspondence_id'],
                                     'dn_fsname': d_gml['dn_fsname'],
                                     'dn_hemisphere': d_gml['dn_hemisphere'],
                                     'dn_name': d_gml['dn_name'],
                                     'dn_position_x': float(d_gml['dn_position'][0]),
                                     'dn_position_y': float(d_gml['dn_position'][1]),
                                     'dn_position_z': float(d_gml['dn_position'][2]),
                                     'dn_region': d_gml['dn_region']})
             for u_gml, v_gml, d_gml in G.edges_iter(data=True):
                 g2.add_edge(u_gml, v_gml, {'corr': float(d_gml['corr'])})
             nx.write_graphml(g2, 'connectome_%s.graphml' % parkey)

         if 'cff' in self.inputs.output_types:
             # Bundle the gpickled networks and ROI volumes into a
             # Connectome File Format archive.
             cvt = cmtk.CFFConverter()
             cvt.inputs.title = 'Connectome mapper'
             cvt.inputs.nifti_volumes = self.inputs.roi_volumes
             cvt.inputs.gpickled_networks = glob.glob(os.path.abspath("connectome_*.gpickle"))
             cvt.run()

     print("[ DONE ]")
     return runtime