def remove_unconnected_graphs_avg_and_cff(in_files, resolution_network_file, group_id):
    import nipype.interfaces.cmtk as cmtk
    import nipype.pipeline.engine as pe
    from nipype.utils.filemanip import split_filename
    import os
    import os.path as op
    import networkx as nx
    connected = []
    if in_files is None or in_files == [None]:
        return None
    elif len(in_files) == 0:
        return None
    for in_file in in_files:
        graph = nx.read_gpickle(in_file)
        if not graph.number_of_edges() == 0:
            connected.append(in_file)
            print(in_file)
    if connected == []:
        return None
    avg_out_name = op.abspath(group_id + '_n=' + str(len(connected)) + '_average.pck')
    avg_out_cff_name = op.abspath(group_id + '_n=' + str(len(connected)) + '_Networks.cff')

    average_networks = cmtk.AverageNetworks()
    average_networks.inputs.in_files = connected
    average_networks.inputs.resolution_network_file = resolution_network_file
    average_networks.inputs.group_id = group_id
    average_networks.inputs.out_gpickled_groupavg = avg_out_name
    average_networks.run()

    _, name, ext = split_filename(avg_out_name)
    filtered_network_file = op.abspath(name + '_filt' + ext)

    threshold_graphs = cmtk.ThresholdGraph()
    from nipype.interfaces.cmtk.functional import tinv
    weight_threshold = 1  # tinv(0.95, 198-30-1)
    threshold_graphs.inputs.network_file = avg_out_name
    threshold_graphs.inputs.weight_threshold = weight_threshold
    threshold_graphs.inputs.above_threshold = True
    threshold_graphs.inputs.edge_key = "value"
    threshold_graphs.inputs.out_filtered_network_file = op.abspath(filtered_network_file)
    threshold_graphs.run()

    out_files = []
    out_files.append(avg_out_name)
    out_files.append(op.abspath(filtered_network_file))
    out_files.extend(connected)

    average_cff = cmtk.CFFConverter()
    average_cff.inputs.gpickled_networks = out_files
    average_cff.inputs.out_file = avg_out_cff_name
    average_cff.run()

    out_files.append(op.abspath(avg_out_cff_name))
    return out_files
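"""
Because the function above performs all of its imports internally, it can be wrapped
directly in a nipype Function interface and used as a node in a group-level workflow.
A usage sketch (the node name and group label below are hypothetical):

>>> import nipype.pipeline.engine as pe                              # doctest: +SKIP
>>> from nipype.interfaces.utility import Function                   # doctest: +SKIP
>>> average_and_package = pe.Node(
...     interface=Function(input_names=["in_files", "resolution_network_file", "group_id"],
...                        output_names=["out_files"],
...                        function=remove_unconnected_graphs_avg_and_cff),
...     name='average_and_package')                                  # doctest: +SKIP
>>> average_and_package.inputs.group_id = 'group1'                   # doctest: +SKIP
"""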
    def _run_interface(self, runtime):
        if isdefined(self.inputs.additional_maps):
            additional_maps = dict(
                (split_filename(add_map)[1], add_map)
                for add_map in self.inputs.additional_maps if add_map != '')
        else:
            additional_maps = {}

        if self.inputs.probtrackx:
            probtrackx_cmat(
                voxel_connectivity_files=self.inputs.track_file,
                roi_volumes=self.inputs.roi_volumes,
                parcellation_scheme=self.inputs.parcellation_scheme,
                atlas_info=self.inputs.atlas_info,
                output_types=self.inputs.output_types)
        elif len(self.inputs.track_file) > 1:
            prob_cmat(
                intrk=self.inputs.track_file,
                roi_volumes=self.inputs.roi_volumes,
                parcellation_scheme=self.inputs.parcellation_scheme,
                atlas_info=self.inputs.atlas_info,
                output_types=self.inputs.output_types)
        else:
            cmat(
                intrk=self.inputs.track_file[0],
                roi_volumes=self.inputs.roi_volumes,
                parcellation_scheme=self.inputs.parcellation_scheme,
                atlas_info=self.inputs.atlas_info,
                compute_curvature=self.inputs.compute_curvature,
                additional_maps=additional_maps,
                output_types=self.inputs.output_types)

        if 'cff' in self.inputs.output_types:
            cvt = cmtk.CFFConverter()
            cvt.inputs.title = 'Connectome mapper'
            cvt.inputs.nifti_volumes = self.inputs.roi_volumes
            cvt.inputs.tract_files = ['streamline_final.trk']
            cvt.inputs.gpickled_networks = glob.glob(
                os.path.abspath("connectome_*.gpickle"))
            cvt.run()
        return runtime
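    """
    For reference, split_filename() (used above to key the additional maps) returns a
    (path, base, extension) triple and, unlike os.path.splitext, treats double
    extensions such as .nii.gz as a single extension:

    >>> from nipype.utils.filemanip import split_filename
    >>> split_filename('/tmp/sub01_gfa.nii.gz')
    ('/tmp', 'sub01_gfa', '.nii.gz')
    """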
If you intend to view the meshes in gmsh or Blender, you should change the workflow creation
to use stereolithographic (stl) format.
"""

tessflow = create_tessellation_flow(name='tessflow', out_format='gii')
tessflow.inputs.inputspec.subject_id = 'fsaverage'
tessflow.inputs.inputspec.subjects_dir = subjects_dir
tessflow.inputs.inputspec.lookup_file = lookup_file

"""
We also create a conditional node to package the surfaces for ConnectomeViewer.
Simply set cff to "False" to ignore this step.
"""

cff = True
if cff:
    cff = pe.Node(interface=cmtk.CFFConverter(), name='cff')
    cff.inputs.out_file = 'Meshes.cff'

"""
Outputs
=======

Create a datasink to organize the smoothed meshes.
Using regular-expression substitutions we can remove the extraneous folders generated by the mapnode.
"""

datasink = pe.Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = 'meshes'
datasink.inputs.regexp_substitutions = [(r'_smoother[\d]*/', '')]

"""
Execution
=========
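"""
A quick standalone check of the regular-expression substitution configured above; it
strips the per-iteration folders that the smoothing mapnode inserts into the sink
paths (the example path is hypothetical):

>>> import re
>>> re.sub(r'_smoother[\d]*/', '', 'meshes/_smoother3/lh.pial.gii')
'meshes/lh.pial.gii'
"""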
def create_connectivity_pipeline(name="connectivity"): """Creates a pipeline that does the same connectivity processing as in the :ref:`example_dmri_connectivity` example script. Given a subject id (and completed Freesurfer reconstruction) diffusion-weighted image, b-values, and b-vectors, the workflow will return the subject's connectome as a Connectome File Format (CFF) file for use in Connectome Viewer (http://www.cmtk.org). Example ------- >>> from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline >>> conmapper = create_connectivity_pipeline("nipype_conmap") >>> conmapper.inputs.inputnode.subjects_dir = '.' >>> conmapper.inputs.inputnode.subject_id = 'subj1' >>> conmapper.inputs.inputnode.dwi = 'data.nii.gz' >>> conmapper.inputs.inputnode.bvecs = 'bvecs' >>> conmapper.inputs.inputnode.bvals = 'bvals' >>> conmapper.run() # doctest: +SKIP Inputs:: inputnode.subject_id inputnode.subjects_dir inputnode.dwi inputnode.bvecs inputnode.bvals inputnode.resolution_network_file Outputs:: outputnode.connectome outputnode.cmatrix outputnode.gpickled_network outputnode.fa outputnode.struct outputnode.trace outputnode.tracts outputnode.tensors """ inputnode_within = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file", ]), name="inputnode_within") FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource') FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceLH') FreeSurferSourceLH.inputs.hemi = 'lh' FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceRH') FreeSurferSourceRH.inputs.hemi = 'rh' """ Since the b values and b vectors come from the FSL course, we must convert it to a scheme file for use in Camino. """ fsl2scheme = pe.Node(interface=camino.FSL2Scheme(), name="fsl2scheme") fsl2scheme.inputs.usegradmod = True """ FSL's Brain Extraction tool is used to create a mask from the b0 image """ b0Strip = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') """ FSL's FLIRT function is used to coregister the b0 mask and the structural image. A convert_xfm node is then used to obtain the inverse of the transformation matrix. FLIRT is used once again to apply the inverse transformation to the parcellated brain image. """ coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister') coregister.inputs.cost = ('normmi') convertxfm = pe.Node(interface=fsl.ConvertXFM(), name = 'convertxfm') convertxfm.inputs.invert_xfm = True inverse = pe.Node(interface=fsl.FLIRT(), name = 'inverse') inverse.inputs.interp = ('nearestneighbour') inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name = 'inverse_AparcAseg') inverse_AparcAseg.inputs.interp = ('nearestneighbour') """ A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject. 
    """
    A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject.
    Nodes are used to convert the following:

        * Original structural image to NIFTI
        * Parcellated white matter image to NIFTI
        * Parcellated whole-brain image to NIFTI
        * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres
          are converted to GIFTI for visualization in ConnectomeViewer
        * Parcellated annotation files for the left and right hemispheres are also converted to GIFTI
    """

    mri_convert_Brain = pe.Node(interface=fs.MRIConvert(), name='mri_convert_Brain')
    mri_convert_Brain.inputs.out_type = 'nii'
    mri_convert_AparcAseg = mri_convert_Brain.clone('mri_convert_AparcAseg')

    mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH')
    mris_convertLH.inputs.out_datatype = 'gii'
    mris_convertRH = mris_convertLH.clone('mris_convertRH')
    mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite')
    mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite')
    mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated')
    mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated')
    mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere')
    mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere')
    mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels')
    mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels')

    """
    In this section we create the nodes necessary for diffusion analysis.
    First, the diffusion image is converted to voxel order, since this is the format in which Camino does its processing.
    """

    image2voxel = pe.Node(interface=camino.Image2Voxel(), name="image2voxel")

    """
    Second, diffusion tensors are fit to the voxel-order data.
    If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface.
    """

    dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit')

    """
    Next, a lookup table is generated from the schemefile and the
    signal-to-noise ratio (SNR) of the unweighted (q=0) data.
    """

    dtlutgen = pe.Node(interface=camino.DTLUTGen(), name="dtlutgen")
    dtlutgen.inputs.snr = 16.0
    dtlutgen.inputs.inversion = 1

    """
    In this tutorial we implement probabilistic tractography using the PICo algorithm.
    PICo tractography requires an estimate of the fibre direction and a model of its uncertainty
    in each voxel; this probability distribution map is produced using the following node.
    """

    picopdfs = pe.Node(interface=camino.PicoPDFs(), name="picopdfs")
    picopdfs.inputs.inputmodel = 'dt'

    """
    Finally, tractography is performed. In this tutorial, we will use only one iteration for time-saving purposes.
    It is important to note that we use the TrackPICo interface here. This interface now expects the files required
    for PICo tracking (i.e. the output from picopdfs). Similar interfaces exist for alternative types of tracking,
    such as Bayesian tracking with Dirac priors (TrackBayesDirac).
    """

    track = pe.Node(interface=camino.TrackPICo(), name="track")
    track.inputs.iterations = 1

    """
    Currently, the best program for visualizing tracts is TrackVis. For this reason, a node is included to
    convert the raw tract data to .trk format. Solely for testing purposes, another node is added to perform the reverse.
    """

    camino2trackvis = pe.Node(interface=cam2trk.Camino2Trackvis(), name="camino2trackvis")
    camino2trackvis.inputs.min_length = 30
    camino2trackvis.inputs.voxel_order = 'LAS'
    trk2camino = pe.Node(interface=cam2trk.Trackvis2Camino(), name="trk2camino")
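    """
    The converted .trk file can be sanity-checked with nibabel's streamlines API, which
    also exposes the voxel order set above (filename hypothetical):

    >>> import nibabel as nib                                # doctest: +SKIP
    >>> trk = nib.streamlines.load('tracts.trk')             # doctest: +SKIP
    >>> len(trk.streamlines), trk.header['voxel_order']      # doctest: +SKIP
    """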
""" vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines") procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines") """ We can easily produce a variety of scalar values from our fitted tensors. The following nodes generate the fractional anisotropy and diffusivity trace maps and their associated headers, and then merge them back into a single .nii file. """ fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa') trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace') dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_fa') analyzeheader_fa.inputs.datatype = 'double' analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_trace') analyzeheader_trace.inputs.datatype = 'double' fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii') trace2nii = fa2nii.clone("trace2nii") """ This section adds the Connectome Mapping Toolkit (CMTK) nodes. These interfaces are fairly experimental and may not function properly. In order to perform connectivity mapping using CMTK, the parcellated structural data is rewritten using the indices and parcellation scheme from the connectome mapper (CMP). This process has been written into the ROIGen interface, which will output a remapped aparc+aseg image as well as a dictionary of label information (i.e. name, display colours) pertaining to the original and remapped regions. These label values are input from a user-input lookup table, if specified, and otherwise the default Freesurfer LUT (/freesurfer/FreeSurferColorLUT.txt). """ roigen = pe.Node(interface=cmtk.ROIGen(), name="ROIGen") roigen_structspace = roigen.clone("ROIGen_structspace") """ The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts and outputs a number of different files. The most important of which is the connectivity network itself, which is stored as a 'gpickle' and can be loaded using Python's NetworkX package (see CreateMatrix docstring). Also outputted are various NumPy arrays containing detailed tract information, such as the start and endpoint regions, and statistics on the mean and standard deviation for the fiber length of each connection. These matrices can be used in the ConnectomeViewer to plot the specific tracts that connect between user-selected regions. """ createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes") creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix") creatematrix.inputs.count_region_intersections = True """ Here we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file. """ CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter") giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces") giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels") niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes") fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays") gpickledNetworks = pe.Node(interface=util.Merge(1), name="NetworkFiles") """ Since we have now created all our nodes, we can define our workflow and start making connections. """ mapping = pe.Workflow(name='mapping') """ First, we connect the input node to the early conversion functions. 
    mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", "subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", "subject_id")])])

    mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", "subject_id")])])

    mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", "subject_id")])])

    """
    Required conversions for processing in Camino:
    """

    mapping.connect([(inputnode_within, image2voxel, [("dwi", "in_file")]),
                     (inputnode_within, fsl2scheme, [("bvecs", "bvec_file"),
                                                     ("bvals", "bval_file")]),
                     (image2voxel, dtifit, [['voxel_order', 'in_file']]),
                     (fsl2scheme, dtifit, [['scheme', 'scheme_file']])
                     ])

    """
    Nifti conversions for the subject's stripped brain image from Freesurfer:
    """

    mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', 'in_file')])])

    """
    Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres)
    """

    mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])])

    """
    The annotation files are converted using the pial surface as a map via the MRIsConvert interface.
    One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files
    specifically (rather than e.g. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource.
    """

    mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,
                      [(('annot', select_aparc_annot), 'annot_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,
                      [(('annot', select_aparc_annot), 'annot_file')])])

    """
    This section coregisters the diffusion-weighted and parcellated white-matter / whole brain images.
    At present the conmap node connection is left commented, as there have been recent changes in Camino
    code that have presented some users with errors.
    """

    mapping.connect([(inputnode_within, b0Strip, [('dwi', 'in_file')])])
    mapping.connect([(inputnode_within, b0Strip, [('dwi', 't2_guided')])])  # Added to improve damaged brain extraction
    mapping.connect([(b0Strip, coregister, [('out_file', 'in_file')])])
    mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference')])])
    mapping.connect([(coregister, convertxfm, [('out_matrix_file', 'in_file')])])
    mapping.connect([(b0Strip, inverse, [('out_file', 'reference')])])
    mapping.connect([(convertxfm, inverse, [('out_file', 'in_matrix_file')])])
    mapping.connect([(mri_convert_Brain, inverse, [('out_file', 'in_file')])])
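    """
    The select_aparc_annot helper used for the annotation conversion above is defined
    earlier in the example and is not part of this excerpt; a plausible minimal
    reconstruction that picks lh.aparc.annot / rh.aparc.annot out of the
    FreeSurferSource 'annot' list would be:

    >>> def select_aparc_annot(files):
    ...     for name in files:
    ...         if 'aparc.annot' in name and 'a2009s' not in name:
    ...             return name
    ...     raise ValueError('aparc.annot not found')
    """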
""" mapping.connect([(b0Strip, track,[("mask_file","seed_file")])]) mapping.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])]) mapping.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])]) mapping.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])]) mapping.connect([(picopdfs, track,[("pdfs","in_file")])]) """ Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the tensor fitting. This is also where our voxel- and data-grabbing functions come in. We pass these functions, along with the original DWI image from the input node, to the header-generating nodes. This ensures that the files will be correct and readable. """ mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])]) mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])]) mapping.connect([(inputnode_within, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'), (('dwi', get_data_dims), 'data_dims')])]) mapping.connect([(fa, fa2nii,[('fa','data_file')])]) mapping.connect([(inputnode_within, fa2nii,[(('dwi', get_affine), 'affine')])]) mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])]) mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])]) mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])]) mapping.connect([(inputnode_within, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'), (('dwi', get_data_dims), 'data_dims')])]) mapping.connect([(trace, trace2nii,[('trace','data_file')])]) mapping.connect([(inputnode_within, trace2nii,[(('dwi', get_affine), 'affine')])]) mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])]) mapping.connect([(dtifit, dteig,[("tensor_fitted","in_file")])]) """ The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing functions defined at the beginning of the pipeline. """ mapping.connect([(track, camino2trackvis, [('tracked','in_file')]), (track, vtkstreamlines,[['tracked','in_file']]), (camino2trackvis, trk2camino,[['trackvis','in_file']]) ]) mapping.connect([(inputnode_within, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'), (('dwi', get_data_dims), 'data_dims')])]) """ Here the CMTK connectivity mapping nodes are connected. The original aparc+aseg image is converted to NIFTI, then registered to the diffusion image and delivered to the ROIGen node. The remapped parcellation, original tracts, and label file are then given to CreateMatrix. 
""" mapping.connect(inputnode_within, 'resolution_network_file', createnodes, 'resolution_network_file') mapping.connect(createnodes, 'node_network', creatematrix, 'resolution_network_file') mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, [(('aparc_aseg', select_aparc), 'in_file')])]) mapping.connect([(b0Strip, inverse_AparcAseg,[('out_file','reference')])]) mapping.connect([(convertxfm, inverse_AparcAseg,[('out_file','in_matrix_file')])]) mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg,[('out_file','in_file')])]) mapping.connect([(mri_convert_AparcAseg, roigen_structspace,[('out_file','aparc_aseg_file')])]) mapping.connect([(roigen_structspace, createnodes,[("roi_file","roi_file")])]) mapping.connect([(inverse_AparcAseg, roigen,[("out_file","aparc_aseg_file")])]) mapping.connect([(roigen, creatematrix,[("roi_file","roi_file")])]) mapping.connect([(camino2trackvis, creatematrix,[("trackvis","tract_file")])]) mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_file")])]) mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_mat_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. """ mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])]) mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])]) mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])]) mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])]) mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])]) mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])]) mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])]) mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])]) mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])]) mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])]) mapping.connect([(roigen, niftiVolumes,[("roi_file","in1")])]) mapping.connect([(inputnode_within, niftiVolumes,[("dwi","in2")])]) mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])]) mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])]) mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])]) mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])]) mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])]) """ This block actually connects the merged lists to the CFF converter. We pass the surfaces and volumes that are to be included, as well as the tracts and the network itself. The currently running pipeline (dmri_connectivity.py) is also scraped and included in the CFF file. This makes it easy for the user to examine the entire processing pathway used to generate the end product. 
""" CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe())) mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])]) mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])]) mapping.connect([(creatematrix, CFFConverter,[("matrix_files","gpickled_networks")])]) mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])]) mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])]) mapping.connect([(camino2trackvis, CFFConverter,[("trackvis","tract_files")])]) mapping.connect([(inputnode_within, CFFConverter,[("subject_id","title")])]) """ Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes declared at the beginning. Our tutorial can is now extensible to any arbitrary number of subjects by simply adding their names to the subject list and their data to the proper folders. """ inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file"]), name="inputnode") outputnode = pe.Node(interface = util.IdentityInterface(fields=["fa", "struct", "trace", "tracts", "connectome", "cmatrix", "networks", "rois", "mean_fiber_length", "fiber_length_std", "tensors"]), name="outputnode") connectivity = pe.Workflow(name="connectivity") connectivity.base_output_dir=name connectivity.connect([(inputnode, mapping, [("dwi", "inputnode_within.dwi"), ("bvals", "inputnode_within.bvals"), ("bvecs", "inputnode_within.bvecs"), ("subject_id", "inputnode_within.subject_id"), ("subjects_dir", "inputnode_within.subjects_dir"), ("resolution_network_file", "inputnode_within.resolution_network_file")]) ]) connectivity.connect([(mapping, outputnode, [("camino2trackvis.trackvis", "tracts"), ("CFFConverter.connectome_file", "connectome"), ("CreateMatrix.matrix_mat_file", "cmatrix"), ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), ("fa2nii.nifti_file", "fa"), ("CreateMatrix.matrix_files", "networks"), ("ROIGen.roi_file", "rois"), ("mri_convert_Brain.out_file", "struct"), ("trace2nii.nifti_file", "trace"), ("dtifit.tensor_fitted", "tensors")]) ]) return connectivity
cmp_config = cmp.configuration.PipelineConfiguration()
cmp_config.parcellation_scheme = "Lausanne2008"
createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes")
createnodes.inputs.resolution_network_file = cmp_config._get_lausanne_parcellation(
    'Lausanne2008')[parcellation_name]['node_information_graphml']

creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix")
creatematrix.inputs.count_region_intersections = True

"""
Next we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use
the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file.
The inspect.getfile command is used to package this script into the resulting CFF file, so that it is easy to
look back at the processing parameters that were used.
"""

CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter")
CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe()))
giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces")
giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels")
niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes")
fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays")
gpickledNetworks = pe.Node(interface=util.Merge(2), name="NetworkFiles")

"""
We also create a workflow to calculate several network metrics on our resulting file, and another CFF converter
which will be used to package these networks into a single file.
"""

networkx = create_networkx_pipeline(name='networkx')
cmats_to_csv = create_cmats_to_csv_pipeline(name='cmats_to_csv')
NxStatsCFFConverter = pe.Node(interface=cmtk.CFFConverter(),
                              name="NxStatsCFFConverter")
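"""
The inspect.getfile(inspect.currentframe()) idiom used above simply resolves to the
path of the currently executing script, which is how the script embeds itself into
the CFF file:

>>> import inspect
>>> import os.path as op
>>> op.abspath(inspect.getfile(inspect.currentframe()))  # doctest: +SKIP
"""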
def create_average_networks_by_group_workflow(group_list,
                                              data_dir,
                                              subjects_dir,
                                              output_dir,
                                              title='group_average'):
    """Creates a fourth-level pipeline to average the networks for two groups and merge them into a single
    CFF file. This pipeline will also output the average networks in .gexf format, for visualization in other
    graph viewers, such as Gephi.

    Example
    -------

    >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
    >>> from nipype.testing import example_data
    >>> subjects_dir = '.'
    >>> data_dir = '.'
    >>> output_dir = '.'
    >>> group_list = {}
    >>> group_list['group1'] = ['subj1', 'subj2']
    >>> group_list['group2'] = ['subj3', 'subj4']
    >>> l4pipeline = groupwork.create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir, output_dir)
    >>> l4pipeline.run()                 # doctest: +SKIP

    Inputs::

        group_list: Dictionary of subject lists, keyed by group name
        data_dir: Path to the data directory
        subjects_dir: Path to the Freesurfer 'subjects' directory
        output_dir: Path for the output files
        title: String to use as a title for the output merged CFF file (default 'group_average')
    """
    l4infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id1', 'group_id2']),
        name='l4infosource')
    l4infosource.inputs.group_id1 = list(group_list.keys())[0]
    l4infosource.inputs.group_id2 = list(group_list.keys())[1]

    l4info = dict(
        networks=[['group_id', '']],
        CMatrices=[['group_id', '']],
        fibmean=[['group_id', 'mean_fiber_length']],
        fibdev=[['group_id', 'fiber_length_std']])

    l4source_grp1 = pe.Node(
        nio.DataGrabber(infields=['group_id'], outfields=list(l4info.keys())),
        name='l4source_grp1')
    l4source_grp1.inputs.template = '%s/%s'
    l4source_grp1.inputs.field_template = dict(
        networks=op.join(output_dir, '%s/networks/*/*%s*.pck'),
        CMatrices=op.join(output_dir, '%s/cmatrix/*/*%s*.mat'),
        fibmean=op.join(output_dir, '%s/mean_fiber_length/*/*%s*.mat'),
        fibdev=op.join(output_dir, '%s/fiber_length_std/*/*%s*.mat'))
    l4source_grp1.inputs.base_directory = output_dir
    l4source_grp1.inputs.template_args = l4info

    l4source_grp2 = l4source_grp1.clone(name='l4source_grp2')

    l4inputnode = pe.Node(
        interface=util.IdentityInterface(fields=[
            'networks_grp1', 'networks_grp2', 'CMatrices_grp1',
            'CMatrices_grp2', 'fibmean_grp1', 'fibmean_grp2', 'fibdev_grp1',
            'fibdev_grp2'
        ]),
        name='l4inputnode')

    average_networks_grp1 = pe.Node(
        interface=cmtk.AverageNetworks(), name='average_networks_grp1')
    average_networks_grp2 = average_networks_grp1.clone('average_networks_grp2')

    averagecff = pe.Node(interface=cmtk.CFFConverter(), name="averagecff")
    averagecff.inputs.out_file = title

    merge_gpickled_averages = pe.Node(
        interface=util.Merge(2), name='merge_gpickled_averages')
    merge_gexf_averages = merge_gpickled_averages.clone('merge_gexf_averages')

    l4datasink = pe.Node(interface=nio.DataSink(), name="l4datasink")
    l4datasink.inputs.base_directory = output_dir

    l4pipeline = pe.Workflow(name="l4output")
    l4pipeline.base_dir = output_dir
    l4pipeline.connect([
        (l4infosource, l4source_grp1, [('group_id1', 'group_id')]),
        (l4infosource, l4source_grp2, [('group_id2', 'group_id')]),
        (l4source_grp1, l4inputnode, [('CMatrices', 'CMatrices_grp1')]),
        (l4source_grp2, l4inputnode, [('CMatrices', 'CMatrices_grp2')]),
        (l4source_grp1, l4inputnode, [('networks', 'networks_grp1')]),
        (l4source_grp2, l4inputnode, [('networks', 'networks_grp2')]),
        (l4source_grp1, l4inputnode, [('fibmean', 'fibmean_grp1')]),
        (l4source_grp2, l4inputnode, [('fibmean', 'fibmean_grp2')]),
        (l4source_grp1, l4inputnode, [('fibdev', 'fibdev_grp1')]),
        (l4source_grp2, l4inputnode, [('fibdev', 'fibdev_grp2')]),
    ])

    l4pipeline.connect([(l4inputnode, average_networks_grp1,
                         [('networks_grp1', 'in_files')])])
    l4pipeline.connect([(l4infosource, average_networks_grp1,
                         [('group_id1', 'group_id')])])

    l4pipeline.connect([(l4inputnode, average_networks_grp2,
                         [('networks_grp2', 'in_files')])])
    l4pipeline.connect([(l4infosource, average_networks_grp2,
                         [('group_id2', 'group_id')])])

    l4pipeline.connect([(average_networks_grp1, merge_gpickled_averages,
                         [('gpickled_groupavg', 'in1')])])
    l4pipeline.connect([(average_networks_grp2, merge_gpickled_averages,
                         [('gpickled_groupavg', 'in2')])])

    l4pipeline.connect([(average_networks_grp1, merge_gexf_averages,
                         [('gexf_groupavg', 'in1')])])
    l4pipeline.connect([(average_networks_grp2, merge_gexf_averages,
                         [('gexf_groupavg', 'in2')])])

    l4pipeline.connect([(merge_gpickled_averages, l4datasink,
                         [('out', '@l4output.gpickled')])])
    l4pipeline.connect([(merge_gpickled_averages, averagecff,
                         [('out', 'gpickled_networks')])])
    l4pipeline.connect([(averagecff, l4datasink,
                         [('connectome_file', '@l4output.averagecff')])])
    l4pipeline.connect([(merge_gexf_averages, l4datasink,
                         [('out', '@l4output.gexf')])])

    return l4pipeline
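"""
The .gexf group averages produced by this pipeline can be opened in Gephi, or read
straight back into NetworkX (filename hypothetical):

>>> import networkx as nx                        # doctest: +SKIP
>>> avg = nx.read_gexf('group1_average.gexf')    # doctest: +SKIP
>>> sorted(avg.nodes())[:5]                      # doctest: +SKIP
"""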
def create_connectivity_pipeline(name="connectivity", parcellation_name='scale500'): inputnode_within = pe.Node(util.IdentityInterface(fields=[ "subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file" ]), name="inputnode_within") FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource') FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceLH') FreeSurferSourceLH.inputs.hemi = 'lh' FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceRH') FreeSurferSourceRH.inputs.hemi = 'rh' """ Creating the workflow's nodes ============================= """ """ Conversion nodes ---------------- """ """ A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject. Nodes are used to convert the following: * Original structural image to NIFTI * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres are converted to GIFTI for visualization in ConnectomeViewer * Parcellated annotation files for the left and right hemispheres are also converted to GIFTI """ mri_convert_Brain = pe.Node(interface=fs.MRIConvert(), name='mri_convert_Brain') mri_convert_Brain.inputs.out_type = 'nii' mri_convert_ROI_scale500 = mri_convert_Brain.clone( 'mri_convert_ROI_scale500') mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH') mris_convertLH.inputs.out_datatype = 'gii' mris_convertRH = mris_convertLH.clone('mris_convertRH') mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite') mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite') mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated') mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated') mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere') mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere') mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels') mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels') """ Diffusion processing nodes -------------------------- .. seealso:: dmri_mrtrix_dti.py Tutorial that focuses solely on the MRtrix diffusion processing http://www.brain.org.au/software/mrtrix/index.html MRtrix's online documentation """ """ b-values and b-vectors stored in FSL's format are converted into a single encoding file for MRTrix. """ fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') """ Distortions induced by eddy currents are corrected prior to fitting the tensors. The first image is used as a reference for which to warp the others. """ eddycorrect = create_eddy_correct_pipeline(name='eddycorrect') eddycorrect.inputs.inputnode.ref_num = 1 """ Tensors are fitted to each voxel in the diffusion-weighted image and from these three maps are created: * Major eigenvector in each voxel * Apparent diffusion coefficient * Fractional anisotropy """ dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(), name='tensor2vector') tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert_fa') MRconvert_fa.inputs.extension = 'nii' """ These nodes are used to create a rough brain mask from the b0 image. 
    """
    These nodes are used to create a rough brain mask from the b0 image.
    The b0 image is extracted from the original diffusion-weighted image, put through a simple
    thresholding routine, and smoothed using a 3x3 median filter.
    """

    MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert')
    MRconvert.inputs.extract_at_axis = 3
    MRconvert.inputs.extract_at_coordinate = [0]
    threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0')
    median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3d')

    """
    The brain mask is also used to help identify single-fiber voxels.
    This is done by passing the brain mask through two erosion steps, multiplying the
    remaining mask with the fractional anisotropy map, and thresholding the result to
    obtain some highly anisotropic within-brain voxels.
    """

    erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_firstpass')
    erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_secondpass')
    MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply')
    MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge')
    threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA')
    threshold_FA.inputs.absolute_threshold_value = 0.7

    """
    For whole-brain tracking we also require a broad white-matter seed mask.
    This is created by generating a white matter mask, given a brainmask, and
    thresholding it at a reasonably high level.
    """

    bet = pe.Node(interface=fsl.BET(mask=True), name='bet_b0')
    gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask')
    threshold_wmmask = pe.Node(interface=mrtrix.Threshold(), name='threshold_wmmask')
    threshold_wmmask.inputs.absolute_threshold_value = 0.4

    """
    The spherical deconvolution step depends on the estimate of the response function
    in the highly anisotropic voxels we obtained above.

    .. warning::

        For damaged or pathological brains one should take care to lower the maximum harmonic order of these steps.
    """

    estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(), name='estimateresponse')
    estimateresponse.inputs.maximum_harmonic_order = 6
    csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv')
    csdeconv.inputs.maximum_harmonic_order = 6

    """
    Finally, we track probabilistically using the orientation distribution functions obtained earlier.
    The tracts are then used to generate a tract-density image, and they are also converted to TrackVis format.
    """

    probCSDstreamtrack = pe.Node(
        interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(),
        name='probCSDstreamtrack')
    probCSDstreamtrack.inputs.inputmodel = 'SD_PROB'
    probCSDstreamtrack.inputs.desired_number_of_tracks = 150000
    tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob')
    tracks2prob.inputs.colour = True
    MRconvert_tracks2prob = MRconvert_fa.clone(name='MRconvert_tracks2prob')
    tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk')

    """
    Structural segmentation nodes
    -----------------------------
    """

    """
    The following node identifies the transformation between the diffusion-weighted
    image and the structural image. This transformation is then applied to the tracts
    so that they are in the same space as the regions of interest.
    """

    coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister')
    coregister.inputs.cost = 'normmi'
    """
    Parcellation is performed given the aparc+aseg image from Freesurfer.
    The CMTK Parcellation step subdivides these regions to return a higher-resolution parcellation scheme.
    The parcellation used here is entitled "scale500" and returns 1015 regions.
    """

    parcellate = pe.Node(interface=cmtk.Parcellate(), name="Parcellate")
    parcellate.inputs.parcellation_name = parcellation_name

    """
    The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts
    and outputs a number of different files. The most important of these is the connectivity network itself, which is
    stored as a 'gpickle' and can be loaded using Python's NetworkX package (see CreateMatrix docstring). Also output
    are various NumPy arrays containing detailed tract information, such as the start and endpoint regions, and
    statistics on the mean and standard deviation for the fiber length of each connection. These matrices can be used
    in the ConnectomeViewer to plot the specific tracts that connect between user-selected regions. Here we choose the
    Lausanne2008 parcellation scheme, since we are incorporating the CMTK parcellation step.
    """

    creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix")
    creatematrix.inputs.count_region_intersections = True

    """
    Next we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use
    the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file.
    The inspect.getfile command is used to package this script into the resulting CFF file, so that it is easy to
    look back at the processing parameters that were used.
    """

    CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter")
    CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe()))
    giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces")
    giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels")
    niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes")
    fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays")

    """
    We also create a node to calculate several network metrics on our resulting file, and another CFF converter
    which will be used to package these networks into a single file.
    """

    networkx = create_networkx_pipeline(name='networkx')
    cmats_to_csv = create_cmats_to_csv_pipeline(name='cmats_to_csv')
    nfibs_to_csv = pe.Node(interface=misc.Matlab2CSV(), name='nfibs_to_csv')
    merge_nfib_csvs = pe.Node(interface=misc.MergeCSVFiles(), name='merge_nfib_csvs')
    merge_nfib_csvs.inputs.extra_column_heading = 'Subject'
    merge_nfib_csvs.inputs.out_file = 'fibers.csv'
    NxStatsCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="NxStatsCFFConverter")
    NxStatsCFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe()))

    """
    Connecting the workflow
    =======================
    Here we connect our processing pipeline.
    """

    """
    Connecting the inputs, FreeSurfer nodes, and conversions
    --------------------------------------------------------
    """

    mapping = pe.Workflow(name='mapping')
""" mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", "subjects_dir")])]) mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", "subject_id")])]) mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])]) mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", "subject_id")])]) mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])]) mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", "subject_id")])]) mapping.connect([(inputnode_within, parcellate, [("subjects_dir", "subjects_dir")])]) mapping.connect([(inputnode_within, parcellate, [("subject_id", "subject_id")])]) mapping.connect([(parcellate, mri_convert_ROI_scale500, [('roi_file', 'in_file')])]) """ Nifti conversion for subject's stripped brain image from Freesurfer: """ mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file') ])]) mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file') ])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])]) """ The annotation files are converted using the pial surface as a map via the MRIsConvert interface. One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files specifically (rather than e.g. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. """ mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) """ Diffusion Processing -------------------- Now we connect the tensor computations: """ mapping.connect([(inputnode_within, fsl2mrtrix, [("bvecs", "bvec_file"), ("bvals", "bval_file")])]) mapping.connect([(inputnode_within, eddycorrect, [("dwi", "inputnode.in_file")])]) mapping.connect([(eddycorrect, dwi2tensor, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", "encoding_file")])]) mapping.connect([ (dwi2tensor, tensor2vector, [['tensor', 'in_file']]), (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), ]) mapping.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) mapping.connect([(tensor2fa, MRconvert_fa, [("FA", "in_file")])]) """ This block creates the rough brain mask to be multiplied, mulitplies it with the fractional anisotropy image, and thresholds it to get the single-fiber voxels. 
""" mapping.connect([(eddycorrect, MRconvert, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) mapping.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) mapping.connect([(median3d, erode_mask_firstpass, [("out_file", "in_file") ])]) mapping.connect([(erode_mask_firstpass, erode_mask_secondpass, [("out_file", "in_file")])]) mapping.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", "in2") ])]) mapping.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) mapping.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) """ Here the thresholded white matter mask is created for seeding the tractography. """ mapping.connect([(eddycorrect, bet, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(eddycorrect, gen_WM_mask, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) mapping.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", "encoding_file")])]) mapping.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", "in_file")])]) """ Next we estimate the fiber response distribution. """ mapping.connect([(eddycorrect, estimateresponse, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", "encoding_file")])]) mapping.connect([(threshold_FA, estimateresponse, [("out_file", "mask_image")])]) """ Run constrained spherical deconvolution. """ mapping.connect([(eddycorrect, csdeconv, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", "mask_image")])]) mapping.connect([(estimateresponse, csdeconv, [("response", "response_file")])]) mapping.connect([(fsl2mrtrix, csdeconv, [("encoding_file", "encoding_file") ])]) """ Connect the tractography and compute the tract density image. """ mapping.connect([(threshold_wmmask, probCSDstreamtrack, [("out_file", "seed_file")])]) mapping.connect([(csdeconv, probCSDstreamtrack, [("spherical_harmonics_image", "in_file")])]) mapping.connect([(probCSDstreamtrack, tracks2prob, [("tracked", "in_file") ])]) mapping.connect([(eddycorrect, tracks2prob, [("outputnode.eddy_corrected", "template_file")])]) mapping.connect([(tracks2prob, MRconvert_tracks2prob, [("tract_image", "in_file")])]) """ Structural Processing --------------------- First, we coregister the diffusion image to the structural image """ mapping.connect([(eddycorrect, coregister, [("outputnode.eddy_corrected", "in_file")])]) mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference') ])]) """ The MRtrix-tracked fibers are converted to TrackVis format (with voxel and data dimensions grabbed from the DWI). The connectivity matrix is created with the transformed .trk fibers and the parcellation file. 
""" mapping.connect([(eddycorrect, tck2trk, [("outputnode.eddy_corrected", "image_file")])]) mapping.connect([(mri_convert_Brain, tck2trk, [("out_file", "registration_image_file")])]) mapping.connect([(coregister, tck2trk, [("out_matrix_file", "matrix_file") ])]) mapping.connect([(probCSDstreamtrack, tck2trk, [("tracked", "in_file")])]) mapping.connect([(tck2trk, creatematrix, [("out_file", "tract_file")])]) mapping.connect(inputnode_within, 'resolution_network_file', creatematrix, 'resolution_network_file') mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_file")])]) mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_mat_file")])]) mapping.connect([(parcellate, creatematrix, [("roi_file", "roi_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. """ mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", "in3") ])]) mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", "in4") ])]) mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", "in5")])]) mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", "in6")])]) mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", "in7")])]) mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", "in8")])]) mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", "in1") ])]) mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", "in2") ])]) mapping.connect([(parcellate, niftiVolumes, [("roi_file", "in1")])]) mapping.connect([(eddycorrect, niftiVolumes, [("outputnode.eddy_corrected", "in2")])]) mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", "in1") ])]) mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", "in2")])]) mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", "in3")])]) mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", "in4")])]) """ This block actually connects the merged lists to the CFF converter. We pass the surfaces and volumes that are to be included, as well as the tracts and the network itself. The currently running pipeline (dmri_connectivity_advanced.py) is also scraped and included in the CFF file. This makes it easy for the user to examine the entire processing pathway used to generate the end product. """ mapping.connect([(giftiSurfaces, CFFConverter, [("out", "gifti_surfaces")]) ]) mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) mapping.connect([(creatematrix, CFFConverter, [("matrix_files", "gpickled_networks")])]) mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) mapping.connect([(creatematrix, CFFConverter, [("filtered_tractography", "tract_files")])]) mapping.connect([(inputnode_within, CFFConverter, [("subject_id", "title") ])]) """ The graph theoretical metrics which have been generated are placed into another CFF file. 
""" mapping.connect([(inputnode_within, networkx, [("subject_id", "inputnode.extra_field")])]) mapping.connect([(creatematrix, networkx, [("intersection_matrix_file", "inputnode.network_file")])]) mapping.connect([(networkx, NxStatsCFFConverter, [("outputnode.network_files", "gpickled_networks")])]) mapping.connect([(giftiSurfaces, NxStatsCFFConverter, [("out", "gifti_surfaces")])]) mapping.connect([(giftiLabels, NxStatsCFFConverter, [("out", "gifti_labels")])]) mapping.connect([(niftiVolumes, NxStatsCFFConverter, [("out", "nifti_volumes")])]) mapping.connect([(fiberDataArrays, NxStatsCFFConverter, [("out", "data_files")])]) mapping.connect([(inputnode_within, NxStatsCFFConverter, [("subject_id", "title")])]) mapping.connect([(inputnode_within, cmats_to_csv, [("subject_id", "inputnode.extra_field")])]) mapping.connect([(creatematrix, cmats_to_csv, [ ("matlab_matrix_files", "inputnode.matlab_matrix_files") ])]) mapping.connect([(creatematrix, nfibs_to_csv, [("stats_file", "in_file")]) ]) mapping.connect([(nfibs_to_csv, merge_nfib_csvs, [("csv_files", "in_files") ])]) mapping.connect([(inputnode_within, merge_nfib_csvs, [("subject_id", "extra_field")])]) """ Create a higher-level workflow -------------------------------------- Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes declared at the beginning. Our tutorial can is now extensible to any arbitrary number of subjects by simply adding their names to the subject list and their data to the proper folders. """ inputnode = pe.Node(interface=util.IdentityInterface( fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode") outputnode = pe.Node(interface=util.IdentityInterface(fields=[ "fa", "struct", "tracts", "tracks2prob", "connectome", "nxstatscff", "nxmatlab", "nxcsv", "fiber_csv", "cmatrices_csv", "nxmergedcsv", "cmatrix", "networks", "filtered_tracts", "rois", "odfs", "tdi", "mean_fiber_length", "median_fiber_length", "fiber_length_std" ]), name="outputnode") connectivity = pe.Workflow(name="connectivity") connectivity.base_output_dir = name connectivity.base_dir = name connectivity.connect([(inputnode, mapping, [ ("dwi", "inputnode_within.dwi"), ("bvals", "inputnode_within.bvals"), ("bvecs", "inputnode_within.bvecs"), ("subject_id", "inputnode_within.subject_id"), ("subjects_dir", "inputnode_within.subjects_dir") ])]) connectivity.connect([(mapping, outputnode, [ ("tck2trk.out_file", "tracts"), ("CFFConverter.connectome_file", "connectome"), ("NxStatsCFFConverter.connectome_file", "nxstatscff"), ("CreateMatrix.matrix_mat_file", "cmatrix"), ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), ("CreateMatrix.median_fiber_length_matrix_mat_file", "median_fiber_length"), ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), ("CreateMatrix.matrix_files", "networks"), ("CreateMatrix.filtered_tractographies", "filtered_tracts"), ("merge_nfib_csvs.csv_file", "fiber_csv"), ("mri_convert_ROI_scale500.out_file", "rois"), ("csdeconv.spherical_harmonics_image", "odfs"), ("mri_convert_Brain.out_file", "struct"), ("MRconvert_fa.converted", "fa"), ("MRconvert_tracks2prob.converted", "tracks2prob") ])]) connectivity.connect([(cmats_to_csv, outputnode, [("outputnode.csv_file", "cmatrices_csv")])]) connectivity.connect([(networkx, outputnode, [("outputnode.csv_files", "nxcsv")])]) return connectivity
def create_fmri_graphs(name="functional", with_simple_timecourse_correlation=False): try: coma_rest_lib_path = os.environ['COMA_REST_LIB_ROOT'] except KeyError: print 'COMA_REST_LIB_ROOT environment variable not set.' inputnode_within = pe.Node(interface=util.IdentityInterface(fields=[ "subject_id", "functional_images", "segmentation_file", "repetition_time", "resolution_network_file" ]), name="inputnode_within") ica = pe.Node(interface=SingleSubjectICA(), name='ica') # Create the resampling nodes. Functional images and ICA maps must have the same dimensions as the segmentation file resampleFunctional = pe.MapNode(interface=fs.MRIConvert(), name='resampleFunctional', iterfield=['in_file']) resampleFunctional.inputs.out_type = 'nii' resampleICAmaps = pe.MapNode(interface=fs.MRIConvert(), name='resampleICAmaps', iterfield=['in_file']) resampleICAmaps.inputs.out_type = 'nii' resample_neuronal = pe.MapNode(interface=fs.MRIConvert(), name='resample_neuronal', iterfield=['in_file']) resample_neuronal.inputs.out_type = 'nii' if with_simple_timecourse_correlation: resample_non_neuronal = pe.MapNode(interface=fs.MRIConvert(), name='resample_non_neuronal', iterfield=['in_file']) resample_non_neuronal.inputs.out_type = 'nii' # Create the ComaRestLib nodes denoised_image = pe.Node(interface=CreateDenoisedImage(), name='denoised_image') denoised_image.inputs.coma_rest_lib_path = coma_rest_lib_path matching_classification = pe.Node(interface=MatchingClassification(), name='matching_classification') matching_classification.inputs.coma_rest_lib_path = coma_rest_lib_path compute_fingerprints = pe.MapNode(interface=ComputeFingerprint(), name='compute_fingerprints', iterfield=['in_file', 'component_index']) compute_fingerprints.inputs.coma_rest_lib_path = coma_rest_lib_path # Create the functional connectivity thresholding and mapping nodes createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes") connectivity_threshold = pe.Node( interface=cmtk.CreateConnectivityThreshold(), name='connectivity_threshold') connectivity_graph = pe.MapNode(interface=cmtk.ConnectivityGraph(), name='connectivity_graph', iterfield=['in_file', 'component_index']) neuronal_regional_timecourses = pe.Node( interface=cmtk.RegionalValues(), name="neuronal_regional_timecourses") # Define the CFF Converter, NetworkX MATLAB -> CommaSeparatedValue nodes graphCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="graphCFFConverter") neuronalCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="neuronalCFFConverter") neuronalCFFConverter.inputs.out_file = 'neuronal.cff' correlationCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="correlationCFFConverter") correlationCFFConverter.inputs.out_file = 'correlation.cff' ConnectivityGraphNetworkXMetrics_correlation = pe.MapNode( interface=cmtk.NetworkXMetrics(), name="cor_fMRIConnectivityGraphNetworkXMetrics", iterfield=['in_file']) Matlab2CSV_nx_cor = pe.MapNode(interface=misc.Matlab2CSV(), name="Matlab2CSV_nx_cor", iterfield=['in_file']) anticorrelationCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="anticorrelationCFFConverter") anticorrelationCFFConverter.inputs.out_file = 'anticorrelation.cff' ConnectivityGraphNetworkXMetrics_anticorrelation = pe.MapNode( interface=cmtk.NetworkXMetrics(), name="anticor_fMRIConnectivityGraphNetworkXMetrics", iterfield=['in_file']) Matlab2CSV_nx_anticor = pe.MapNode(interface=misc.Matlab2CSV(), name="Matlab2CSV_nx_anticor", iterfield=['in_file']) MergeCSVFiles_cor = pe.MapNode(interface=misc.MergeCSVFiles(), 
name="MergeCSVFiles_cor", iterfield=['in_files', 'extra_field']) MergeCSVFiles_cor.inputs.extra_column_heading = 'template_name' MergeCSVFiles_anticor = MergeCSVFiles_cor.clone('MergeCSVFiles_anticor') # Uses a Function interface to group the fMRI graphs and save the neuronal graphs with more detailed names group_fmri_graphs_interface = Function(input_names=[ "subject_id", "in_file", "component_index", "matching_stats" ], output_names=["out_file"], function=group_fmri_graphs) # Create lists of the neuronal, correlation, and anticorrelation graphs group_graphs = pe.MapNode(interface=group_fmri_graphs_interface, name='group_graphs', iterfield=['in_file', 'component_index']) group_graphs_corr = pe.MapNode(interface=group_fmri_graphs_interface, name='group_graphs_corr', iterfield=['in_file', 'component_index']) group_graphs_anticorr = pe.MapNode( interface=group_fmri_graphs_interface, name='group_graphs_anticorr', iterfield=['in_file', 'component_index']) # Define a simple interface for a function which removes 'None' values from the graph grouping above removeNoneValues_interface = Function(input_names=["in_files"], output_names=["out_files"], function=removeNoneValues) # Create nodes for the removing 'None' values from the neuronal, correlation, and anticorrelation lists grouped_graphs = pe.Node(interface=removeNoneValues_interface, name='grouped_graphs') grouped_graphs_corr = pe.Node(interface=removeNoneValues_interface, name='grouped_graphs_corr') grouped_graphs_anticorr = pe.Node(interface=removeNoneValues_interface, name='grouped_graphs_anticorr') remove_unconnected_graphs_interface = Function( input_names=["in_files"], output_names=["out_files"], function=remove_unconnected_graphs) remove_unconnected_corr = pe.Node( interface=remove_unconnected_graphs_interface, name='remove_unconnected_corr') remove_unconnected_anticorr = remove_unconnected_corr.clone( name='remove_unconnected_anticorr') concat_csv_interface = Function(input_names=["in_files"], output_names=["out_name"], function=concatcsv) concatcsv_cor = pe.Node(interface=concat_csv_interface, name='concatcsv_cor') concatcsv_anticor = pe.Node(interface=concat_csv_interface, name='concatcsv_anticor') add_subjid_to_csv_cor = pe.Node(interface=misc.AddCSVColumn(), name='add_subjid_to_csv_cor') add_subjid_to_csv_cor.inputs.extra_column_heading = 'Subject' add_subjid_to_csv_anticor = pe.Node(interface=misc.AddCSVColumn(), name='add_subjid_to_csv_anticor') add_subjid_to_csv_anticor.inputs.extra_column_heading = 'Subject' split_neuronal = pe.Node(interface=fsl.Split(), name='split_neuronal') split_neuronal.inputs.dimension = 't' TCcorrCFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="TCcorrCFFConverter") if with_simple_timecourse_correlation: split_non_neuronal = split_neuronal.clone(name='split_non_neuronal') neuronal_time_course_correlation = pe.Node( interface=cmtk.SimpleTimeCourseCorrelationGraph(), name='neuronal_time_course_correlation') neuronal_time_course_correlation.inputs.out_network_file = 'neuronal.pck' non_neuronal_time_course_correlation = pe.Node( interface=cmtk.SimpleTimeCourseCorrelationGraph(), name='non_neuronal_time_course_correlation') non_neuronal_time_course_correlation.inputs.out_network_file = 'non_neuronal.pck' TCcorrCFFConverter.inputs.out_file = 'time_course_correlation.cff' mergeSTCC = pe.Node(interface=util.Merge(2), name='mergeSTCC') else: TCcorrCFFConverter.inputs.out_file = 'time_courses.cff' ### Create the workflow ### func_ntwk = pe.Workflow(name='func_ntwk') 
    func_ntwk.connect([(inputnode_within, ica,
                        [('functional_images', 'in_files')])])
    func_ntwk.connect([(inputnode_within, ica, [('subject_id', 'prefix')])])

    # Create the denoised image
    func_ntwk.connect([(inputnode_within, denoised_image,
                        [('subject_id', 'prefix')])])
    func_ntwk.connect([(inputnode_within, denoised_image,
                        [('repetition_time', 'repetition_time')])])
    func_ntwk.connect([(ica, denoised_image,
                        [('independent_component_images', 'in_files')])])
    func_ntwk.connect([(ica, denoised_image,
                        [('mask_image', 'ica_mask_image')])])
    func_ntwk.connect([(ica, denoised_image,
                        [('independent_component_timecourse',
                          'time_course_image')])])

    # Runs the matching classification
    func_ntwk.connect([(inputnode_within, matching_classification,
                        [('subject_id', 'prefix')])])
    func_ntwk.connect([(inputnode_within, matching_classification,
                        [('repetition_time', 'repetition_time')])])
    func_ntwk.connect([(ica, matching_classification,
                        [('independent_component_images', 'in_files')])])
    func_ntwk.connect([(ica, matching_classification,
                        [('mask_image', 'ica_mask_image')])])
    func_ntwk.connect([(ica, matching_classification,
                        [('independent_component_timecourse',
                          'time_course_image')])])

    # Computes and saves the fingerprint for each IC
    func_ntwk.connect([(inputnode_within, compute_fingerprints,
                        [('repetition_time', 'repetition_time')])])
    func_ntwk.connect([(ica, compute_fingerprints,
                        [('independent_component_images', 'in_file')])])
    func_ntwk.connect([(ica, compute_fingerprints,
                        [('mask_image', 'ica_mask_image')])])
    func_ntwk.connect([(ica, compute_fingerprints,
                        [('independent_component_timecourse',
                          'time_course_image')])])
    func_ntwk.connect([(ica, compute_fingerprints,
                        [(('independent_component_images',
                           get_component_index), 'component_index')])])

    # Calculates the t-value threshold for each node/IC
    func_ntwk.connect([(inputnode_within, resampleFunctional,
                        [('functional_images', 'in_file')])])
    func_ntwk.connect([(inputnode_within, resampleFunctional,
                        [('segmentation_file', 'reslice_like')])])
    func_ntwk.connect([(resampleFunctional, connectivity_threshold,
                        [('out_file', 'in_files')])])
    func_ntwk.connect([(ica, connectivity_threshold,
                        [('independent_component_timecourse',
                          'time_course_file')])])
    func_ntwk.connect([(inputnode_within, connectivity_threshold,
                        [('segmentation_file', 'segmentation_file')])])

    # Resamples the ICA z-score maps to the same dimensions as the
    # segmentation file
    func_ntwk.connect([(ica, resampleICAmaps,
                        [('independent_component_images', 'in_file')])])
    func_ntwk.connect([(inputnode_within, resampleICAmaps,
                        [('segmentation_file', 'reslice_like')])])

    # Splits the 4D neuronal and non-neuronal images, resamples them, and
    # creates the time-course correlation graph
    func_ntwk.connect([(denoised_image, split_neuronal,
                        [('neuronal_image', 'in_file')])])
    func_ntwk.connect([(split_neuronal, resample_neuronal,
                        [('out_files', 'in_file')])])
    func_ntwk.connect([(inputnode_within, resample_neuronal,
                        [('segmentation_file', 'reslice_like')])])

    # Calculates the regional fMRI timecourses
    func_ntwk.connect([(inputnode_within, neuronal_regional_timecourses,
                        [('segmentation_file', 'segmentation_file')])])
    func_ntwk.connect([(createnodes, neuronal_regional_timecourses,
                        [('node_network', 'resolution_network_file')])])
    func_ntwk.connect([(resample_neuronal, neuronal_regional_timecourses,
                        [('out_file', 'in_files')])])
    neuronal_regional_timecourses.inputs.out_stats_file = 'denoised_fmri_timecourse.mat'

    if with_simple_timecourse_correlation:
        func_ntwk.connect([(inputnode_within,
                            neuronal_time_course_correlation,
                            [('segmentation_file', 'segmentation_file')])])
        func_ntwk.connect([(createnodes, neuronal_time_course_correlation,
                            [('node_network', 'structural_network')])])
        func_ntwk.connect([(resample_neuronal,
                            neuronal_time_course_correlation,
                            [('out_file', 'in_files')])])
        func_ntwk.connect([(neuronal_time_course_correlation, mergeSTCC,
                            [('network_file', 'in1')])])
        func_ntwk.connect([(denoised_image, split_non_neuronal,
                            [('non_neuronal_image', 'in_file')])])
        func_ntwk.connect([(split_non_neuronal, resample_non_neuronal,
                            [('out_files', 'in_file')])])
        func_ntwk.connect([(inputnode_within, resample_non_neuronal,
                            [('segmentation_file', 'reslice_like')])])
        func_ntwk.connect([(inputnode_within,
                            non_neuronal_time_course_correlation,
                            [('segmentation_file', 'segmentation_file')])])
        func_ntwk.connect([(createnodes,
                            non_neuronal_time_course_correlation,
                            [('node_network', 'structural_network')])])
        func_ntwk.connect([(resample_non_neuronal,
                            non_neuronal_time_course_correlation,
                            [('out_file', 'in_files')])])
        func_ntwk.connect([(non_neuronal_time_course_correlation, mergeSTCC,
                            [('network_file', 'in2')])])
        func_ntwk.connect([(mergeSTCC, TCcorrCFFConverter,
                            [('out', 'gpickled_networks')])])

    # Creates the nodes for the graph from the input segmentation file and
    # resolution network file
    func_ntwk.connect([(inputnode_within, createnodes,
                        [('segmentation_file', 'roi_file')])])
    func_ntwk.connect([(inputnode_within, createnodes,
                        [('resolution_network_file',
                          'resolution_network_file')])])
    func_ntwk.connect([(inputnode_within, MergeCSVFiles_cor,
                        [(('resolution_network_file', pullnodeIDs),
                          'row_headings')])])
    func_ntwk.connect([(inputnode_within, MergeCSVFiles_anticor,
                        [(('resolution_network_file', pullnodeIDs),
                          'row_headings')])])

    # Creates a connectivity graph for each IC and stores all of the graphs
    # in a CFF file
    func_ntwk.connect([(inputnode_within, connectivity_graph,
                        [('segmentation_file', 'segmentation_file')])])
    func_ntwk.connect([(createnodes, connectivity_graph,
                        [('node_network', 'resolution_network_file')])])
    func_ntwk.connect([(resampleICAmaps, connectivity_graph,
                        [('out_file', 'in_file')])])
    func_ntwk.connect([(resampleICAmaps, connectivity_graph,
                        [(('out_file', get_component_index_resampled),
                          'component_index')])])
    func_ntwk.connect([(connectivity_threshold, connectivity_graph,
                        [('t_value_threshold_file',
                          't_value_threshold_file')])])
    func_ntwk.connect([(connectivity_graph, graphCFFConverter,
                        [('network_file', 'gpickled_networks')])])

    # Uses the matching classification to separate the neuronal
    # connectivity graphs
    func_ntwk.connect([(inputnode_within, group_graphs,
                        [('subject_id', 'subject_id')])])
    func_ntwk.connect([(connectivity_graph, group_graphs,
                        [('network_file', 'in_file')])])
    func_ntwk.connect([(resampleICAmaps, group_graphs,
                        [(('out_file', get_component_index_resampled),
                          'component_index')])])
    func_ntwk.connect([(matching_classification, group_graphs,
                        [('stats_file', 'matching_stats')])])
    func_ntwk.connect([(group_graphs, grouped_graphs,
                        [('out_file', 'in_files')])])
    func_ntwk.connect([(grouped_graphs, neuronalCFFConverter,
                        [('out_files', 'gpickled_networks')])])

    # Groups the correlation graphs as above, calculates NetworkX measures,
    # outputs to a CSV file
    func_ntwk.connect([(inputnode_within, group_graphs_corr,
                        [('subject_id', 'subject_id')])])
    func_ntwk.connect([(connectivity_graph, group_graphs_corr,
                        [('correlation_network', 'in_file')])])
    func_ntwk.connect([(resampleICAmaps, group_graphs_corr,
                        [(('out_file', get_component_index_resampled),
                          'component_index')])])
    func_ntwk.connect([(matching_classification, group_graphs_corr,
                        [('stats_file', 'matching_stats')])])
    func_ntwk.connect([(group_graphs_corr, grouped_graphs_corr,
                        [('out_file', 'in_files')])])
    func_ntwk.connect([(grouped_graphs_corr, correlationCFFConverter,
                        [('out_files', 'gpickled_networks')])])
    func_ntwk.connect([(grouped_graphs_corr, remove_unconnected_corr,
                        [('out_files', 'in_files')])])
    func_ntwk.connect([(grouped_graphs_corr,
                        ConnectivityGraphNetworkXMetrics_correlation,
                        [('out_files', 'in_file')])])
    func_ntwk.connect([(grouped_graphs_corr, MergeCSVFiles_cor,
                        [(('out_files', pull_template_name),
                          'extra_field')])])
    func_ntwk.connect([(ConnectivityGraphNetworkXMetrics_correlation,
                        Matlab2CSV_nx_cor,
                        [("node_measures_matlab", "in_file")])])
    func_ntwk.connect([(Matlab2CSV_nx_cor, MergeCSVFiles_cor,
                        [("csv_files", "in_files")])])
    func_ntwk.connect([(MergeCSVFiles_cor, concatcsv_cor,
                        [("csv_file", "in_files")])])
    func_ntwk.connect([(concatcsv_cor, add_subjid_to_csv_cor,
                        [("out_name", "in_file")])])
    func_ntwk.connect([(inputnode_within, add_subjid_to_csv_cor,
                        [("subject_id", "extra_field")])])
    func_ntwk.connect([(inputnode_within, add_subjid_to_csv_cor,
                        [("subject_id", "out_file")])])

    # Groups the anticorrelation graphs as above, calculates NetworkX
    # measures, outputs to a CSV file
    func_ntwk.connect([(inputnode_within, group_graphs_anticorr,
                        [('subject_id', 'subject_id')])])
    func_ntwk.connect([(connectivity_graph, group_graphs_anticorr,
                        [('anticorrelation_network', 'in_file')])])
    func_ntwk.connect([(resampleICAmaps, group_graphs_anticorr,
                        [(('out_file', get_component_index_resampled),
                          'component_index')])])
    func_ntwk.connect([(matching_classification, group_graphs_anticorr,
                        [('stats_file', 'matching_stats')])])
    func_ntwk.connect([(group_graphs_anticorr, grouped_graphs_anticorr,
                        [('out_file', 'in_files')])])
    func_ntwk.connect([(grouped_graphs_anticorr, anticorrelationCFFConverter,
                        [('out_files', 'gpickled_networks')])])
    func_ntwk.connect([(grouped_graphs_anticorr, remove_unconnected_anticorr,
                        [('out_files', 'in_files')])])
    func_ntwk.connect([(grouped_graphs_anticorr,
                        ConnectivityGraphNetworkXMetrics_anticorrelation,
                        [('out_files', 'in_file')])])
    func_ntwk.connect([(grouped_graphs_anticorr, MergeCSVFiles_anticor,
                        [(('out_files', pull_template_name),
                          'extra_field')])])
    func_ntwk.connect([(ConnectivityGraphNetworkXMetrics_anticorrelation,
                        Matlab2CSV_nx_anticor,
                        [("node_measures_matlab", "in_file")])])
    func_ntwk.connect([(Matlab2CSV_nx_anticor, MergeCSVFiles_anticor,
                        [("csv_files", "in_files")])])
    func_ntwk.connect([(MergeCSVFiles_anticor, concatcsv_anticor,
                        [("csv_file", "in_files")])])
    func_ntwk.connect([(concatcsv_anticor, add_subjid_to_csv_anticor,
                        [("out_name", "in_file")])])
    func_ntwk.connect([(inputnode_within, add_subjid_to_csv_anticor,
                        [("subject_id", "extra_field")])])
    func_ntwk.connect([(inputnode_within, add_subjid_to_csv_anticor,
                        [("subject_id", "out_file")])])
    ConnectivityGraphNetworkXMetrics_correlation.inputs.ignore_exception = True
    ConnectivityGraphNetworkXMetrics_anticorrelation.inputs.ignore_exception = True

    # Create a higher-level workflow
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "subject_id", "functional_images", "fmri_ICA_maps", "ica_mask_image",
        "fmri_ICA_timecourse", "segmentation_file", "repetition_time",
        "resolution_network_file"
    ]), name="inputnode")

    if with_simple_timecourse_correlation:
        outputnode = pe.Node(interface=util.IdentityInterface(fields=[
            "matching_stats", "neuronal_ntwks", "neuronal_cff",
            "neuronal_regional_timecourse_stats",
"correlation_ntwks", "correlation_cff", "anticorrelation_ntwks", "anticorrelation_cff", "correlation_stats", "anticorrelation_stats", "simple_correlation_ntwks", "simple_correlation_cff" ]), name="outputnode") else: outputnode = pe.Node(interface=util.IdentityInterface(fields=[ "matching_stats", "neuronal_ntwks", "neuronal_cff", "neuronal_regional_timecourse_stats", "correlation_ntwks", "correlation_cff", "correlation_stats", "anticorrelation_stats", "anticorrelation_ntwks", "anticorrelation_cff" ]), name="outputnode") functional = pe.Workflow(name=name) functional.base_output_dir = name functional.base_dir = name functional.connect([(inputnode, func_ntwk, [ ("subject_id", "inputnode_within.subject_id"), ("functional_images", "inputnode_within.functional_images"), ("segmentation_file", "inputnode_within.segmentation_file"), ("repetition_time", "inputnode_within.repetition_time"), ("resolution_network_file", "inputnode_within.resolution_network_file") ])]) functional.connect([(func_ntwk, outputnode, [('grouped_graphs.out_files', 'neuronal_ntwks')])]) functional.connect([(func_ntwk, outputnode, [ ('neuronalCFFConverter.connectome_file', 'neuronal_cff') ])]) functional.connect([(func_ntwk, outputnode, [('neuronal_regional_timecourses.stats_file', 'neuronal_regional_timecourse_stats')])]) functional.connect([(func_ntwk, outputnode, [ ('remove_unconnected_corr.out_files', 'correlation_ntwks') ])]) functional.connect([(func_ntwk, outputnode, [ ('correlationCFFConverter.connectome_file', 'correlation_cff') ])]) functional.connect([(func_ntwk, outputnode, [ ('add_subjid_to_csv_cor.csv_file', 'correlation_stats') ])]) functional.connect([(func_ntwk, outputnode, [ ('remove_unconnected_anticorr.out_files', 'anticorrelation_ntwks') ])]) functional.connect([(func_ntwk, outputnode, [ ('anticorrelationCFFConverter.connectome_file', 'anticorrelation_cff') ])]) functional.connect([(func_ntwk, outputnode, [ ('add_subjid_to_csv_anticor.csv_file', 'anticorrelation_stats') ])]) functional.connect([(func_ntwk, outputnode, [ ('matching_classification.stats_file', 'matching_stats') ])]) if with_simple_timecourse_correlation: functional.connect([(func_ntwk, outputnode, [('mergeSTCC.out', 'simple_correlation_ntwks')])]) functional.connect([(func_ntwk, outputnode, [ ('TCcorrCFFConverter.connectome_file', 'simple_correlation_cff') ])]) return functional
def _run_interface(self, runtime):
    """Compute the average rs-fMRI signal for each GM ROI."""
    print("Compute average rs-fMRI signal for each cortical ROI")
    print("====================================================")

    fdata = nib.load(self.inputs.func_file).get_data()
    tp = fdata.shape[3]

    if self.inputs.parcellation_scheme != "Custom":
        resolutions = get_parcellation(self.inputs.parcellation_scheme)
    else:
        resolutions = self.inputs.atlas_info

    if self.inputs.apply_scrubbing:
        # Load the scrubbing FD and DVARS series
        FD = np.load(self.inputs.FD)
        DVARS = np.load(self.inputs.DVARS)
        # Evaluate the scrubbing mask
        FD_th = self.inputs.FD_th
        DVARS_th = self.inputs.DVARS_th
        FD_mask = np.array(np.nonzero(FD < FD_th))[0, :]
        DVARS_mask = np.array(np.nonzero(DVARS < DVARS_th))[0, :]
        index = np.sort(np.unique(np.concatenate((FD_mask, DVARS_mask)))) + 1
        index = np.concatenate(([0], index))
        log_scrubbing = ("DISCARDED time points after scrubbing: "
                         + str(FD.shape[0] - index.shape[0] + 1)
                         + " over " + str(FD.shape[0] + 1))
        print(log_scrubbing)
        np.save(os.path.abspath('tp_after_scrubbing.npy'), index)
        sio.savemat(os.path.abspath('tp_after_scrubbing.mat'),
                    {'index': index})
    else:
        index = np.linspace(0, tp - 1, tp).astype('int')

    # Loop through all the resolutions ('scale33', ..., 'scale500')
    for parkey, parval in resolutions.items():
        print("Resolution = " + parkey)

        # Open the corresponding ROI volume
        print("Open the corresponding ROI")
        for vol in self.inputs.roi_volumes:
            if parkey in vol:
                roi_fname = vol
                print(roi_fname)
        roi = nib.load(roi_fname)
        roiData = roi.get_data().astype(np.uint32)

        # Create the matrix, add node information from the parcellation,
        # and recover the ROI indices
        nROIs = parval['number_of_regions']
        print("Create the connection matrix (%s rois)" % nROIs)
        G = nx.Graph()
        gp = nx.read_graphml(parval['node_information_graphml'])
        ROI_idx = []
        for u, d in gp.nodes_iter(data=True):
            G.add_node(int(u), d)
            # Compute a position for the node based on the mean position of
            # the ROI in voxel coordinates (segmentation volume)
            G.node[int(u)]['dn_position'] = tuple(
                np.mean(np.where(roiData == int(d["dn_correspondence_id"])),
                        axis=1))
            ROI_idx.append(int(d["dn_correspondence_id"]))

        # Matrix of number of ROIs vs. timepoints
        odata = np.zeros((nROIs, tp), dtype=np.float32)

        # Loop through all the ROIs (current resolution)
        roi_line = 0
        for i in ROI_idx:
            odata[roi_line, :] = fdata[roiData == i].mean(axis=0)
            roi_line += 1

        np.save(os.path.abspath('averageTimeseries_%s.npy' % parkey), odata)
        sio.savemat(os.path.abspath('averageTimeseries_%s.mat' % parkey),
                    {'TCS': odata})

        # Fill the connectivity matrix
        i = -1
        for i_signal in odata:
            i += 1
            for j in xrange(i, nROIs):
                j_signal = odata[j, :]
                # Apply scrubbing
                value = np.corrcoef(i_signal[index], j_signal[index])[0, 1]
                G.add_edge(ROI_idx[i], ROI_idx[j], corr=value)

        # Store the network
        if 'gPickle' in self.inputs.output_types:
            nx.write_gpickle(G, 'connectome_%s.gpickle' % parkey)
        if 'mat' in self.inputs.output_types:
            # Edges
            size_edges = (parval['number_of_regions'],
                          parval['number_of_regions'])
            edge_keys = G.edges(data=True)[0][2].keys()
            edge_struct = {}
            for edge_key in edge_keys:
                edge_struct[edge_key] = nx.to_numpy_matrix(G, weight=edge_key)
            # Nodes
            size_nodes = parval['number_of_regions']
            node_keys = G.nodes(data=True)[0][1].keys()
            node_struct = {}
            for node_key in node_keys:
                if node_key == 'dn_position':
                    node_arr = np.zeros([size_nodes, 3], dtype=np.float)
                else:
                    node_arr = np.zeros(size_nodes, dtype=np.object_)
                node_n = 0
                for _, node_data in G.nodes(data=True):
                    node_arr[node_n] = node_data[node_key]
                    node_n += 1
                node_struct[node_key] = node_arr
            sio.savemat('connectome_%s.mat' % parkey,
                        mdict={'sc': edge_struct, 'nodes': node_struct})
        if 'graphml' in self.inputs.output_types:
            g2 = nx.Graph()
            for u_gml, d_gml in G.nodes(data=True):
                g2.add_node(u_gml, {
                    'dn_correspondence_id': d_gml['dn_correspondence_id'],
                    'dn_fsname': d_gml['dn_fsname'],
                    'dn_hemisphere': d_gml['dn_hemisphere'],
                    'dn_name': d_gml['dn_name'],
                    'dn_position_x': float(d_gml['dn_position'][0]),
                    'dn_position_y': float(d_gml['dn_position'][1]),
                    'dn_position_z': float(d_gml['dn_position'][2]),
                    'dn_region': d_gml['dn_region']
                })
            for u_gml, v_gml, d_gml in G.edges_iter(data=True):
                g2.add_edge(u_gml, v_gml, {'corr': float(d_gml['corr'])})
            nx.write_graphml(g2, 'connectome_%s.graphml' % parkey)

    if 'cff' in self.inputs.output_types:
        cvt = cmtk.CFFConverter()
        cvt.inputs.title = 'Connectome mapper'
        cvt.inputs.nifti_volumes = self.inputs.roi_volumes
        cvt.inputs.gpickled_networks = glob.glob(
            os.path.abspath("connectome_*.gpickle"))
        cvt.run()

    print("[ DONE ]")
    return runtime
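
# ---------------------------------------------------------------------------
# A small sketch (not part of the interface above) of how one of the
# gpickled connectomes it writes can be inspected, using the same
# NetworkX 1.x API as the rest of this module. The file name follows the
# 'connectome_<scale>.gpickle' pattern; 'scale33' is just an example key.
def _example_inspect_connectome(path='connectome_scale33.gpickle'):
    import networkx as nx
    G = nx.read_gpickle(path)
    print("%d nodes, %d edges" % (G.number_of_nodes(), G.number_of_edges()))
    # Each edge stores the Pearson correlation of the two regional
    # timecourses under the 'corr' key.
    for u, v, d in G.edges(data=True)[:5]:
        print("%s -- %s : corr = %f" % (u, v, d['corr']))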
def create_fsconnectivity_pipeline(name="fsconnectivity",
                                   manual_seg_rois=False,
                                   parcellation_name="scale33"):
    inputfields = [
        "subjects_dir", "subject_id", "dwi", "bvecs", "bvals",
        "resolution_network_file"
    ]

    inputnode = pe.Node(interface=util.IdentityInterface(fields=inputfields),
                        name="inputnode")

    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=[
            # Outputs from the DWI workflow
            "single_fiber_mask", "fa", "rgb_fa", "md", "mode", "t1",
            "t1_brain", "wm_mask", "term_mask", "aparc_aseg",
            "tissue_class_files", "gm_prob", "wm_prob", "csf_prob",
            # Outputs from registration and labelling
            "rois_to_dwi", "wmmask_to_dwi", "termmask_to_dwi",
            "dwi_to_t1_matrix", "highres_t1_to_dwi_matrix",
            # T1 in DWI space for reference
            "t1_to_dwi",
            # Outputs from tracking
            "fiber_odfs", "fiber_tracks_tck_dwi", "fiber_tracks_trk_t1",
            # Outputs from connectivity mapping
            "connectome", "nxstatscff", "nxmatlab", "nxcsv", "cmatrix",
            "matrix_file",
        ]),
        name="outputnode")

    t1_to_dwi = pe.Node(interface=fsl.ApplyXfm(), name='t1_to_dwi')
    termmask_to_dwi = t1_to_dwi.clone("termmask_to_dwi")

    dtiproc = damaged_brain_dti_processing("dtiproc", use_FAST_masks=True)
    reg_label = create_reg_and_label_wf("reg_label", manual_seg_rois=True)

    FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(),
                               name='fssource')
    FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(),
                                 name='fssourceLH')
    FreeSurferSourceLH.inputs.hemi = 'lh'
    FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(),
                                 name='fssourceRH')
    FreeSurferSourceRH.inputs.hemi = 'rh'
    """
    Creating the workflow's nodes
    =============================
    """
    """
    Conversion nodes
    ----------------
    """
    """
    A number of conversion operations are required to obtain NIFTI files
    from the FreeSurferSource for each subject. Nodes are used to convert
    the following:

        * Original structural image to NIFTI
        * Pial, white, inflated, and spherical surfaces for both the left
          and right hemispheres are converted to GIFTI for visualization
          in ConnectomeViewer
        * Parcellated annotation files for the left and right hemispheres
          are also converted to GIFTI
    """
    mri_convert_Brain = pe.Node(interface=fs.MRIConvert(),
                                name='mri_convert_Brain')
    mri_convert_Brain.inputs.out_type = 'nii'
    mri_convert_ROI_scale500 = mri_convert_Brain.clone(
        'mri_convert_ROI_scale500')

    mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH')
    mris_convertLH.inputs.out_datatype = 'gii'
    mris_convertRH = mris_convertLH.clone('mris_convertRH')
    mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite')
    mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite')
    mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated')
    mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated')
    mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels')
    mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels')
    """
    Parcellation is performed given the aparc+aseg image from FreeSurfer.
    The CMTK Parcellation step subdivides these regions to return a
    higher-resolution parcellation scheme. The scheme is selected via the
    ``parcellation_name`` argument (default "scale33"); the finest scheme,
    "scale500", returns 1015 regions.
    """
    parcellate = pe.Node(interface=cmtk.Parcellate(), name="Parcellate")
    parcellate.inputs.parcellation_name = parcellation_name
    """
    The CreateMatrix interface takes in the remapped aparc+aseg image as
    well as the label dictionary and fiber tracts, and outputs a number of
    different files.
    The most important of these is the connectivity network itself, which is
    stored as a 'gpickle' and can be loaded using Python's NetworkX package
    (see the CreateMatrix docstring). Also output are various NumPy arrays
    containing detailed tract information, such as the start and endpoint
    regions, and statistics on the mean and standard deviation of the fiber
    length for each connection. These matrices can be used in the
    ConnectomeViewer to plot the specific tracts that connect between
    user-selected regions. Here we choose the Lausanne2008 parcellation
    scheme, since we are incorporating the CMTK parcellation step.
    """
    creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix")
    creatematrix.inputs.count_region_intersections = True
    """
    Next we define the endpoint of this tutorial, which is the CFFConverter
    node, as well as a few nodes which use the Nipype Merge utility. These
    are useful for passing lists of the files we want packaged in our CFF
    file. The inspect.getfile command is used to package this script into
    the resulting CFF file, so that it is easy to look back at the
    processing parameters that were used.
    """
    CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter")
    CFFConverter.inputs.script_files = op.abspath(
        inspect.getfile(inspect.currentframe()))
    giftiSurfaces = pe.Node(interface=util.Merge(6), name="GiftiSurfaces")
    giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels")
    niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes")
    fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays")
    """
    We also create a node to calculate several network metrics on our
    resulting file, and another CFF converter which will be used to package
    these networks into a single file.
    """
    networkx = create_networkx_pipeline(name='networkx')
    cmats_to_csv = create_cmats_to_csv_pipeline(name='cmats_to_csv')
    nfibs_to_csv = pe.Node(interface=misc.Matlab2CSV(), name='nfibs_to_csv')
    merge_nfib_csvs = pe.Node(interface=misc.MergeCSVFiles(),
                              name='merge_nfib_csvs')
    merge_nfib_csvs.inputs.extra_column_heading = 'Subject'
    merge_nfib_csvs.inputs.out_file = 'fibers.csv'
    NxStatsCFFConverter = pe.Node(interface=cmtk.CFFConverter(),
                                  name="NxStatsCFFConverter")
    NxStatsCFFConverter.inputs.script_files = op.abspath(
        inspect.getfile(inspect.currentframe()))

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    workflow.connect([(inputnode, dtiproc,
                       [("subjects_dir", "inputnode.subjects_dir"),
                        ("subject_id", "inputnode.subject_id"),
                        ("dwi", "inputnode.dwi"),
                        ("bvecs", "inputnode.bvecs"),
                        ("bvals", "inputnode.bvals")])])

    workflow.connect([(inputnode, reg_label,
                       [("subject_id", "inputnode.subject_id")])])
    #workflow.connect([(mri_convert_ROI_scale500, reg_label,
    #                   [("out_file", "inputnode.manual_seg_rois")])])
    workflow.connect([(dtiproc, reg_label,
                       [("outputnode.aparc_aseg",
                         "inputnode.manual_seg_rois")])])
    workflow.connect([(dtiproc, reg_label, [
        ("outputnode.wm_mask", "inputnode.wm_mask"),
        ("outputnode.term_mask", "inputnode.termination_mask"),
        ("outputnode.fa", "inputnode.fa"),
        ("outputnode.aparc_aseg", "inputnode.aparc_aseg"),
    ])])

    workflow.connect([(reg_label, t1_to_dwi,
                       [("outputnode.t1_to_dwi_matrix", "in_matrix_file")])])
    workflow.connect([(dtiproc, t1_to_dwi, [("outputnode.t1", "in_file")])])
    workflow.connect([(dtiproc, t1_to_dwi, [("outputnode.fa", "reference")])])
    workflow.connect([(inputnode, t1_to_dwi,
                       [(('subject_id', add_subj_name_to_T1_dwi),
                         'out_file')])])

    workflow.connect([(reg_label, termmask_to_dwi,
                       [("outputnode.t1_to_dwi_matrix",
"in_matrix_file")])]) workflow.connect([(dtiproc, termmask_to_dwi, [("outputnode.term_mask", "in_file")])]) workflow.connect([(dtiproc, termmask_to_dwi, [("outputnode.fa", "reference")])]) ''' Connect outputnode ''' workflow.connect([(t1_to_dwi, outputnode, [("out_file", "t1_to_dwi")])]) workflow.connect([(dtiproc, outputnode, [ ("outputnode.t1", "t1"), ("outputnode.wm_prob", "wm_prob"), ("outputnode.gm_prob", "gm_prob"), ("outputnode.csf_prob", "csf_prob"), ("outputnode.single_fiber_mask", "single_fiber_mask"), ("outputnode.fa", "fa"), ("outputnode.rgb_fa", "rgb_fa"), ("outputnode.md", "md"), ("outputnode.mode", "mode"), ("outputnode.t1_brain", "t1_brain"), ("outputnode.wm_mask", "wm_mask"), ("outputnode.term_mask", "term_mask"), ("outputnode.aparc_aseg", "aparc_aseg"), ("outputnode.tissue_class_files", "tissue_class_files"), ])]) workflow.connect([(reg_label, outputnode, [ ("outputnode.rois_to_dwi", "rois_to_dwi"), ("outputnode.wmmask_to_dwi", "wmmask_to_dwi"), ("outputnode.termmask_to_dwi", "termmask_to_dwi"), ("outputnode.dwi_to_t1_matrix", "dwi_to_t1_matrix"), ("outputnode.highres_t1_to_dwi_matrix", "highres_t1_to_dwi_matrix"), ])]) workflow.connect([(dtiproc, outputnode, [("outputnode.aparc_aseg", "rois") ])]) tracking = anatomically_constrained_tracking("tracking") workflow.connect([(inputnode, tracking, [ ("subject_id", "inputnode.subject_id"), ("dwi", "inputnode.dwi"), ("bvecs", "inputnode.bvecs"), ("bvals", "inputnode.bvals"), ])]) workflow.connect([(reg_label, tracking, [ ("outputnode.wmmask_to_dwi", "inputnode.wm_mask"), ("outputnode.termmask_to_dwi", "inputnode.termination_mask"), ("outputnode.dwi_to_t1_matrix", "inputnode.registration_matrix_file"), ])]) workflow.connect([(dtiproc, tracking, [ ("outputnode.t1", "inputnode.registration_image_file") ])]) workflow.connect([(dtiproc, tracking, [("outputnode.single_fiber_mask", "inputnode.single_fiber_mask")])]) workflow.connect([(tracking, outputnode, [ ("outputnode.fiber_odfs", "fiber_odfs"), ("outputnode.fiber_tracks_tck_dwi", "fiber_tracks_tck_dwi"), ("outputnode.fiber_tracks_trk_t1", "fiber_tracks_trk_t1"), ])]) workflow.connect([(tracking, creatematrix, [("outputnode.fiber_tracks_trk_t1", "tract_file")])]) workflow.connect([(inputnode, FreeSurferSource, [("subjects_dir", "subjects_dir")])]) workflow.connect([(inputnode, FreeSurferSource, [("subject_id", "subject_id")])]) workflow.connect([(inputnode, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])]) workflow.connect([(inputnode, FreeSurferSourceLH, [("subject_id", "subject_id")])]) workflow.connect([(inputnode, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])]) workflow.connect([(inputnode, FreeSurferSourceRH, [("subject_id", "subject_id")])]) workflow.connect([(inputnode, parcellate, [("subjects_dir", "subjects_dir") ])]) workflow.connect([(inputnode, parcellate, [("subject_id", "subject_id")])]) workflow.connect([(parcellate, mri_convert_ROI_scale500, [('roi_file', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ workflow.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file') ])]) workflow.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file') ])]) workflow.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) workflow.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) workflow.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) workflow.connect([(FreeSurferSourceRH, mris_convertRHinflated, 
                       [('inflated', 'in_file')])])
    """
    The annotation files are converted using the pial surface as a map via
    the MRIsConvert interface. One of the functions defined earlier is used
    to select the lh.aparc.annot and rh.aparc.annot files specifically
    (rather than e.g. rh.aparc.a2009s.annot) from the output list given by
    the FreeSurferSource.
    """
    workflow.connect([(FreeSurferSourceLH, mris_convertLHlabels,
                       [('pial', 'in_file')])])
    workflow.connect([(FreeSurferSourceRH, mris_convertRHlabels,
                       [('pial', 'in_file')])])
    workflow.connect([(FreeSurferSourceLH, mris_convertLHlabels,
                       [(('annot', select_aparc_annot), 'annot_file')])])
    workflow.connect([(FreeSurferSourceRH, mris_convertRHlabels,
                       [(('annot', select_aparc_annot), 'annot_file')])])

    workflow.connect(inputnode, 'resolution_network_file',
                     creatematrix, 'resolution_network_file')
    workflow.connect([(inputnode, creatematrix,
                       [("subject_id", "out_matrix_file")])])
    workflow.connect([(inputnode, creatematrix,
                       [("subject_id", "out_matrix_mat_file")])])
    workflow.connect([(parcellate, creatematrix, [("roi_file", "roi_file")])])

    workflow.connect([(creatematrix, fiberDataArrays,
                       [("endpoint_file", "in1")])])
    workflow.connect([(creatematrix, fiberDataArrays,
                       [("endpoint_file_mm", "in2")])])
    workflow.connect([(creatematrix, fiberDataArrays,
                       [("fiber_length_file", "in3")])])
    workflow.connect([(creatematrix, fiberDataArrays,
                       [("fiber_label_file", "in4")])])

    workflow.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])])
    workflow.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])])
    workflow.connect([(mris_convertLHwhite, giftiSurfaces,
                       [("converted", "in3")])])
    workflow.connect([(mris_convertRHwhite, giftiSurfaces,
                       [("converted", "in4")])])
    workflow.connect([(mris_convertLHinflated, giftiSurfaces,
                       [("converted", "in5")])])
    workflow.connect([(mris_convertRHinflated, giftiSurfaces,
                       [("converted", "in6")])])

    workflow.connect([(mris_convertLHlabels, giftiLabels,
                       [("converted", "in1")])])
    workflow.connect([(mris_convertRHlabels, giftiLabels,
                       [("converted", "in2")])])

    workflow.connect([(giftiSurfaces, CFFConverter,
                       [("out", "gifti_surfaces")])])
    workflow.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])])
    workflow.connect([(creatematrix, CFFConverter,
                       [("matrix_files", "gpickled_networks")])])
    workflow.connect([(fiberDataArrays, CFFConverter,
                       [("out", "data_files")])])
    workflow.connect([(inputnode, CFFConverter, [("subject_id", "title")])])
    workflow.connect([(inputnode, CFFConverter,
                       [(('subject_id', add_subj_name_to_Connectome),
                         'out_file')])])
    """
    The graph theoretical metrics which have been generated are placed into
    another CFF file.
""" workflow.connect([(inputnode, networkx, [("subject_id", "inputnode.extra_field")])]) workflow.connect([(creatematrix, networkx, [("intersection_matrix_file", "inputnode.network_file")])]) workflow.connect([(networkx, NxStatsCFFConverter, [("outputnode.network_files", "gpickled_networks")])]) workflow.connect([(giftiSurfaces, NxStatsCFFConverter, [("out", "gifti_surfaces")])]) workflow.connect([(giftiLabels, NxStatsCFFConverter, [("out", "gifti_labels")])]) workflow.connect([(fiberDataArrays, NxStatsCFFConverter, [("out", "data_files")])]) workflow.connect([(inputnode, NxStatsCFFConverter, [("subject_id", "title") ])]) workflow.connect([(inputnode, NxStatsCFFConverter, [ (('subject_id', add_subj_name_to_nxConnectome), 'out_file') ])]) workflow.connect([(CFFConverter, outputnode, [("connectome_file", "connectome")])]) workflow.connect([(NxStatsCFFConverter, outputnode, [("connectome_file", "nxstatscff")])]) workflow.connect([(creatematrix, outputnode, [("intersection_matrix_file", "matrix_file")])]) workflow.connect([(creatematrix, outputnode, [("matrix_mat_file", "cmatrix")])]) workflow.connect([(networkx, outputnode, [("outputnode.csv_files", "nxcsv") ])]) return workflow
def create_rsfmri_correlation_network(name="functional",
                                      have_nodes_already=False):
    inputnode_within = pe.Node(interface=util.IdentityInterface(fields=[
        "subject_id", "functional_images", "segmentation_file",
        "resolution_network_file"
    ]), name="inputnode_within")

    # Create the resampling node. Functional images must have the same
    # dimensions as the segmentation file.
    resampleFunctional = pe.MapNode(interface=fs.MRIConvert(),
                                    name='resampleFunctional',
                                    iterfield=['in_file'])
    resampleFunctional.inputs.out_type = 'nii'

    # Create the nodes
    if not have_nodes_already:
        createnodes = pe.Node(interface=cmtk.CreateNodes(),
                              name="CreateNodes")

    # Define the correlation mapping node, the CFF converter, and the
    # NetworkX MATLAB -> comma-separated value nodes
    time_course_correlation = pe.Node(
        interface=ci.SimpleTimeCourseCorrelationGraph(),
        name='time_course_correlation')
    correlationCFFConverter = pe.Node(interface=cmtk.CFFConverter(),
                                      name="correlationCFFConverter")
    correlationCFFConverter.inputs.out_file = 'correlation.cff'
    correlationNetworkXMetrics = pe.Node(interface=cmtk.NetworkXMetrics(),
                                         name="correlationNetworkXMetrics")
    correlationMatlab2CSV_nx = pe.Node(interface=misc.Matlab2CSV(),
                                       name="correlationMatlab2CSV_nx")

    ### Create the workflow ###
    cor_ntwk = pe.Workflow(name='cor_ntwk')

    # Resamples the functional images to match the segmentation file
    cor_ntwk.connect([(inputnode_within, resampleFunctional,
                       [('functional_images', 'in_file')])])
    cor_ntwk.connect([(inputnode_within, resampleFunctional,
                       [('segmentation_file', 'reslice_like')])])
    cor_ntwk.connect([(resampleFunctional, time_course_correlation,
                       [('out_file', 'in_files')])])

    # Creates the nodes for the graph from the input segmentation file and
    # resolution network file
    if not have_nodes_already:
        cor_ntwk.connect([(inputnode_within, createnodes,
                           [('segmentation_file', 'roi_file')])])
        cor_ntwk.connect([(inputnode_within, createnodes,
                           [('resolution_network_file',
                             'resolution_network_file')])])
        cor_ntwk.connect([(createnodes, time_course_correlation,
                           [('node_network', 'structural_network')])])
    else:
        cor_ntwk.connect([(inputnode_within, time_course_correlation,
                           [('resolution_network_file',
                             'structural_network')])])

    # Creates the correlation graph and stores it in a CFF file
    cor_ntwk.connect([(inputnode_within, time_course_correlation,
                       [('segmentation_file', 'segmentation_file')])])
    cor_ntwk.connect([(time_course_correlation, correlationCFFConverter,
                       [('network_file', 'gpickled_networks')])])
    cor_ntwk.connect([(time_course_correlation, correlationNetworkXMetrics,
                       [('network_file', 'in_file')])])
    cor_ntwk.connect([(correlationNetworkXMetrics, correlationMatlab2CSV_nx,
                       [("node_measures_matlab", "in_file")])])

    # Create a higher-level workflow
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "subject_id", "functional_images", "segmentation_file",
        "resolution_network_file"
    ]), name="inputnode")

    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=["correlation_ntwk", "correlation_cff"]),
        name="outputnode")

    correlation = pe.Workflow(name=name)
    correlation.base_output_dir = name
    correlation.base_dir = name
    correlation.connect([(inputnode, cor_ntwk, [
        ("subject_id", "inputnode_within.subject_id"),
        ("functional_images", "inputnode_within.functional_images"),
        ("segmentation_file", "inputnode_within.segmentation_file"),
        ("resolution_network_file",
         "inputnode_within.resolution_network_file")
    ])])
    correlation.connect([(cor_ntwk, outputnode,
                          [('time_course_correlation.network_file',
                            'correlation_ntwk')])])
    correlation.connect([(cor_ntwk, outputnode,
                          [('correlationCFFConverter.connectome_file',
                            'correlation_cff')])])
    return correlation
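
# ---------------------------------------------------------------------------
# Usage sketch (illustration only) for ``create_rsfmri_correlation_network``.
# All inputs are hypothetical placeholders. When ``have_nodes_already`` is
# True, ``resolution_network_file`` should already contain the node
# definitions, since it is passed straight through as the structural network.
def _example_run_rsfmri_correlation():
    corr = create_rsfmri_correlation_network(name="functional")
    corr.inputs.inputnode.subject_id = 'subj1'
    corr.inputs.inputnode.functional_images = ['/data/subj1/rest.nii']
    corr.inputs.inputnode.segmentation_file = '/data/subj1/ROI_scale33.nii.gz'
    corr.inputs.inputnode.resolution_network_file = '/data/resolution83.graphml'
    corr.run()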