def extract_timeseries_nativespace(SinkTag="connectivity",
                                   wf_name="extract_timeseries_nativespace",
                                   global_signal=True):
    # This workflow transforms the atlas back to native (EPI) space and extracts
    # the regional timeseries there with TsExtractor (defined elsewhere in this module).

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import PUMI.func_preproc.func2standard as transform
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    wf = nipype.Workflow(wf_name)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'atlas',
            'labels',
            'modules',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'inv_linear_reg_mtrx',
            'inv_nonlinear_reg_mtrx',
            'func',
            'gm_mask',
            'confounds',
            'confound_names'
        ]),
        name="inputspec")

    # transform the atlas back to native EPI space
    atlas2native = transform.atlas2func(stdreg=globals._regType_)
    wf.connect(inputspec, 'atlas', atlas2native, 'inputspec.atlas')
    wf.connect(inputspec, 'anat', atlas2native, 'inputspec.anat')
    wf.connect(inputspec, 'inv_linear_reg_mtrx', atlas2native, 'inputspec.inv_linear_reg_mtrx')
    wf.connect(inputspec, 'inv_nonlinear_reg_mtrx', atlas2native, 'inputspec.inv_nonlinear_reg_mtrx')
    wf.connect(inputspec, 'func', atlas2native, 'inputspec.func')
    wf.connect(inputspec, 'gm_mask', atlas2native, 'inputspec.example_func')
    wf.connect(inputspec, 'confounds', atlas2native, 'inputspec.confounds')
    wf.connect(inputspec, 'confound_names', atlas2native, 'inputspec.confound_names')

    # extract the regional timeseries from the native-space atlas
    extract_timeseries = pe.MapNode(
        interface=utility.Function(
            input_names=['labels', 'labelmap', 'func', 'mask', 'global_signal'],
            output_names=['out_file', 'labels', 'out_gm_label'],
            function=TsExtractor),
        iterfield=['labelmap', 'func', 'mask'],
        name='extract_timeseries')
    extract_timeseries.inputs.global_signal = global_signal
    wf.connect(atlas2native, 'outputspec.atlas2func', extract_timeseries, 'labelmap')
    wf.connect(inputspec, 'labels', extract_timeseries, 'labels')
    wf.connect(inputspec, 'gm_mask', extract_timeseries, 'mask')
    wf.connect(inputspec, 'func', extract_timeseries, 'func')

    # save the regional timeseries
    ds_regts = pe.Node(interface=io.DataSink(), name='ds_regts')
    ds_regts.inputs.base_directory = globals._SinkDir_
    ds_regts.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]
    wf.connect(extract_timeseries, 'out_file', ds_regts, 'regional_timeseries')

    # QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)
    wf.connect(inputspec, 'modules', timeseries_qc, 'inputspec.modules')
    wf.connect(inputspec, 'atlas', timeseries_qc, 'inputspec.atlas')
    wf.connect(extract_timeseries, 'out_file', timeseries_qc, 'inputspec.timeseries')

    # identity mapping for the outputs
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'out_gm_label']),
        name='outputspec')
    wf.connect(extract_timeseries, 'out_file', outputspec, 'timeseries')
    wf.connect(extract_timeseries, 'out_gm_label', outputspec, 'out_gm_label')

    return wf
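
# --- illustrative usage sketch ---------------------------------------------
# A minimal example of how the native-space extractor could be embedded in a
# parent pipeline. This is an assumption for illustration only and is not part
# of PUMI: the upstream workflow ``preproc_wf`` and its output field names
# ('outputspec.func', 'outputspec.anat', ...) are hypothetical placeholders
# that would normally come from the corresponding preprocessing workflows
# (confound_names is omitted here for brevity).
def _example_extract_timeseries_nativespace(preproc_wf, atlas_file, labels, modules):
    import nipype.pipeline as pe

    pipeline = pe.Workflow(name="example_nativespace_pipeline")
    ts_wf = extract_timeseries_nativespace(global_signal=False)

    # static inputs can be set directly on the inputspec
    ts_wf.inputs.inputspec.atlas = atlas_file
    ts_wf.inputs.inputspec.labels = labels
    ts_wf.inputs.inputspec.modules = modules

    # subject-level inputs are connected from the (hypothetical) upstream workflow
    pipeline.connect(preproc_wf, 'outputspec.func', ts_wf, 'inputspec.func')
    pipeline.connect(preproc_wf, 'outputspec.anat', ts_wf, 'inputspec.anat')
    pipeline.connect(preproc_wf, 'outputspec.gm_mask', ts_wf, 'inputspec.gm_mask')
    pipeline.connect(preproc_wf, 'outputspec.inv_linear_reg_mtrx', ts_wf, 'inputspec.inv_linear_reg_mtrx')
    pipeline.connect(preproc_wf, 'outputspec.inv_nonlinear_reg_mtrx', ts_wf, 'inputspec.inv_nonlinear_reg_mtrx')
    pipeline.connect(preproc_wf, 'outputspec.confounds', ts_wf, 'inputspec.confounds')
    return pipeline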
def extract_timeseries(SinkTag="connectivity", wf_name="extract_timeseries", modularise=True):
    ########################################################################
    # Extract timeseries
    ########################################################################
    # Extracts regional timeseries from standard-space functional images with
    # nilearn's SignalExtraction. If modularise is True, the atlas is first
    # re-labelled (via relabel_atlas, defined elsewhere in this module) so that
    # regions belonging to the same module follow each other.

    import os
    import nipype.interfaces.nilearn as learn
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # identity mapping for the input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'std_func',
            'atlas_file',  # nii labelmap (or 4D probmaps)
            'labels',      # list of short names of the regions
            'modules'      # list of modules of the regions
        ]),
        name='inputspec')

    # re-label the atlas so that regions belonging to the same module follow each other
    if modularise:
        relabel_atls = pe.Node(
            interface=Function(
                input_names=['atlas_file', 'modules', 'labels'],
                output_names=['relabelled_atlas_file', 'reordered_modules',
                              'reordered_labels', 'newlabels_file'],
                function=relabel_atlas),
            name='relabel_atlas')

        # save the relabelled atlas
        ds_nii = pe.Node(interface=io.DataSink(), name='ds_relabeled_atlas')
        ds_nii.inputs.base_directory = SinkDir
        ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

        # save the reordered labels
        ds_newlabels = pe.Node(interface=io.DataSink(), name='ds_newlabels')
        ds_newlabels.inputs.base_directory = SinkDir
        ds_newlabels.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    extract_timeseries = pe.MapNode(
        interface=learn.SignalExtraction(detrend=False),
        iterfield=['in_file'],
        name='extract_timeseries')

    # save the regional timeseries
    ds_txt = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_txt.inputs.base_directory = SinkDir
    ds_txt.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".tsv")]

    # QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)

    outputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'timeseries_file',
            'relabelled_atlas_file',
            'reordered_modules',
            'reordered_labels'
        ]),
        name='outputspec')

    # create the workflow
    analysisflow = pe.Workflow(wf_name)
    analysisflow.connect(inputspec, 'std_func', extract_timeseries, 'in_file')
    if modularise:
        analysisflow.connect(inputspec, 'atlas_file', relabel_atls, 'atlas_file')
        analysisflow.connect(inputspec, 'modules', relabel_atls, 'modules')
        analysisflow.connect(inputspec, 'labels', relabel_atls, 'labels')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', extract_timeseries, 'label_files')
        analysisflow.connect(relabel_atls, 'reordered_labels', extract_timeseries, 'class_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', timeseries_qc, 'inputspec.modules')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', timeseries_qc, 'inputspec.atlas')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', ds_nii, 'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'newlabels_file', ds_newlabels, 'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', outputspec, 'relabelled_atlas_file')
        analysisflow.connect(relabel_atls, 'reordered_labels', outputspec, 'reordered_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', outputspec, 'reordered_modules')
    else:
        analysisflow.connect(inputspec, 'atlas_file', extract_timeseries, 'label_files')
        analysisflow.connect(inputspec, 'labels', extract_timeseries, 'class_labels')
        analysisflow.connect(inputspec, 'modules', timeseries_qc, 'inputspec.modules')
        analysisflow.connect(inputspec, 'atlas_file', timeseries_qc, 'inputspec.atlas')
        analysisflow.connect(inputspec, 'atlas_file', outputspec, 'relabelled_atlas_file')
        analysisflow.connect(inputspec, 'labels', outputspec, 'reordered_labels')
        analysisflow.connect(inputspec, 'modules', outputspec, 'reordered_modules')

    analysisflow.connect(extract_timeseries, 'out_file', ds_txt, 'regional_timeseries')
    analysisflow.connect(extract_timeseries, 'out_file', timeseries_qc, 'inputspec.timeseries')
    analysisflow.connect(extract_timeseries, 'out_file', outputspec, 'timeseries_file')

    return analysisflow
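
# --- illustrative usage sketch ---------------------------------------------
# A minimal example of how the standard-space extractor could be configured.
# This is an assumption for illustration only and is not part of PUMI: the
# file path and the label/module lists below are hypothetical placeholders;
# any labelmap with matching region and module lists would do.
def _example_extract_timeseries(std_func_files):
    ts_wf = extract_timeseries(modularise=True)
    ts_wf.inputs.inputspec.std_func = std_func_files              # preprocessed 4D EPI(s) in standard space
    ts_wf.inputs.inputspec.atlas_file = "atlas_labelmap.nii.gz"   # placeholder labelmap path
    ts_wf.inputs.inputspec.labels = ["region_1", "region_2"]      # placeholder region names
    ts_wf.inputs.inputspec.modules = ["module_A", "module_A"]     # placeholder module of each region
    return ts_wf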