Example #1
# Assumed imports for this snippet; `infile_to_outfile` is a small
# pass-through helper defined elsewhere in the PALS package.
from nipype.interfaces import fsl
from nipype.interfaces.utility import Function
from nipype.pipeline.engine import MapNode


def apply_xfm_node(config: dict, **kwargs):
    '''
    Parses config file to return the desired apply_xfm node.

    Parameters
    ----------
    config : dict
        PALS config file
    kwargs
        Keyword arguments to send to the registration method.

    Returns
    -------
    MapNode
    '''

    if not config['Analysis']['Registration']:
        # No registration; no xfm to apply.
        n = MapNode(Function(function=infile_to_outfile,
                             input_names=['in_file', 'in_matrix_file'],
                             output_names='out_file'),
                    name='transformation_skip',
                    iterfield=['in_file', 'in_matrix_file'])
    else:
        n = MapNode(fsl.FLIRT(apply_xfm=True,
                              reference=config['Registration']['reference']),
                    name='transformation_flirt',
                    iterfield=['in_file', 'in_matrix_file'])
    return n
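
For context, a hypothetical minimal PALS-style config is enough to drive the dispatch above; only the keys the function actually reads are shown, and all paths below are made up for illustration.

config = {
    'Analysis': {'Registration': True},
    'Registration': {'reference': '/path/to/MNI152_T1_2mm.nii.gz'},
}

node = apply_xfm_node(config)  # MapNode wrapping fsl.FLIRT(apply_xfm=True)
node.inputs.in_file = ['sub-01_T1w.nii.gz']
node.inputs.in_matrix_file = ['sub-01_affine.mat']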
Example #2
# Imports as in Example #1; `reg_no_reg` is an identity helper (in -> out)
# defined elsewhere in the PALS package.
def registration_node(config: dict, **kwargs):
    '''
    Parses config file to return the desired registration method.

    Parameters
    ----------
    config : dict
        PALS config file
    kwargs
        Keyword arguments to send to registration method.

    Returns
    -------
    MapNode
    '''
    # Get registration method
    reg_method = config['Analysis']['RegistrationMethod']
    if not config['Analysis']['Registration']:
        # No registration; in -> out
        n = MapNode(Function(function=reg_no_reg,
                             input_names=['in_file'],
                             output_names=['out_file', 'out_matrix_file']),
                    name='registration_identity',
                    iterfield='in_file')
    elif reg_method.lower() == 'flirt':
        # Use FLIRT
        n = MapNode(fsl.FLIRT(),
                    name='registration_flirt',
                    iterfield='in_file')
        for k, v in kwargs.items():
            setattr(n.inputs, k, v)
    else:
        raise NotImplementedError(
            f'Registration method {reg_method} not implemented.')
    return n
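
The **kwargs forwarding above simply sets each keyword as an input on the FLIRT node, so any standard fsl.FLIRT input can be passed through. A small hypothetical sketch:

config = {'Analysis': {'Registration': True, 'RegistrationMethod': 'flirt'}}

node = registration_node(config, cost='mutualinfo', dof=12)
assert node.inputs.cost == 'mutualinfo'  # forwarded via setattr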
Example #3
def extraction_node(config: dict, **kwargs):
    '''
    Parses config file to return the desired brain extraction method.

    Parameters
    ----------
    config : dict
        PALS config file
    kwargs
        Keyword arguments to send to brain extraction method.

    Returns
    -------
    MapNode
    '''
    # Get extraction type
    extract_type = config['Analysis']['BrainExtractionMethod']
    if not config['Analysis']['BrainExtraction']:
        # No brain extraction; in -> out
        n = MapNode(Function(function=infile_to_outfile,
                             input_names='in_file',
                             output_names='out_file'),
                    name='extract_skip',
                    iterfield='in_file')
        return n
    elif extract_type.lower() == 'bet':
        n = MapNode(fsl.BET(**kwargs),
                    name='extraction_bet',
                    iterfield='in_file')
        return n
    else:
        raise NotImplementedError(
            f'Extraction method {extract_type} not implemented.')
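
As in the previous examples, keyword arguments are forwarded to the extraction interface, here directly into the fsl.BET constructor. A hypothetical call:

config = {'Analysis': {'BrainExtraction': True,
                       'BrainExtractionMethod': 'bet'}}

bet_node = extraction_node(config, frac=0.4, robust=True)  # standard BET inputs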
Example #4
# Assumed context for this snippet: pe is nipype.pipeline.engine,
# IdentityInterface and Function come from nipype.interfaces.utility, and
# `setup_node` and `calc_friston_twenty_four` are helpers defined elsewhere
# in the host package.
def fristons_twenty_four_wf(wf_name='fristons_twenty_four'):
    """ The main purpose of this workflow is to calculate 24 parameters including
    the 6 motion parameters of the current volume and the preceding volume,
    plus each of these values squared.

    Parameters
    ----------
    wf_name: str
        Workflow name

    Returns
    -------
    wf: workflow object

    Nipype Inputs
    -------------
    f24_input.in_file: str
        Path to the input movement file from motion correction.

    Nipype Outputs
    --------------
    f24_output.out_file: str
        Path to 1D file containing the Friston-24 parameters.

    References
    ----------
    .. [1] Friston, K. J., Williams, S., Howard, R., Frackowiak, R. S., & Turner, R. (1996).
           Movement-related effects in fMRI time-series. Magnetic Resonance in Medicine, 35(3), 346-355.
    """
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
    ]

    out_fields = [
        "out_file",
    ]

    f24_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name='f24_input')

    calc_friston = setup_node(Function(input_names=['in_file'],
                                       output_names=['out_file'],
                                       function=calc_friston_twenty_four),
                              name='calc_friston')

    f24_output = setup_node(IdentityInterface(fields=out_fields),
                            name='f24_output')

    # Connect the nodes
    wf.connect([
        (f24_input, calc_friston, [("in_file", "in_file")]),
        (calc_friston, f24_output, [("out_file", "out_file")]),
    ])
    return wf
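
The docstring above spells out the expansion that calc_friston_twenty_four performs. A minimal numpy sketch of that arithmetic, under the assumptions that the motion parameters arrive as a (T, 6) array and that the first frame's "previous volume" is zero-padded (the real helper works on files, not arrays):

import numpy as np

def friston_twenty_four(params):
    # params: (T, 6) motion parameters; returns (T, 24):
    # current, previous-volume, and the squares of both.
    prev = np.vstack([np.zeros((1, params.shape[1])), params[:-1]])
    return np.hstack([params, prev, params ** 2, prev ** 2])

motion = np.loadtxt('rest_mcf.par')      # hypothetical 6-column motion file
friston_24 = friston_twenty_four(motion)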
Example #5
def create_volatlas_workflow(wf_name, wf_base_dir, subject_list, cnxn_names,
                             fstr_dict, ref_file, agg_cnxn_names_dict):

    from nipype.pipeline import engine as pe
    from nipype.pipeline.engine import Node, JoinNode, MapNode, Workflow

    from nipype.interfaces.utility import IdentityInterface, Function

    from nipype.interfaces.io import DataSink
    """
  Variables
  """

    dpy_fstr = fstr_dict['dpy']
    warp_fstr = fstr_dict['warp']
    parc_fstr = fstr_dict['parc']
    cnxn_mapping_fstr = fstr_dict['cnxn_mapping']
    """
  Node: Infosource
  """

    # (iterates over subjects)
    def mirror(subject_id):
        return subject_id

    mirror_npfunc = Function(['subject_id'], ['subject_id'], mirror)

    node__infosource = Node(interface=mirror_npfunc, name="infosource")
    node__infosource.iterables = [("subject_id", subject_list)]
    """
  Node: Get data
  """

    # (also iterates over cnxn ids)

    def get_sub_files_func(subject_id, cnxn_name, dpy_fstr, warp_fstr,
                           parc_fstr, cnxn_mapping_fstr):
        dpy_file = dpy_fstr % subject_id
        cnxn_mapping_file = cnxn_mapping_fstr % subject_id
        parc_file = parc_fstr % subject_id
        warp_file = warp_fstr % subject_id

        return dpy_file, parc_file, warp_file, cnxn_mapping_file, cnxn_name, subject_id

    get_sub_files_npfunc = Function([
        'subject_id', 'cnxn_name', 'dpy_fstr', 'warp_fstr', 'parc_fstr',
        'cnxn_mapping_fstr'
    ], [
        'dpy_file', 'parc_file', 'warp_file', 'cnxn_mapping_file', 'cnxn_name',
        'subject_id'
    ], get_sub_files_func)

    node__datasource = Node(interface=get_sub_files_npfunc, name='datasource')
    node__datasource.inputs.dpy_fstr = dpy_fstr
    node__datasource.inputs.parc_fstr = parc_fstr
    node__datasource.inputs.warp_fstr = warp_fstr
    node__datasource.inputs.cnxn_mapping_fstr = cnxn_mapping_fstr
    node__datasource.iterables = [('cnxn_name', cnxn_names)]
    """
  Node: Make sub cnxn visitation map
  """

    make_sub_vismap_npfunc = Function([
        'sub', 'dpy_file', 'parc_file', 'warp_file', 'ref_file',
        'cnxn_inds_file', 'cnxn_name', 'vismap_fstr'
    ], ['sub_vismap_file'], make_sub_cnxn_visitation_map)

    node__makesubvismap = Node(interface=make_sub_vismap_npfunc,
                               name="make_sub_vismap")

    node__makesubvismap.inputs.ref_file = ref_file
    node__makesubvismap.inputs.vismap_fstr = 'temp_vismap_%s.nii.gz'
    #node__makesubvismap.inputs.overwrite=True
    """
  Node: make grp cnxn visitation map
  """

    make_grp_vismap_npfunc = Function(
        ['cnxn_name', 'sub_vismaps', 'grp_vismap_fstr', 'subs_list'],
        ['grp_vismap_fpath', 'grp_vismap_norm_fpath', 'subs_list_file'],
        make_group_cnxn_visitation_map)

    node__makegrpvismap = JoinNode(interface=make_grp_vismap_npfunc,
                                   name='make_grp_vismap',
                                   joinsource="infosource",
                                   joinfield=["sub_vismaps", "subs_list"])

    node__makegrpvismap.inputs.grp_vismap_fstr = 'grp_vismap_%s.nii.gz'  # this needs to be changed to come from previous node in wf
    """
  Node: aggregate group cnxn visitation map
  """
    # (to do...)

    agg_grp_vismap_npfunc = Function(['in_files', 'cnxn_names', 'outfname'],
                                     ['agg_image_file', 'agg_list_file'],
                                     aggregate_grp_vismap,
                                     imports=['import os'])

    node__agggrpvismap = JoinNode(interface=agg_grp_vismap_npfunc,
                                  name='agg_grp_vismap',
                                  joinsource="datasource",
                                  joinfield=["in_files"])

    node__agggrpvismap.iterables = [("cnxn_names",
                                     list(agg_cnxn_names_dict.values())),
                                    ("outfname",
                                     list(agg_cnxn_names_dict.keys()))]
    node__agggrpvismap.synchronize = True
    """
  Node: datasink
  """
    # I want to use a mapnode for this, but can't get it to work
    # so have to settle with this followed by a command line copy...

    # (if you don't have a mapnode, just get same result as outputs of agggrpvismap node...)
    node__datasink = Node(DataSink(), name='datasink')
    node__datasink.inputs.base_directory = wf_base_dir

    #node__datasinkniifile = MapNode(DataSink(infields=['agg_image_file']),name='ds_nii', iterfield=['agg_image_file'])
    #node__datasinkniifile.inputs.base_directory=wf_base_dir
    #node__datasinktxtfile = MapNode(DataSink(infields=['agg_list_file']),name='ds_txt', iterfield=['agg_list_file'])
    #node__datasinktxtfile.inputs.base_directory=wf_base_dir
    """
  Workflow: put it all together
  """

    wf = pe.Workflow(name=wf_name)
    wf.base_dir = wf_base_dir

    wf.connect(node__infosource, 'subject_id', node__datasource, 'subject_id')
    wf.connect(node__datasource, 'subject_id', node__makesubvismap, 'sub')
    wf.connect(node__datasource, 'dpy_file', node__makesubvismap, 'dpy_file')
    wf.connect(node__datasource, 'parc_file', node__makesubvismap, 'parc_file')
    wf.connect(node__datasource, 'warp_file', node__makesubvismap, 'warp_file')
    wf.connect(node__datasource, 'cnxn_mapping_file', node__makesubvismap,
               'cnxn_inds_file')
    wf.connect(node__datasource, 'cnxn_name', node__makesubvismap, 'cnxn_name')
    wf.connect(node__makesubvismap, 'sub_vismap_file', node__makegrpvismap,
               'sub_vismaps')
    wf.connect(node__datasource, 'cnxn_name', node__makegrpvismap, 'cnxn_name')
    wf.connect(node__datasource, 'subject_id', node__makegrpvismap,
               'subs_list')

    wf.connect(node__makegrpvismap, 'grp_vismap_norm_fpath',
               node__agggrpvismap, 'in_files')

    wf.connect(node__agggrpvismap, 'agg_image_file', node__datasink,
               '@agg_image_file')
    wf.connect(node__agggrpvismap, 'agg_list_file', node__datasink,
               '@agg_list_file')

    #wf.connect(node__agggrpvismap, 'agg_image_file', node__datasinkniifile, '@agg_image_file')
    #wf.connect(node__agggrpvismap, 'agg_list_file',  node__datasinktxtfile, '@agg_list_file')

    return wf
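
A hedged invocation sketch; every path and name below is hypothetical, and each %s placeholder is filled with a subject ID, as get_sub_files_func shows:

fstr_dict = {
    'dpy': '/data/%s/streamlines.dpy',
    'warp': '/data/%s/mni_warp.nii.gz',
    'parc': '/data/%s/parcellation.nii.gz',
    'cnxn_mapping': '/data/%s/cnxn_mapping.h5',
}
agg_cnxn_names_dict = {'all_cnxns.nii.gz': ['cnxn_a', 'cnxn_b']}

wf = create_volatlas_workflow('vismap_wf', '/scratch/wf', ['sub01', 'sub02'],
                              ['cnxn_a', 'cnxn_b'], fstr_dict,
                              '/data/ref/MNI152_T1_2mm.nii.gz',
                              agg_cnxn_names_dict)
wf.run()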
Example #6
# Helpers used below (`median`, `motion_regressors`, `build_filter1`,
# `extract_noise_components`, `bandpass_filter`, `rename`, `combine_hemi`,
# `extract_subrois`) and the `imports` list are defined elsewhere in the
# source script.
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    num_slices = len(slice_times)
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
    slice_timing.inputs.ref_slice = int(num_slices / 2)

    # Compute TSNR on realigned data, regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_files')]),
        (realign, slice_timing, [('realigned_files', 'in_files')]),
        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_files')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'smoothed_files', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        out_names = []
        for filename in files:
            _, name, _ = split_filename(filename)
            out_names.append(name + suffix)
        return list_to_filename(out_names)

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
        list(range(49, 55)) + [58]
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', '')]
    regex_subs = [
        ('_ts_masker.*/sar', '/smooth/'),
        ('_ts_masker.*/ar', '/unsmooth/'),
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
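
A hedged invocation sketch (file paths, TR, and slice times below are made up; in practice they come from the dataset):

slice_times = [0.0, 1.0, 0.5, 1.5]  # hypothetical interleaved acquisition
wf = create_workflow(files=['/data/sub01/rest_run1.nii',
                            '/data/sub01/rest_run2.nii'],
                     target_file='/data/standard/MNI152_T1_2mm_brain.nii.gz',
                     subject_id='sub01',
                     TR=2.0,
                     slice_times=slice_times,
                     vol_fwhm=6.0,
                     surf_fwhm=10.0,
                     subjects_dir='/data/freesurfer',
                     sink_directory='/output/sub01')
wf.base_dir = '/scratch/work'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})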
Example #7
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
                             fwhm=6.0, subjects_dir=None, target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    output_dir : str
        Output directory for the datasink
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)] for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt'),
                                            'contrasts': ('models/model%03d/'
                                                          'task_contrasts.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']],
                                           'contrasts': [['model_id']]}
    else:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']]}

    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([np.array(row.split()) for row in fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1)  for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, str):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(num_elements // num_conds,
                                   num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id',
                                                      'conds'],
                                         output_names=['behav'],
                                         function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    # Compute TSNR on realigned data, regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(len(all_copes) // num_copes,
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(len(all_varcopes) // num_copes,
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                    'contrasts'],
                                       output_names=['copes', 'varcopes',
                                                     'n_runs'],
                                       function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
                                        'inputspec.dof_files'),
                                      ])
                ])

    wf.connect(calc_median, 'median_file', registration, 'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration, 'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                                 [('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'], name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean, 'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'], name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, 'segmentation_file')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds', 'run_id',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                                 [('res4d', 'res4d'),
                                  ('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ('tstats', 'tstats')])
                                 ])
    wf.connect([(modelfit.get_node('modelgen'), datasink,
                                 [('design_cov', 'qa.model'),
                                  ('design_image', 'qa.model.@matrix_image'),
                                  ('design_file', 'qa.model.@matrix'),
                                 ])])
    wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                      'qa.motion'),
                                     ('outputspec.motion_plots',
                                      'qa.motion.plots'),
                                     ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink, 'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink, 'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink, 'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file', 'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file', 'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')

    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
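
A hedged invocation sketch (dataset path, model/task IDs, and output locations are hypothetical):

wf = analyze_openfmri_dataset(data_dir='/data/ds000114',
                              subject=['sub001'],
                              model_id=1,
                              task_id=[1],
                              output_dir='/output/ds000114')
wf.base_dir = '/scratch/openfmri'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})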
Example #8
# Assumed context for this snippet: pe is nipype.pipeline.engine,
# IdentityInterface and Function come from nipype.interfaces.utility, and
# `setup_node` plus the calculate_*/gen_*/set_frames_* helpers used below
# are defined elsewhere in the host package.
def motion_power_stats_wf(wf_name='gen_motion_stats'):
    """ The main purpose of this workflow is to get various statistical measures from the
    movement/motion parameters obtained in functional preprocessing.

    These parameters (FD calculations) are also required to carry out scrubbing.

    Order of commands:

    - Calculate Frame Wise Displacement (FD) as per Power et al., 2012

      Differentiating head realignment parameters across frames yields a six-dimensional timeseries that represents
      instantaneous head motion.
      Rotational displacements are converted from degrees to millimeters by calculating displacement on the surface of
      a sphere of radius 50 mm. [1]

    - Calculate Frame Wise Displacement (FD) as per Jenkinson et al., 2002

    - Calculate Frames to exclude

      Remove all frames in which FD exceeds the threshold

    - Calculate Frames to include

      Include all frames in which FD stays below the threshold

    - Calculate DVARS

      DVARS (D referring to the temporal derivative of timecourses, VARS to the RMS variance over voxels) indexes
      the rate of change of the BOLD signal across the entire brain at each frame of data. To calculate
      DVARS, the volumetric timeseries is differentiated (by backwards differences) and the RMS signal
      change is calculated over the whole brain. DVARS is thus a measure of how much the intensity
      of a brain image changes in comparison to the previous timepoint (as opposed to the global
      signal, which is the average value of a brain image at a timepoint). [1]

    - Calculate Power parameters::

        MeanFD : Mean (across time/frames) of the absolute values for Framewise Displacement (FD),
        computed as described in Power et al., Neuroimage, 2012)

        rootMeanSquareFD : Root mean square (RMS; across time/frames) of the absolute values for FD

        NumFD >= threshold : Number of frames (time points) where movement (FD) exceeded threshold

        rmsFD : Root mean square (RMS; across time/frames) of the absolute values for FD

        FDquartile(top 1/4th FD) : Mean of the top 25% highest FD values

        PercentFD (> threshold) : Number of frames (time points) where movement (FD) exceeded threshold,
                                  expressed as a percentage of the total number of frames (time points)

        MeanDVARS : Mean of voxel DVARS

    - Calculate Motion Parameters

      Following motion parameters are calculated::

        Subject, Scan, Mean Relative RMS Displacement, Max Relative RMS Displacement,
        Movements > threshold, Mean Relative Mean Rotation, Mean Relative Maxdisp,
        Max Relative Maxdisp, Max Abs Maxdisp, Max Relative Roll, Max Relative Pitch,
        Max Relative Yaw, Max Relative dS-I, Max Relative dL-R, Max Relative dP-A,
        Mean Relative Roll, Mean Relative Pitch, Mean Relative Yaw, Mean Relative dS-I,
        Mean Relative dL-R, Mean Relative dP-A, Max Abs Roll, Max Abs Pitch, Max Abs Yaw,
        Max Abs dS-I, Max Abs dL-R, Max Abs dP-A, Mean Abs Roll, Mean Abs Pitch, Mean Abs Yaw,
        Mean Abs dS-I, Mean Abs dL-R, Mean Abs dP-A

    Parameters
    ----------
    wf_name: str
        Workflow name

    Returns
    -------
    param_wf: workflow object
          Workflow object containing various movement/motion and power parameter estimates.

    Nipype inputs
    -------------
    inputspec.motion_correct : string (func/rest file or a list of func/rest nifti file)
        Path to motion corrected functional data

    inputspec.mask : string (nifti file)
        Path to file containing the brain-only mask for the functional data

    inputspec.max_displacement : string (Mat file)
        maximum displacement (in mm) vector for brain voxels in each volume.
        This file is obtained in functional preprocessing step

    inputspec.movement_parameters : string (Mat file)
        1D file containing six movement/motion parameters (3 translations, 3 rotations)
        in different columns (roll, pitch, yaw, dS, dL, dP), obtained in the functional preprocessing step

    scrubbing_input.threshold : a float
        scrubbing threshold

    scrubbing_input.remove_frames_before : an integer
        number of frames preceding each offending time frame
        (i.e., those exceeding the FD threshold) to also remove

    scrubbing_input.remove_frames_after : an integer
        number of frames following each offending time frame
        (i.e., those exceeding the FD threshold) to also remove

    Nipype outputs
    --------------
    outputspec.FD_1D : 1D file
        mean Framewise Displacement (FD)

    outputspec.frames_ex_1D : 1D file
        Frames that would be censored ("scrubbed"): the offending time frames
        (i.e., those exceeding the threshold), the preceding frame, and the
        two subsequent frames

    outputspec.frames_in_1D : 1D file
        Frames left after removal for scrubbing

    outputspec.power_params : txt file
        Text file containing various power parameters for scrubbing

    outputspec.motion_params : txt file
       Text file containing various movement parameters

    References
    ----------
    .. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
           but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
           2142-2154. doi:10.1016/j.neuroimage.2011.10.018

    .. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
           toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
           NeuroImage. doi:10.1016/j.neuroimage.2012.03.017

    .. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
           and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.
    """
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    inputNode = setup_node(IdentityInterface(fields=[
        'subject_id', 'scan_id', 'movement_parameters', 'max_displacement',
        'motion_correct', 'mask', 'oned_matrix_save'
    ]),
                           name='inputspec')

    scrubbing_input = setup_node(IdentityInterface(
        fields=['threshold', 'remove_frames_before', 'remove_frames_after']),
                                 name='scrubbing_input')

    outputNode = setup_node(IdentityInterface(fields=[
        'FD_1D', 'FDJ_1D', 'frames_ex_1D', 'frames_in_1D', 'power_params',
        'motion_params'
    ]),
                            name='outputspec')

    # calculate mean DVARS
    cal_DVARS = setup_node(Function(input_names=['in_file', 'mask'],
                                    output_names=['out_file'],
                                    function=calculate_DVARS),
                           name='cal_DVARS')
    wf.connect(inputNode, 'motion_correct', cal_DVARS, 'in_file')
    wf.connect(inputNode, 'mask', cal_DVARS, 'mask')

    # calculate framewise displacement (FD) as per Power et al., 2012
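    # (calculate_FD_P is expected to sum the absolute frame-to-frame differences
    # of the six realignment parameters, converting rotations to millimeters of
    # arc on a 50 mm sphere, following the Power et al. reference above.)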
    calculate_FD = setup_node(Function(input_names=['in_file'],
                                       output_names=['out_file'],
                                       function=calculate_FD_P),
                              name='calculate_FD')

    wf.connect(inputNode, 'movement_parameters', calculate_FD, 'in_file')
    wf.connect(calculate_FD, 'out_file', outputNode, 'FD_1D')

    # calculate framewise displacement as per Jenkinson et al., 2002
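    # (calculate_FD_J instead derives displacement from the affine matrices
    # saved by the realignment step -- the RMS deviation of Jenkinson et al. --
    # which is why this node consumes 'oned_matrix_save' rather than the
    # movement parameters.)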
    calculate_FDJ = setup_node(Function(input_names=['in_file'],
                                        output_names=['out_file'],
                                        function=calculate_FD_J),
                               name='calculate_FDJ')

    wf.connect(inputNode, 'oned_matrix_save', calculate_FDJ, 'in_file')
    wf.connect(calculate_FDJ, 'out_file', outputNode, 'FDJ_1D')

    # calculate frames to exclude and include after scrubbing
    exclude_frames = setup_node(Function(
        input_names=['in_file', 'threshold', 'frames_before', 'frames_after'],
        output_names=['out_file'],
        function=set_frames_ex),
                                name='exclude_frames')

    wf.connect(calculate_FD, 'out_file', exclude_frames, 'in_file')
    wf.connect(scrubbing_input, 'threshold', exclude_frames, 'threshold')
    wf.connect(scrubbing_input, 'remove_frames_before', exclude_frames,
               'frames_before')
    wf.connect(scrubbing_input, 'remove_frames_after', exclude_frames,
               'frames_after')
    wf.connect(exclude_frames, 'out_file', outputNode, 'frames_ex_1D')

    include_frames = setup_node(Function(
        input_names=['in_file', 'threshold', 'exclude_list'],
        output_names=['out_file'],
        function=set_frames_in),
                                name='include_frames')

    wf.connect(calculate_FD, 'out_file', include_frames, 'in_file')
    wf.connect(scrubbing_input, 'threshold', include_frames, 'threshold')
    wf.connect(exclude_frames, 'out_file', include_frames, 'exclude_list')
    wf.connect(include_frames, 'out_file', outputNode, 'frames_in_1D')

    calc_motion_parameters = setup_node(Function(
        input_names=[
            "subject_id", "scan_id", "movement_parameters", "max_displacement"
        ],
        output_names=['out_file'],
        function=gen_motion_parameters),
                                        name='calc_motion_parameters')
    wf.connect(inputNode, 'subject_id', calc_motion_parameters, 'subject_id')
    wf.connect(inputNode, 'scan_id', calc_motion_parameters, 'scan_id')
    wf.connect(inputNode, 'movement_parameters', calc_motion_parameters,
               'movement_parameters')
    wf.connect(inputNode, 'max_displacement', calc_motion_parameters,
               'max_displacement')
    wf.connect(calc_motion_parameters, 'out_file', outputNode, 'motion_params')

    calc_power_parameters = setup_node(Function(input_names=[
        "subject_id", "scan_id", "FD_1D", "FDJ_1D", "threshold", "DVARS"
    ],
                                                output_names=['out_file'],
                                                function=gen_power_parameters),
                                       name='calc_power_parameters')
    wf.connect(inputNode, 'subject_id', calc_power_parameters, 'subject_id')
    wf.connect(inputNode, 'scan_id', calc_power_parameters, 'scan_id')
    wf.connect(cal_DVARS, 'out_file', calc_power_parameters, 'DVARS')
    wf.connect(calculate_FD, 'out_file', calc_power_parameters, 'FD_1D')
    wf.connect(calculate_FDJ, 'out_file', calc_power_parameters, 'FDJ_1D')
    wf.connect(scrubbing_input, 'threshold', calc_power_parameters,
               'threshold')

    wf.connect(calc_power_parameters, 'out_file', outputNode, 'power_params')

    return wf
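
# --- Hedged usage sketch (not part of the original example) ---
# However the factory above is named in its module, the scrubbing knobs live on
# the 'scrubbing_input' node; the values below are illustrative only.
#
#   wf = <this_factory>('motion_stats')
#   wf.inputs.scrubbing_input.threshold = 0.5          # FD threshold in mm
#   wf.inputs.scrubbing_input.remove_frames_before = 1
#   wf.inputs.scrubbing_input.remove_frames_after = 2
#   wf.inputs.inputspec.subject_id = 'sub-01'
#   wf.inputs.inputspec.scan_id = 'rest_run-01'
#   wf.run()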
Beispiel #9
0
def create_epi_t1_nonlinear_pipeline(name='epi_t1_nonlinear'):
    """Creates a pipeline that performs nonlinear EPI to T1 registration using 
    the antsRegistration tool. Beforehand, the T1 image has to be processed in 
    freesurfer and the EPI timeseries should be realigned.

    Example
    -------

    >>> nipype_epi_t1_nonlin = create_epi_t1_nonlinear_pipeline('nipype_epi_t1_nonlin')
    >>> nipype_epi_t1_nonlin.inputs.inputnode.fs_subject_id = '123456'
    >>> nipype_epi_t1_nonlin.inputs.inputnode.fs_subjects_dir = '/project/data/freesurfer'
    >>> nipype_epi_t1_nonlin.inputs.inputnode.realigned_epi = 'mcflirt.nii.gz'
    >>> nipype_epi_t1_nonlin.run()

    Inputs::

        inputnode.fs_subject_id    # subject id used in freesurfer
        inputnode.fs_subjects_dir  # path to freesurfer output
        inputnode.realigned_epi    # realigned EPI timeseries

    Outputs::

        outputnode.lin_epi2anat     # ITK format
        outputnode.lin_anat2epi     # ITK format
        outputnode.nonlin_epi2anat  # ANTs-specific 5D deformation field
        outputnode.nonlin_anat2epi  # ANTs-specific 5D deformation field

    """

    nonreg = Workflow(name=name)  # honor the caller-supplied workflow name

    # input
    inputnode = Node(interface=util.IdentityInterface(
        fields=['fs_subject_id', 'fs_subjects_dir', 'realigned_epi']),
                     name='inputnode')

    # calculate the temporal mean image of the realigned timeseries
    tmean = Node(interface=fsl.maths.MeanImage(dimension='T',
                                               output_type='NIFTI_GZ'),
                 name='tmean')

    nonreg.connect(inputnode, 'realigned_epi', tmean, 'in_file')

    # import brain.mgz and aparc+aseg.mgz from the freesurfer directory
    fs_import = Node(interface=nio.FreeSurferSource(),
                     name='freesurfer_import')

    nonreg.connect(inputnode, 'fs_subjects_dir', fs_import, 'subjects_dir')
    nonreg.connect(inputnode, 'fs_subject_id', fs_import, 'subject_id')

    # convert brain.mgz to niigz
    mriconvert = Node(interface=fs.MRIConvert(out_type='niigz'),
                      name='mriconvert')

    nonreg.connect(fs_import, 'brain', mriconvert, 'in_file')

    # calculate rigid transformation of mean epi to t1 with bbregister
    bbregister = Node(interface=fs.BBRegister(init='fsl',
                                              contrast_type='t2',
                                              out_fsl_file=True),
                      name='bbregister')

    nonreg.connect(inputnode, 'fs_subjects_dir', bbregister, 'subjects_dir')
    nonreg.connect(inputnode, 'fs_subject_id', bbregister, 'subject_id')
    nonreg.connect(tmean, 'out_file', bbregister, 'source_file')

    # convert linear transformation to itk format compatible with ants
    itk = Node(interface=c3.C3dAffineTool(fsl2ras=True,
                                          itk_transform='epi2anat_affine.txt'),
               name='itk')

    nonreg.connect(tmean, 'out_file', itk, 'source_file')
    nonreg.connect(mriconvert, 'out_file', itk, 'reference_file')
    nonreg.connect(bbregister, 'out_fsl_file', itk, 'transform_file')

    # create a brain mask from aparc+aseg
    def get_aparc_aseg(files):
        """Return the first aparc+aseg file from a FreeSurferSource list."""
        for name in files:
            if 'aparc+aseg' in name:
                return name
        raise ValueError('no aparc+aseg file found')

    aparc_aseg_mask = Node(fs.Binarize(min=0.1,
                                       dilate=10,
                                       erode=7,
                                       out_type='nii.gz',
                                       binary_file='aparc_aseg_mask.nii.gz'),
                           name='aparc_aseg_mask')

    # fill holes in mask
    fillholes = Node(fsl.maths.MathsCommand(args='-fillh'), name='fillholes')

    nonreg.connect([(fs_import, aparc_aseg_mask, [
        (('aparc_aseg', get_aparc_aseg), 'in_file')
    ]), (aparc_aseg_mask, fillholes, [('binary_file', 'in_file')])])

    # create bounding box mask and rigidly transform into anatomical (fs) space
    fov = Node(interface=fs.model.Binarize(min=0.0, out_type='nii.gz'),
               name='fov')

    nonreg.connect(tmean, 'out_file', fov, 'in_file')

    fov_trans = Node(interface=ants.resampling.ApplyTransforms(
        dimension=3, interpolation='NearestNeighbor'),
                     name='fov_trans')

    nonreg.connect(itk, ('itk_transform', filename_to_list), fov_trans,
                   'transforms')
    nonreg.connect(fov, 'binary_file', fov_trans, 'input_image')
    nonreg.connect(fillholes, 'out_file', fov_trans, 'reference_image')
    # nonreg.connect(ribbon, 'binary_file', fov_trans, 'reference_image')

    # intersect both masks
    intersect = Node(interface=fsl.maths.BinaryMaths(operation='mul'),
                     name='intersect')

    nonreg.connect(fillholes, 'out_file', intersect, 'in_file')
    # nonreg.connect(ribbon, 'binary_file', intersect, 'in_file')
    nonreg.connect(fov_trans, 'output_image', intersect, 'operand_file')

    # inversely transform the mask and use it to mask the original EPI
    mask_trans = Node(interface=ants.resampling.ApplyTransforms(
        dimension=3,
        interpolation='NearestNeighbor',
        invert_transform_flags=[True]),
                      name='mask_trans')

    nonreg.connect(itk, ('itk_transform', filename_to_list), mask_trans,
                   'transforms')
    nonreg.connect(intersect, 'out_file', mask_trans, 'input_image')
    nonreg.connect(tmean, 'out_file', mask_trans, 'reference_image')

    maskepi = Node(interface=fs.utils.ApplyMask(), name='maskepi')

    nonreg.connect(mask_trans, 'output_image', maskepi, 'mask_file')
    nonreg.connect(tmean, 'out_file', maskepi, 'in_file')

    # mask anatomical image (brain)
    maskanat = Node(interface=fs.utils.ApplyMask(), name='maskanat')

    nonreg.connect(intersect, 'out_file', maskanat, 'mask_file')
    nonreg.connect(mriconvert, 'out_file', maskanat, 'in_file')

    # invert masked anatomical image
    anat_min_max = Node(interface=fsl.utils.ImageStats(op_string='-R'),
                        name='anat_min_max')
    epi_min_max = Node(interface=fsl.utils.ImageStats(op_string='-r'),
                       name='epi_min_max')

    nonreg.connect(maskanat, 'out_file', anat_min_max, 'in_file')
    nonreg.connect(tmean, 'out_file', epi_min_max, 'in_file')
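
    # The CC metric assumes roughly matching contrast, so the anatomy is given
    # an EPI-like appearance first: the helper below linearly maps the
    # anatomy's full intensity range (-R) onto the EPI's robust range (-r)
    # with a negative slope, i.e. 'mul' flips and rescales the anatomy and
    # 'add' shifts it back so its minimum coincides with the EPI minimum.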

    def calc_inversion(anat_min_max, epi_min_max):
        mul = -(epi_min_max[1] - epi_min_max[0]) / (anat_min_max[1] -
                                                    anat_min_max[0])
        add = abs(anat_min_max[1] * mul) + epi_min_max[0]
        return mul, add

    calcinv = Node(interface=Function(
        input_names=['anat_min_max', 'epi_min_max'],
        output_names=['mul', 'add'],
        function=calc_inversion),
                   name='calcinv')

    nonreg.connect(anat_min_max, 'out_stat', calcinv, 'anat_min_max')
    nonreg.connect(epi_min_max, 'out_stat', calcinv, 'epi_min_max')

    mulinv = Node(interface=fsl.maths.BinaryMaths(operation='mul'),
                  name='mulinv')
    addinv = Node(interface=fsl.maths.BinaryMaths(operation='add'),
                  name='addinv')

    nonreg.connect(maskanat, 'out_file', mulinv, 'in_file')
    nonreg.connect(calcinv, 'mul', mulinv, 'operand_value')
    nonreg.connect(mulinv, 'out_file', addinv, 'in_file')
    nonreg.connect(calcinv, 'add', addinv, 'operand_value')

    # nonlinear transformation of masked anat to masked epi with ants
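    # (A deliberately gentle SyN schedule: CC metric with radius 4, two levels
    # with shrink factors 2/1, smoothing 1/0.5 vox, and only 10/5 iterations,
    # keeping the warp close to the bbregister initialization. The extra
    # '-g .1x1x.1' argument is presumably antsRegistration's
    # --restrict-deformation weights, limiting the warp mostly to the y axis,
    # i.e. the typical EPI phase-encoding/distortion direction.)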
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        invert_initial_moving_transform=True,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g .1x1x.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[10, 5]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[2, 1]],
        smoothing_sigmas=[[1, 0.5]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True),
                   name='antsreg')

    nonreg.connect(itk, 'itk_transform', antsreg, 'initial_moving_transform')
    nonreg.connect(maskepi, 'out_file', antsreg, 'fixed_image')
    nonreg.connect(addinv, 'out_file', antsreg, 'moving_image')

    # output

    def first_element(file_list):
        return file_list[0]

    def second_element(file_list):
        return file_list[1]
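    # (These selectors assume nipype returns the collapsed output transforms as
    # [affine, warp]: element 0 is the linear part, element 1 the SyN warp.)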

    outputnode = Node(interface=util.IdentityInterface(fields=[
        'lin_epi2anat', 'lin_anat2epi', 'nonlin_epi2anat', 'nonlin_anat2epi'
    ]),
                      name='outputnode')

    nonreg.connect(itk, 'itk_transform', outputnode, 'lin_epi2anat')
    nonreg.connect(antsreg, ('forward_transforms', first_element), outputnode,
                   'lin_anat2epi')
    nonreg.connect(antsreg, ('forward_transforms', second_element), outputnode,
                   'nonlin_anat2epi')
    nonreg.connect(antsreg, ('reverse_transforms', second_element), outputnode,
                   'nonlin_epi2anat')

    return nonreg
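
# --- Hedged usage sketch (not part of the original example) ---
# A quick way to set up the pipeline and inspect its wiring before running;
# all paths and IDs below are illustrative assumptions.
if __name__ == '__main__':
    nonlin = create_epi_t1_nonlinear_pipeline('epi_t1_nonlin')
    nonlin.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'  # assumed path
    nonlin.inputs.inputnode.fs_subject_id = 'sub-01'              # assumed ID
    nonlin.inputs.inputnode.realigned_epi = 'rest_mcf.nii.gz'     # assumed file
    nonlin.base_dir = '/tmp/work'
    nonlin.write_graph(graph2use='colored', format='png')  # render the DAG
    # nonlin.run(plugin='MultiProc', plugin_args={'n_procs': 4})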
Beispiel #10
0
def attach_concat_canica(main_wf, wf_name="canica", **kwargs):
    """ Attach a Concat and a nilearn CanICA interface to `main_wf`.

    The Concat node merges all the files into one 4D volume before delivering it to CanICA.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    kwargs: dict[str]->str
        input_node: str
            Name of the source node from which to take `input_connection`.

        input_connection: str
            Name of the output connection that provides the source files.

    Nipype Inputs for `main_wf`
    ---------------------------
    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    # Dependency workflows
    srcwf_name   = kwargs['input_node']
    srcconn_name = kwargs['input_connection']

    src_wf   = main_wf.get_node(srcwf_name)
    datasink = get_datasink(main_wf, name='datasink')

    base_outdir  = datasink.inputs.base_directory
    ica_datasink = pe.Node(DataSink(parameterization=False,
                                    base_directory=base_outdir),
                           name="{}_datasink".format(wf_name))
    ica_datasink.inputs.container = 'ica_{}'.format(wf_name)

    # gather the per-subject source files (joined over the 'infosrc' iterable)
    ica_subjs = pe.JoinNode(interface=IdentityInterface(fields=["ica_subjs"]),
                            joinsource="infosrc",
                            joinfield="ica_subjs",
                            name="ica_subjs")

    # concat images
    concat = setup_node(Function(function=concat_3D_imgs,
                                 input_names=["in_files"],
                                 output_names=["out_file"],
                                 imports=['from pypes.interfaces.nilearn import ni2file']),
                        name="concat")

    # run group CanICA on the concatenated 4D volume
    ica = setup_node(CanICAInterface(), name="{}_ica".format(wf_name),)
    algorithm = get_config_setting("{}_ica.algorithm".format(wf_name),
                                   default=get_config_setting('canica.algorithm',
                                   default=''))
    if algorithm:
        ica.inputs.algorithm = algorithm

    # Connect the nodes
    main_wf.connect([
                     # file list input
                     (src_wf, ica_subjs, [(srcconn_name, "ica_subjs")]),

                     # concat images
                     (ica_subjs, concat, [("ica_subjs", "in_files")]),

                     # canica
                     (concat, ica, [(("out_file", _check_list), "in_files")]),

                     # canica output
                     (ica, ica_datasink, [("components", "@components"),
                                          ("loadings",   "@loadings"),
                                          ("score",      "@score"),
                                         ]),
                   ])

    # plot the ICA results?
    do_plot = get_config_setting('canica_extra.plot', default=True)
    if not do_plot:
        return main_wf

    # get the plot threshold from the ICA node or the config file (in that order).
    plot_thr = get_config_setting('canica_extra.plot_thr', default=0)
    plot_thr = get_trait_value(ica.inputs, 'threshold', default=plot_thr)

    # plot the ICA result images
    plot_ica = setup_node(Function(function=plot_ica_results,
                                   input_names=["ica_result", "application", "mask_file", "zscore", "bg_img"],
                                   output_names=["all_icc_plot", "iccs_plot", "sliced_ic_plots"],),
                          name="plot_ica")
    plot_ica.inputs.zscore      = plot_thr
    plot_ica.inputs.mask_file   = get_trait_value(ica.inputs, 'mask')
    plot_ica.inputs.application = 'nilearn'

    # Connect the plotting nodes
    main_wf.connect([
                     # canica
                     (ica,   plot_ica,        [("components",   "ica_result")]),

                     # canica output
                     (plot_ica, ica_datasink, [("all_icc_plot",     "@all_icc_plot"),
                                               ("iccs_plot",        "@iccs_plot"),
                                               ("sliced_ic_plots",  "@sliced_ic_plots"),
                                              ]),
                     ])

    return main_wf
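
# --- Hedged usage sketch (not part of the original example) ---
# `attach_concat_canica` expects the parent workflow to already contain the
# source node named in the kwargs; node and connection names below are
# assumptions for illustration.
#
#   main_wf = attach_concat_canica(main_wf,
#                                  wf_name='rest',
#                                  input_node='rest_preproc',
#                                  input_connection='outputspec.epi_mni')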