def mod_smooth(in_file, mask_file, fwhm, smooth_type, reg_file, surface_fwhm, subjects_dir=None):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import os
    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
    if smooth_type == 'susan':
        if fwhm == 0:
            return in_file
        smooth = create_susan_smooth()
        smooth.base_dir = os.getcwd()
        smooth.inputs.inputnode.fwhm = fwhm
        smooth.inputs.inputnode.mask_file = mask_file
        smooth.inputs.inputnode.in_files = in_file
        res = smooth.run()
        smoothed_file = res.outputs.outputnode.smoothed_files
    elif smooth_type == 'isotropic':
        if fwhm == 0:
            return in_file
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        if fwhm == 0 and surface_fwhm == 0:
            return in_file
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    return smoothed_file
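
A minimal usage sketch (an illustration, not part of the original source; the file names are placeholders, and the 'isotropic' branch is chosen so only FSL is required):

# Hypothetical call with placeholder paths
smoothed = mod_smooth(in_file='func.nii.gz',
                      mask_file='func_brain_mask.nii.gz',
                      fwhm=6.0,
                      smooth_type='isotropic',
                      reg_file=None,
                      surface_fwhm=0)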
Example 2
def mod_smooth(in_file, mask_file, fwhm, smooth_type):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import os
    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
    if smooth_type == 'susan':
        if fwhm == 0:
            return in_file
        smooth = create_susan_smooth()
        smooth.base_dir = os.getcwd()
        smooth.inputs.inputnode.fwhm = fwhm
        smooth.inputs.inputnode.mask_file = mask_file
        smooth.inputs.inputnode.in_files = in_file
        res = smooth.run()
        smoothed_file = res.outputs.outputnode.smoothed_files
    return smoothed_file
Example 3
def mod_smooth(in_file,
               mask_file,
               fwhm,
               smooth_type,
               reg_file,
               surface_fwhm,
               subjects_dir=None):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import os
    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
    if smooth_type == 'susan':
        if fwhm == 0:
            return in_file
        smooth = create_susan_smooth()
        smooth.base_dir = os.getcwd()
        smooth.inputs.inputnode.fwhm = fwhm
        smooth.inputs.inputnode.mask_file = mask_file
        smooth.inputs.inputnode.in_files = in_file
        res = smooth.run()
        smoothed_file = res.outputs.outputnode.smoothed_files
    elif smooth_type == 'isotropic':
        if fwhm == 0:
            return in_file
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        if fwhm == 0 and surface_fwhm == 0:
            return in_file
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    return smoothed_file
Example 4
## adding node for the saveScrub functions
svScrub = pe.Node(util.Function(input_names=['regressors_file', 'thr'],
                                output_names=['perFile'],
                                function=saveScrub),
                  name='svScrub')

svScrub.inputs.thr = thr
#%%
skip = pe.Node(interface=fsl.ExtractROI(), name='skip')
skip.inputs.t_min = removeTR
skip.inputs.t_size = lastTR

#%%

susan = create_susan_smooth()
susan.inputs.inputnode.fwhm = fwhm


#%%
def changeTostring(arr):
    return arr[0]


changeToString = pe.Node(name="changeToString",
                         interface=Function(input_names=['arr'],
                                            output_names=['arr'],
                                            function=changeTostring))
#%%
modelfit = pe.Workflow(name='fsl_fit', base_dir=output_dir)
"""
Example 5
             'inputspec.rest_mask')

########################################################################################################################
# get_zscore
#from CPAC.utils import get_zscore

zscore_reho = cent.get_zscore('reho', wf_name='Ztrans_reho')
pipe.connect(reho, 'outputspec.raw_reho_map', zscore_reho,
             'inputspec.input_file')
pipe.connect(preproc, 'outputspec.func_brain_mask', zscore_reho,
             'inputspec.mask_file')

########################################################################################################################
# smooth reho maps

smooth = prpc.create_susan_smooth("smooth_reho")
smooth.inputs.inputnode.fwhm = 12.
#smooth.get_node( "smooth").iterables=[('fwhm', [4., 6., 8., 10., 12., 14.])] #TODO: to sigma???

pipe.connect(reho, 'outputspec.raw_reho_map', smooth, 'inputnode.in_files')
pipe.connect(preproc, 'outputspec.func_brain_mask', smooth,
             'inputnode.mask_file')

########################################################################################################################
# smooth reho Z maps

smoothz = prpc.create_susan_smooth("smooth_reho_z")
smoothz.inputs.inputnode.fwhm = 12.
#smooth.get_node( "smooth").iterables=[('fwhm', [4., 6., 8., 10., 12., 14.])] #TODO: to sigma???

pipe.connect(zscore_reho, 'outputspec.z_score_img', smoothz,
             'inputnode.in_files')
Example 6
def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'funcs',
            'subject_id',
            'session_id',
            'fwhm',  # smoothing
            'highpass'
        ]),
        name="inputspec")

    # SelectFiles
    templates = {
        'ref_manual_fmapmask':  # was: manual_fmapmask
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_manualmask.nii.gz',

        'ref_fmap_magnitude':
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_reference.nii.gz',

        'ref_fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        # 'manualweights':
        # 'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',

        'ref_func':  # was: manualmask_func_ref
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_reference.nii.gz',

        'ref_funcmask':  # was: manualmask
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_mask.nii.gz',

        'ref_t1':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_reference.nii.gz',

        'ref_t1mask':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_manualmask.nii.gz',

        # 'funcs':
        # 'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        #     # 'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
        #     'sub-{subject_id}_ses-{session_id}*run-01_bold_res-1x1x1_preproc'
        #     # '.nii.gz',
        #     '_nvol10.nii.gz',

        'fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        'fmap_magnitude':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
            '.nii.gz',

        # 'fmap_mask':
        # 'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                         name="input_files")

    featpreproc.connect([(inputnode, inputfiles, [
        ('subject_id', 'subject_id'),
        ('session_id', 'session_id'),
    ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # ------------------ Output Files
    # Datasink
    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/featpreproc',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'motion_parameters',
        'motion_corrected',
        'motion_plots',
        'motion_outlier_files',
        'mask',
        'smoothed_files',
        'highpassed_files',
        'mean',
        'func_unwarp',
        'ref_func',
        'ref_funcmask',
        'ref_t1',
        'ref_t1mask',
    ]),
                         name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect([(inputfiles, transmanmask_mc, [
        ('subject_id', 'in.subject_id'),
        ('session_id', 'in.session_id'),
    ])])

    featpreproc.connect(inputfiles, 'ref_funcmask', transmanmask_mc,
                        'in.manualmask')
    featpreproc.connect(inputnode, 'funcs', transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func', transmanmask_mc,
                        'in.manualmask_func_ref')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask', trans_fmapmask,
                            'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude', trans_fmapmask,
                            'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func', trans_fmapmask,
                            'in.manualmask_func_ref')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect([
        (inputnode, median_func, [
            ('funcs', 'in_file'),
        ]),
        (median_func, pre_mc, [
            ('out_file', 'in.funcs'),
        ]),
        (
            inputfiles,
            pre_mc,
            [
                # median func image will be used as a reference / base
                ('ref_func', 'in.ref_func'),
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            pre_mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
            ]),
        (pre_mc, outputnode, [
            ('mc.out_file', 'pre_motion_corrected'),
            ('mc.oned_file', 'pre_motion_parameters.oned_file'),
            ('mc.oned_matrix_save', 'pre_motion_parameters.oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
                ('pre_motion_parameters.oned_file',
                 'pre_motion_corrected.oned_file'
                 ),  # warp parameters in ASCII (.1D)
                ('pre_motion_parameters.oned_matrix_save',
                 'pre_motion_corrected.oned_matrix_save'
                 ),  # transformation matrices for each sub-brick
            ]),
    ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))
    # - - - - - - Connections - - - - - - -
    featpreproc.connect([
        (inputnode, mc, [
            ('funcs', 'in.funcs'),
        ]),
        (
            pre_mc,
            mc,
            [
                # the median image realigned to the reference functional will serve as reference
                #  this way motion correction is done to an image more similar to the functionals
                ('mc.out_file', 'in.ref_func'),
            ]),
        (
            inputfiles,
            mc,
            [
                # Check and make sure the ref func mask is close enough to the registered median
                # image.
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
            ]),
        (mc, outputnode, [
            ('mc.out_file', 'motion_corrected'),
            ('mc.oned_file', 'motion_parameters.oned_file'),
            ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('motion_corrected', 'motion_corrected.out_file'),
                ('motion_parameters.oned_file', 'motion_corrected.oned_file'
                 ),  # warp parameters in ASCII (.1D)
                ('motion_parameters.oned_matrix_save',
                 'motion_corrected.oned_matrix_save'
                 ),  # transformation matrices for each sub-brick
            ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Performing motion correction to a reference that is undistorted,
    # so b0_unwarp is currently not needed
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect([
            (
                inputfiles,
                b0_unwarp,
                [  # ('subject_id', 'in.subject_id'),
                    # ('session_id', 'in.session_id'),
                    ('fmap_phasediff', 'in.fmap_phasediff'),
                    ('fmap_magnitude', 'in.fmap_magnitude'),
                ]),
            (mc, b0_unwarp, [
                ('mc.out_file', 'in.funcs'),
            ]),
            (transmanmask_mc, b0_unwarp, [
                ('funcreg.out_file', 'in.funcmasks'),
            ]),
            (trans_fmapmask, b0_unwarp, [('funcreg.out_file', 'in.fmap_mask')
                                         ]),
            (b0_unwarp, outputfiles, [
                ('out.funcs', 'func_unwarp.funcs'),
                ('out.funcmasks', 'func_unwarp.funcmasks'),
            ]),
            (b0_unwarp, outputnode, [
                ('out.funcs', 'func_unwarp.funcs'),
                ('out.funcmasks', 'mask'),
            ]),
        ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect([
            (
                inputfiles,
                b0_unwarp_ref,
                [  # ('subject_id', 'in.subject_id'),
                    # ('session_id', 'in.session_id'),
                    ('ref_fmap_phasediff', 'in.fmap_phasediff'),
                    ('ref_fmap_magnitude', 'in.fmap_magnitude'),
                    ('ref_manual_fmapmask', 'in.fmap_mask'),
                    ('ref_func', 'in.funcs'),
                    ('ref_funcmask', 'in.funcmasks'),
                ]),
            (b0_unwarp_ref, outputfiles, [
                ('out.funcs', 'func_unwarp_ref.func'),
                ('out.funcmasks', 'func_unwarp_ref.funcmask'),
            ]),
            (b0_unwarp_ref, outputnode, [
                ('out.funcs', 'ref_func'),
                ('out.funcmasks', 'ref_mask'),
            ]),
        ])
    else:
        featpreproc.connect([
            (inputfiles, outputfiles, [
                ('ref_func', 'reference/func'),
                ('ref_funcmask', 'reference/func_mask'),
            ]),
            (inputfiles, outputnode, [
                ('ref_func', 'ref_func'),
                ('ref_funcmask', 'ref_funcmask'),
            ]),
        ])

    # |~) _  _ . __|_ _  _  _|_ _   |~) _  |` _  _ _  _  _ _
    # |~\(/_(_||_\ | (/_|    | (_)  |~\(/_~|~(/_| (/_| |(_(/_
    #        _|
    # Register all functionals to common reference
    # --------------------------------------------------------
    if False:  # this is now done during motion correction
        # FLIRT cost: intermodal: corratio, intramodal: least squares and normcorr
        reg_to_ref = pe.MapNode(  # intra-modal
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='normcorr'),
            name='reg_to_ref',
            iterfield=('in_file', 'in_weight'),
        )
        refEPI_to_refT1 = pe.Node(
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='corratio'),
            name='refEPI_to_refT1',
        )
        # combine func -> ref_func and ref_func -> ref_T1
        reg_to_refT1 = pe.MapNode(
            interface=fsl.ConvertXFM(concat_xfm=True),
            name='reg_to_refT1',
            iterfield=('in_file'),
        )

        reg_funcs = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcs',
            iterfield=('in_file', 'in_matrix_file'),
        )
        reg_funcmasks = pe.MapNode(interface=fsl.preprocess.ApplyXFM(),
                                   name='reg_funcmasks',
                                   iterfield=('in_file', 'in_matrix_file'))

        def deref_list(x):
            assert len(x) == 1
            return x[0]

        featpreproc.connect([
            (
                b0_unwarp,
                reg_to_ref,  # --> reg_to_ref, (A)
                [
                    ('out.funcs', 'in_file'),
                    ('out.funcmasks', 'in_weight'),
                ]),
            (b0_unwarp_ref, reg_to_ref, [
                (('out.funcs', deref_list), 'reference'),
                (('out.funcmasks', deref_list), 'ref_weight'),
            ]),
            (
                b0_unwarp_ref,
                refEPI_to_refT1,  # --> refEPI_to_refT1 (B)
                [
                    (('out.funcs', deref_list), 'in_file'),
                    (('out.funcmasks', deref_list), 'in_weight'),
                ]),
            (inputfiles, refEPI_to_refT1, [
                ('ref_t1', 'reference'),
                ('ref_t1mask', 'ref_weight'),
            ]),
            (
                reg_to_ref,
                reg_to_refT1,  # --> reg_to_refT1 (A*B)
                [
                    ('out_matrix_file', 'in_file'),
                ]),
            (refEPI_to_refT1, reg_to_refT1, [
                ('out_matrix_file', 'in_file2'),
            ]),
            (
                reg_to_refT1,
                reg_funcs,  # --> reg_funcs
                [
                    # ('out_matrix_file', 'in_matrix_file'),
                    ('out_file', 'in_matrix_file'),
                ]),
            (b0_unwarp, reg_funcs, [
                ('out.funcs', 'in_file'),
            ]),
            (b0_unwarp_ref, reg_funcs, [
                (('out.funcs', deref_list), 'reference'),
            ]),
            (
                reg_to_refT1,
                reg_funcmasks,  # --> reg_funcmasks
                [
                    # ('out_matrix_file', 'in_matrix_file'),
                    ('out_file', 'in_matrix_file'),
                ]),
            (b0_unwarp, reg_funcmasks, [
                ('out.funcmasks', 'in_file'),
            ]),
            (b0_unwarp_ref, reg_funcmasks, [
                (('out.funcs', deref_list), 'reference'),
            ]),
            (reg_funcs, outputfiles, [
                ('out_file', 'common_ref.func'),
            ]),
            (reg_funcmasks, outputfiles, [
                ('out_file', 'common_ref.funcmask'),
            ]),
        ])

    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    """
    Dilate the mask
    """
    if False:
        dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                         op_string='-dilF'),
                                iterfield=['in_file'],
                                name='dilatemask')
        featpreproc.connect(reg_funcmasks, 'out_file', dilatemask, 'in_file')
    else:
        dilatemask = pe.Node(interface=fsl.ImageMaths(suffix='_dil',
                                                      op_string='-dilF'),
                             name='dilatemask')
        featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(fsl.BinaryMaths(operation='mul'),
                            iterfield=('in_file', 'operand_file'),
                            name='funcbrains')

    featpreproc.connect([
        (mc, funcbrains, [
            ('mc.out_file', 'in_file'),
        ]),
        (dilatemask, funcbrains, [
            ('out_file', 'operand_file'),
        ]),
        (funcbrains, outputfiles, [
            ('out_file', 'funcbrains'),
        ]),
    ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (
            mc,
            outliers,
            [  # ('mc.par_file', 'realignment_parameters'),
                ('mc.oned_file', 'realignment_parameters'),
            ]),
        (funcbrains, outliers, [
            ('out_file', 'realigned_files'),
        ]),
        (dilatemask, outliers, [
            ('out_file', 'mask_file'),
        ]),
        (
            outliers,
            outputfiles,
            [
                ('outlier_files', 'motion_outliers.@outlier_files'),
                ('plot_files', 'motion_outliers.@plot_files'),
                ('displacement_files', 'motion_outliers.@displacement_files'),
                ('intensity_files', 'motion_outliers.@intensity_files'),
                ('mask_files', 'motion_outliers.@mask_files'),
                ('statistic_files', 'motion_outliers.@statistic_files'),
                # ('norm_files', 'outliers.@norm_files'),
            ]),
        (mc, outputnode, [
            ('mc.oned_file', 'motion_parameters'),
        ]),
        (
            outliers,
            outputnode,
            [
                ('outlier_files', 'motion_outlier_files'),
                ('plot_files', 'motion_plots.@plot_files'),
                ('displacement_files', 'motion_outliers.@displacement_files'),
                ('intensity_files', 'motion_outliers.@intensity_files'),
                ('mask_files', 'motion_outliers.@mask_files'),
                ('statistic_files', 'motion_outliers.@statistic_files'),
                # ('norm_files', 'outliers.@norm_files'),
            ])
    ])
    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')
    """
    Threshold the first run of functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')
    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]
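
    # Worked example (values assumed for illustration): if getthresh returns
    # out_stat = [[15.0, 800.0]] (the 2nd and 98th percentiles of a run), the
    # op_string becomes '-thr 80.0000000000 -Tmin -bin', i.e. a threshold at
    # 10% of the 98th percentile.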

    featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold,
                        'op_string')
    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')

    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    # featpreproc.connect(b0_unwarp, 'out.funcs', smooth, 'inputnode.in_files')
    if False:
        featpreproc.connect(reg_funcs, 'out_file', smooth,
                            'inputnode.in_files')
    else:
        featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py
    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3,
                        'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2), name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]

    # maskfunc2 is the functional data before SUSAN
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist), concatnode,
                            'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')
    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')
    """
    Scale the median value of the run is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')
    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                        'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(inputnode, ('highpass', highpass_operand), highpass,
                        'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(highpass, 'out_file', outputnode,
                            'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(addmean, 'out_file', outputnode,
                            'highpassed_files')
    """
    Generate a mean functional image from the first run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles,
                        'highpassed_files')

    return featpreproc
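
A minimal sketch of how this workflow might be driven (an illustration, not part of the original source; ds_root and data_dir are module-level names the function already relies on, and the input values are placeholders):

# Hypothetical driver, assuming the surrounding module defines ds_root/data_dir
featpreproc = create_workflow()
featpreproc.inputs.inputspec.subject_id = 'eddy'
featpreproc.inputs.inputspec.session_id = '20170607'
featpreproc.inputs.inputspec.funcs = ['sub-eddy_ses-20170607_task-RestingPRF_run-02_bold.nii.gz']  # placeholder
featpreproc.inputs.inputspec.fwhm = 6.0
featpreproc.inputs.inputspec.highpass = 50  # sigma in volumes, formatted into -bptf
featpreproc.run()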
Example 7
def modelfit_fsl(wf_name='modelfit'):
    """

    Fit 1st level GLM using FSL routines

    Usage (TODO)

    modelfit.inputs.inputspec.fwhm = 12
    modelfit.inputs.inputspec.brain_mask = ['/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz', '/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz']

    modelfit.inputs.inputspec.input_units = 'secs'
    modelfit.inputs.inputspec.in_file = ['/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz', '/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz']
    modelfit.inputs.inputspec.TR = 2
    modelfit.inputs.inputspec.high_pass_filter_cutoff = 100 #sigma in TR
    modelfit.inputs.inputspec.event_files = ['/opt/shared2/nipype-test/testblock/a']

    cont1 = ['whisker', 'T', ['a', 'a'], [1.0, 0.0]]
    cont2 = ['-whisker', 'T', ['a', 'a'], [-1.0, 0.0]]
    cont3 = ['Task','F', [cont1, cont2]]
    contrasts = [cont1]

    modelfit.inputs.inputspec.contrasts = contrasts #TODO: change condition names

    modelfit.inputs.inputspec.bases_function = {'dgamma': {'derivs':  True}}
    modelfit.inputs.inputspec.model_serial_correlations = True


    #modelfit.write_graph('graph.dot');
    modelfit.write_graph('graph.dot', graph2use='colored');
    x=modelfit.run()
    #x=modelfit.run(plugin='MultiProc', plugin_args={'n_procs': 8})

    server.serve_content(modelfit)
    """

    modelfit = pe.Workflow(name=wf_name)
    """
        Set up a node to define all inputs required for the preprocessing workflow

    """

    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=[
            'in_file', 'ev_file', 'confounders', 'contrasts',
            'high_pass_filter_cutoff', 'fwhm', 'interscan_interval', 'TR',
            'input_units', 'bases_function', 'model_serial_correlations',
            'brain_mask'
        ],
        mandatory_inputs=True),
                        name='inputspec')

    #TODO: eliminate brain mask

    #inputnode.iterables=[('high_pass_filter_cutoff', [30, 60, 90, 120, 500])]
    """
        Set up a node to define outputs for the preprocessing workflow

    """

    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['zstats', 'zfstats', 'copes', 'varcopes'],
        mandatory_inputs=True),
                         name='outputspec')

    # collect subject info

    getsubjectinfo = pe.MapNode(util.Function(
        input_names=['ev_file', 'confounders'],
        output_names=['subject_info'],
        function=get_subject_info),
                                name='getsubjectinfo',
                                iterfield=['confounders'])

    # nipype.algorithms.modelgen.SpecifyModel to generate design information.

    modelspec = pe.MapNode(interface=model.SpecifyModel(),
                           name="modelspec",
                           iterfield=['subject_info'])

    # smooth #TODO: move into preproc pipeline

    smooth = preproc.create_susan_smooth("smooth")
    #smooth.get_node( "smooth").iterables=[('fwhm', [6., 8., 10., 12., 14., 16.])]

    toSigma = pe.Node(interface=util.Function(
        input_names=['high_pass_filter_cutoff', 'TR'],
        output_names=['high_pass_filter_opstring'],
        function=highpass_operand),
                      name='toSigma')

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt',
                                                   op_string=''),
                          iterfield=['in_file'],
                          name='highpass')

    # Use nipype.interfaces.fsl.Level1Design to generate a run specific fsf file for analysis

    level1design = pe.MapNode(interface=fsl.Level1Design(),
                              name="level1design",
                              iterfield='session_info')

    # Use nipype.interfaces.fsl.FEATModel to generate a run specific mat file for use by FILMGLS

    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])

    # Use nipype.interfaces.fsl.FILMGLS to estimate a model specified by a mat file and a functional run

    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5,
                              threshold=200),
        name='modelestimate',
        #iterfield=['design_file', 'in_file'])
        iterfield=['in_file', 'design_file'])

    # Use nipype.interfaces.fsl.ContrastMgr to generate contrast estimates

    conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                             name='conestimate',
                             iterfield=[
                                 'param_estimates', 'sigmasquareds',
                                 'corrections', 'dof_file', 'tcon_file'
                             ])

    modelfit.connect([
        (
            inputnode,
            smooth,
            [
                ('in_file', 'inputnode.in_files'),
                ('fwhm', 'inputnode.fwhm'),  # in iterable
                ('brain_mask', 'inputnode.mask_file')
            ]),
        (smooth, highpass, [('outputnode.smoothed_files', 'in_file')]),
        (inputnode, toSigma, [('high_pass_filter_cutoff',
                               'high_pass_filter_cutoff')]),
        (inputnode, toSigma, [('TR', 'TR')]),
        (toSigma, highpass, [('high_pass_filter_opstring', 'op_string')]),
        (inputnode, getsubjectinfo, [('ev_file', 'ev_file'),
                                     ('confounders', 'confounders')]),
        (getsubjectinfo, modelspec, [('subject_info', 'subject_info')]),
        (highpass, modelspec, [('out_file', 'functional_runs')]),
        (highpass, modelestimate, [('out_file', 'in_file')]),
        (inputnode, modelspec, [
            ('input_units', 'input_units'),
            ('TR', 'time_repetition'),
            ('high_pass_filter_cutoff', 'high_pass_filter_cutoff'),
        ]),
        (inputnode, level1design, [('TR', 'interscan_interval'),
                                   ('model_serial_correlations',
                                    'model_serial_correlations'),
                                   ('bases_function', 'bases'),
                                   ('contrasts', 'contrasts')]),
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate, [('param_estimates', 'param_estimates'),
                                      ('sigmasquareds', 'sigmasquareds'),
                                      ('corrections', 'corrections'),
                                      ('dof_file', 'dof_file')]),
        (conestimate, outputnode, [('zstats', 'zstats'),
                                   ('zfstats', 'zfstats'), ('copes', 'copes'),
                                   ('varcopes', 'varcopes')])
    ])

    return modelfit
Example 8
get_element = pe.MapNode(interface=util.Function(input_names=['list', 'index'],
                                                 output_names=['out'],
                                                 function=get_element),
                         name='get_element',
                         iterfield=['list'])
get_element.inputs.index = 0
pipe.connect(graph, 'outputspec.centrality_outputs', get_element, 'list')

########################################################################################################################
# get_zscore
#from CPAC.utils import get_zscore

#zscore=cent.get_zscore('EC',wf_name='Z')
#zscore.inputs.inputspec.mask_file='/opt/shared2/Autista_Modell1/gm-mask-5mm.nii.gz'
#pipe.connect(get_element, 'out', zscore, 'inputspec.input_file')


########################################################################################################################
# smooth centrality maps
smooth = prpc.create_susan_smooth("smooth_centrality_s10", separate_masks=False)
#smooth.get_node( "smooth").iterables=[('fwhm', [4., 6., 8., 10., 12., 14.])] #TODO: to sigma???
smooth.inputs.inputnode.fwhm = 12.
smooth.inputs.inputnode.mask_file = '/opt/shared2/Autista_Modell1/gm-mask-5mm.nii.gz'

#pipe.connect(zscore, 'outputspec.z_score_img', smooth, 'inputnode.in_files')
pipe.connect(get_element, 'out', smooth, 'inputnode.in_files')

########################################################################################################################
# permutation-test for centrality

groupmodelfit = model.modelfit_2ndlevel("09_permutation_test_s12_v4", method='randomise_parallel', standardize=False)
groupmodelfit.inputs.inputspec.std_brain = '/opt/shared/etc/std/new/standard-wistar_5mm_brain.nii.gz'
groupmodelfit.inputs.inputspec.std_brain_mask = '/opt/shared2/Autista_Modell1/gm-mask-5mm.nii.gz'

groupmodelfit.inputs.inputspec.regressors = _REGRESSORS_
Example 9
def process_rest(wf_name='proc_rest'):
    def get_element(list, index):
        return list[index]

    rest = pe.Workflow(wf_name)
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=['preprocessed_func', 'func_brain_mask']),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['out']),
                         name='outputspec')

    ########################################################################################################################
    # node for bandpass filter
    bandpass_filter = pe.MapNode(
        fsl.TemporalFilter(),  # TODO: into preprocess workflow
        name='05_bandpass_filter_cc',
        iterfield=['in_file'])

    #bandpass_filter.inputs.highpass_sigma = 100 / (2 * _TR_)
    #bandpass_filter.inputs.lowpass_sigma = 12.5 / (2 * _TR_)

    # 1. frequency band: 0.1-0.05 Hz
    # 2. frequency band: 0.05-0.02 Hz
    # 3. frequency band: 0.02-0.01 Hz
    # 4. frequency band: 0.01-0.002 Hz
    # 5. frequency band: 0.1-0.01 Hz # standard broadband

    bandpass_filter.iterables = [('highpass_sigma', [
        20 / (2 * _TR_), 50 / (2 * _TR_), 100 / (2 * _TR_), 500 / (2 * _TR_),
        100 / (2 * _TR_)
    ]),
                                 ('lowpass_sigma', [
                                     10 / (2 * _TR_), 20 / (2 * _TR_),
                                     50 / (2 * _TR_), 100 / (2 * _TR_),
                                     10 / (2 * _TR_)
                                 ])]

    bandpass_filter.synchronize = True
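
    # Worked example (assuming _TR_ = 2.0 s): a 100 s highpass cutoff gives
    # sigma = 100 / (2 * 2.0) = 25, since FSL's TemporalFilter expects its
    # sigmas in volumes rather than seconds.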

    rest.connect(inputnode, 'preprocessed_func', bandpass_filter, 'in_file')
    #pipe.connect(changedim, 'out_file', bandpass_filter, 'in_file')

    ########################################################################################################################
    # node for reho
    import mypype.workflows.rest.reho as reho_wf
    reho = reho_wf.create_reho(wf_name="06_reho_cc")
    reho.inputs.inputspec.cluster_size = 27
    rest.connect(bandpass_filter, 'out_file', reho, 'inputspec.rest_res_filt')
    rest.connect(inputnode, 'func_brain_mask', reho, 'inputspec.rest_mask')

    ########################################################################################################################
    # smooth reho maps

    smooth = prpc.create_susan_smooth("08_smooth_reho_cc")
    smooth.inputs.inputnode.fwhm = 12.
    #smooth.get_node( "smooth").iterables=[('fwhm', [4., 6., 8., 10., 12., 14.])] #TODO: to sigma???

    rest.connect(reho, 'outputspec.raw_reho_map', smooth, 'inputnode.in_files')
    rest.connect(inputnode, 'func_brain_mask', smooth, 'inputnode.mask_file')

    ########################################################################################################################
    # compute centrality

    #graph=cent.create_resting_state_graphs(wf_name='07_centrality_cc', multipleTemplate=True)
    #graph.inputs.centrality_options.method_options=[True, True]
    #graph.inputs.inputspec.method_option=1
    #graph.get_node( "calculate_centrality").iterables=[('method_option', [0,1,2]),
    #												   ('threshold_option', [1,1,2]),
    #												   ('threshold', [0.3, 0.3,0.6])]
    #graph.get_node( "calculate_centrality").synchronize = True
    #graph.inputs.inputspec.template = '/opt/shared/etc/std/new/standard-wistar_5mm_brain_mask.nii.gz'
    #graph.inputs.inputspec.threshold=0.5
    #graph.inputs.inputspec.threshold_option=1
    #graph.inputs.inputspec.weight_options=[False, True]

    #rest.connect(bandpass_filter, 'out_file', graph, 'inputspec.subject')
    #rest.connect(inputnode, 'func_brain_mask', graph, 'inputspec.template')

    ########################################################################################################################
    # get fisrt element of results

    #get_element=pe.MapNode(interface=util.Function(input_names = ['list','index'],
    #								   output_names = ['out'],
    #								   function = get_element),
    #								   name = 'get_element',
    #									  iterfield=['list'])
    #get_element.inputs.index=0
    #rest.connect(graph, 'outputspec.centrality_outputs', get_element, 'list')

    ########################################################################################################################
    # smooth centrality maps

    #smoothc = prpc.create_susan_smooth("09_smooth_centrality_cc", separate_masks=True)
    #smooth.get_node( "smooth").iterables=[('fwhm', [4., 6., 8., 10., 12., 14.])] #TODO: to sigma???
    #smoothc.inputs.inputnode.fwhm=12.

    #rest.connect(inputnode, 'func_brain_mask', smoothc, 'inputnode.mask_file')
    #rest.connect(get_element, 'out', smoothc, 'inputnode.in_files')
    return rest
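
A hedged usage sketch (an illustration, not part of the original source; _TR_ must be defined at module level before the workflow is built, and the file names are placeholders):

rest = process_rest('proc_rest')
rest.inputs.inputspec.preprocessed_func = ['func_preproc.nii.gz']  # placeholder
rest.inputs.inputspec.func_brain_mask = 'func_brain_mask.nii.gz'  # placeholder
rest.run()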
Example 10
def first_level_wf(in_files, output_dir, fwhm=6.0, name='wf_1st_level'):
    workflow = pe.Workflow(name=name)
    datasource = pe.Node(niu.Function(function=_dict_ds,
                                      output_names=DATA_ITEMS),
                         name='datasource')
    datasource.inputs.in_dict = in_files
    datasource.iterables = ('sub', sorted(in_files.keys()))

    # Extract motion parameters from regressors file
    runinfo = pe.Node(niu.Function(input_names=[
        'in_file', 'events_file', 'regressors_file', 'regressors_names'
    ],
                                   function=_bids2nipypeinfo,
                                   output_names=['info', 'realign_file']),
                      name='runinfo')

    # Set the column names to be used from the confounds file
    runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
        ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]

    # SUSAN smoothing
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = fwhm

    l1_spec = pe.Node(SpecifyModel(parameter_source='FSL',
                                   input_units='secs',
                                   high_pass_filter_cutoff=100),
                      name='l1_spec')

    # l1_model creates a first-level model design
    l1_model = pe.Node(
        fsl.Level1Design(
            bases={'dgamma': {
                'derivs': True
            }},
            model_serial_correlations=True,
            #ENTER YOUR OWN CONTRAST HERE
            contrasts=[],
            # orthogonalization=orthogonality,
        ),
        name='l1_model')

    # feat_spec generates an fsf model specification file
    feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')
    # feat_fit actually runs FEAT
    feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=12)

    feat_select = pe.Node(nio.SelectFiles({
        'cope': 'stats/cope1.nii.gz',
        'pe': 'stats/pe[0-9][0-9].nii.gz',
        'tstat': 'stats/tstat1.nii.gz',
        'varcope': 'stats/varcope1.nii.gz',
        'zstat': 'stats/zstat1.nii.gz',
    }),
                          name='feat_select')

    ds_cope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                          keep_dtype=False,
                                          suffix='cope',
                                          desc='intask'),
                      name='ds_cope',
                      run_without_submitting=True)

    ds_varcope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                             keep_dtype=False,
                                             suffix='varcope',
                                             desc='intask'),
                         name='ds_varcope',
                         run_without_submitting=True)

    ds_zstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='zstat',
                                           desc='intask'),
                       name='ds_zstat',
                       run_without_submitting=True)

    ds_tstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='tstat',
                                           desc='intask'),
                       name='ds_tstat',
                       run_without_submitting=True)

    workflow.connect([
        (datasource, susan, [('bold', 'inputnode.in_files'),
                             ('mask', 'inputnode.mask_file')]),
        (datasource, runinfo, [('events', 'events_file'),
                               ('regressors', 'regressors_file')]),
        (susan, l1_spec, [('outputnode.smoothed_files', 'functional_runs')]),
        (datasource, l1_spec, [('tr', 'time_repetition')]),
        (datasource, l1_model, [('tr', 'interscan_interval')]),
        (datasource, ds_cope, [('bold', 'source_file')]),
        (datasource, ds_varcope, [('bold', 'source_file')]),
        (datasource, ds_zstat, [('bold', 'source_file')]),
        (datasource, ds_tstat, [('bold', 'source_file')]),
        (susan, runinfo, [('outputnode.smoothed_files', 'in_file')]),
        (runinfo, l1_spec, [('info', 'subject_info'),
                            ('realign_file', 'realignment_parameters')]),
        (l1_spec, l1_model, [('session_info', 'session_info')]),
        (l1_model, feat_spec, [('fsf_files', 'fsf_file'),
                               ('ev_files', 'ev_files')]),
        (l1_model, feat_fit, [('fsf_files', 'fsf_file')]),
        (feat_fit, feat_select, [('feat_dir', 'base_directory')]),
        (feat_select, ds_cope, [('cope', 'in_file')]),
        (feat_select, ds_varcope, [('varcope', 'in_file')]),
        (feat_select, ds_zstat, [('zstat', 'in_file')]),
        (feat_select, ds_tstat, [('tstat', 'in_file')]),
    ])
    return workflow
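
A hedged usage sketch (an illustration; the in_files layout is an assumption inferred from _dict_ds, DATA_ITEMS and the datasource connections above):

# Hypothetical input dictionary; keys are subject IDs, values map the
# per-subject items ('bold', 'mask', 'events', 'regressors', 'tr') to files
in_files = {
    'sub-01': {
        'bold': 'sub-01_task-x_desc-preproc_bold.nii.gz',
        'mask': 'sub-01_task-x_desc-brain_mask.nii.gz',
        'events': 'sub-01_task-x_events.tsv',
        'regressors': 'sub-01_task-x_desc-confounds_regressors.tsv',
        'tr': 2.0,
    },
}
wf = first_level_wf(in_files, output_dir='/tmp/first_level')
wf.base_dir = '/tmp/work'  # assumption: any scratch directory will do
wf.run()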
Example 11
def rs_preprocess(in_file, fwhm, work_dir, output_dir):

    # pickfirst is the single-file selector helper from the same nipype module
    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth, pickfirst
    from nipype.interfaces.fsl.model import GLM
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import os.path as op
    import shutil
    from glob import glob


    # define nodes and workflows
    rs_preproc_workflow = pe.Workflow(name="rs_preproc_workflow")
    rs_preproc_workflow.base_dir = work_dir

    inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', 'fwhm']), name='inputspec')
    inputnode.inputs.func = in_file
    inputnode.inputs.fwhm = fwhm

    # make a brain mask
    immask = pe.Node(interface=fsl.ImageMaths(op_string='-abs -bin -Tmin'), name='immask')
    rs_preproc_workflow.connect(inputnode, 'func', immask, 'in_file')

    # calculate the mean image of the input, for adding back after GSR
    immean = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean'), name='immean')
    rs_preproc_workflow.connect(inputnode, 'func', immean, 'in_file')

    # get the global (whole-brain mean) time series for GSR
    meants = pe.Node(interface=fsl.utils.ImageMeants(), name='meants')
    rs_preproc_workflow.connect(inputnode, 'func', meants, 'in_file')
    rs_preproc_workflow.connect(immask, 'out_file', meants, 'mask')

    # remove the global signal
    glm = pe.Node(interface=GLM(), name='glm')
    glm.inputs.out_res_name = op.join(work_dir, 'res4d.nii.gz')
    rs_preproc_workflow.connect(inputnode, 'func', glm, 'in_file')
    rs_preproc_workflow.connect(immask, 'out_file', glm, 'mask')
    rs_preproc_workflow.connect(meants, 'out_file', glm, 'design')

    # add the mean back to the GSR'ed image
    maths = pe.Node(interface=fsl.maths.BinaryMaths(operation='add'), name='maths')
    rs_preproc_workflow.connect(glm, 'out_res', maths, 'in_file')
    rs_preproc_workflow.connect(immean, 'out_file', maths, 'operand_file')

    # do the smoothing (SUSAN)
    smooth = create_susan_smooth()
    rs_preproc_workflow.connect(maths, 'out_file', smooth, 'inputnode.in_files')
    rs_preproc_workflow.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    rs_preproc_workflow.connect(immask, 'out_file', smooth, 'inputnode.mask_file')

    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.base_directory = work_dir

    rs_preproc_workflow.connect(maths, 'out_file', datasink, 'gsr')
    rs_preproc_workflow.connect(immask, 'out_file', datasink, 'mask')
    rs_preproc_workflow.connect(smooth.get_node('smooth'), ('smoothed_file', pickfirst), datasink, 'gsr_smooth')

    rs_preproc_workflow.run()

    # copy outputs to the output directory
    gsr_fn = glob(op.join(work_dir, 'gsr', '*.nii.gz'))[0]
    mask_fn = glob(op.join(work_dir, 'mask', '*.nii.gz'))[0]
    gsr_smooth_fn = glob(op.join(work_dir, 'gsr_smooth', '*', '*.nii.gz'))[0]
    gsr_fn2 = op.join(output_dir, '{0}.nii.gz'.format(op.basename(in_file).split('.')[0]))
    mask_fn2 = op.join(output_dir, '{0}_mask.nii.gz'.format(op.basename(in_file).split('.')[0]))
    gsr_smooth_fn2 = op.join(output_dir, '{0}_smooth.nii.gz'.format(op.basename(in_file).split('.')[0]))

    shutil.copyfile(gsr_fn, gsr_fn2)
    shutil.copyfile(mask_fn, mask_fn2)
    shutil.copyfile(gsr_smooth_fn, gsr_smooth_fn2)

    shutil.rmtree(work_dir)
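
A minimal usage sketch (all paths here are hypothetical, not from the original example):

if __name__ == '__main__':
    import os
    func = '/data/sub-01_task-rest_bold.nii.gz'   # hypothetical input run
    work_dir = '/tmp/rs_work'                     # hypothetical scratch space
    output_dir = '/tmp/rs_out'                    # hypothetical output folder
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)                   # final outputs are copied here
    rs_preprocess(func, fwhm=6.0, work_dir=work_dir, output_dir=output_dir)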
Example n. 12
def create_extended_susan_workflow(name='extended_susan', separate_masks=True):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.utility import IdentityInterface, Merge, Select
    from nipype.interfaces.io import DataSink
    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth

    # small helpers, following the conventions of nipype's create_featreg_preproc
    def getthreshop(thresh):
        # build an fslmaths op_string thresholding at 10% of the 98th percentile
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]

    def getmeanscale(medianvals):
        # build an fslmaths op_string scaling each run's median to 10000
        return ['-mul %.10f' % (10000. / val) for val in medianvals]

    def chooseindex(fwhm):
        # pick unsmoothed data (index 0) when fwhm < 1, else smoothed (index 1)
        return [0] if fwhm < 1 else [1]

    def tolist(x):
        return [x]

    def pickfirst(files):
        return files[0] if isinstance(files, list) else files

    input_node = pe.Node(IdentityInterface(fields=['in_file',
                                                   'fwhm',
                                                   'EPI_session_space',
                                                   'output_directory',
                                                   'sub_id']), name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(fields=['smoothed_files',
                                                              'mask',
                                                              'mean']), name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    # first link the workflow's output_directory into the datasink.

    esw = pe.Workflow(name=name)

    esw.connect(input_node, 'output_directory', datasink, 'base_directory')
    esw.connect(input_node, 'sub_id', datasink, 'container')

    meanfuncmask = pe.Node(interface=fsl.BET(mask=True,
                                             no_output=True,
                                             frac=0.3),
                           name='meanfuncmask')

    esw.connect(input_node, 'EPI_session_space', meanfuncmask, 'in_file')

    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name='maskfunc')

    esw.connect(input_node, 'in_file', maskfunc, 'in_file')
    esw.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2')

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """

    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    esw.connect(maskfunc, 'out_file', getthresh, 'in_file')

    """
    Threshold each run of the functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')

    esw.connect(maskfunc, 'out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """

    esw.connect(getthresh, ('out_stat', getthreshop), threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    esw.connect(input_node, 'in_file', medianval, 'in_file')
    esw.connect(threshold, 'out_file', medianval, 'mask_file')

    """
    Dilate the mask
    """

    dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                     op_string='-dilF'),
                            iterfield=['in_file'],
                            name='dilatemask')
    esw.connect(threshold, 'out_file', dilatemask, 'in_file')
    esw.connect(dilatemask, 'out_file', output_node, 'mask')

    """
    Mask the motion corrected functional runs with the dilated mask
    """

    maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc2')
    esw.connect(input_node, 'in_file', maskfunc2, 'in_file')
    esw.connect(dilatemask, 'out_file', maskfunc2, 'in_file2')

    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth(separate_masks=separate_masks)

    esw.connect(input_node, 'fwhm', smooth, 'inputnode.fwhm')
    esw.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files')
    esw.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    esw.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    esw.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=Merge(2),
                         name='concat')
    esw.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1')
    esw.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=Select(), name='select')

    esw.connect(concatnode, 'out', selectnode, 'inlist')

    esw.connect(input_node, ('fwhm', chooseindex), selectnode, 'index')
    esw.connect(selectnode, 'out', output_node, 'smoothed_files')

    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    esw.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    esw.connect(medianval, ('out_stat', getmeanscale), meanscale, 'op_string')

    """
    Generate a mean functional image from the first run
    """

    meanfunc3 = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean',
                                                 suffix='_mean'),
                        name='meanfunc3')

    esw.connect(meanscale, ('out_file', pickfirst), meanfunc3, 'in_file')
    esw.connect(meanfunc3, 'out_file', output_node, 'mean')

    # Datasink
    esw.connect(meanscale, 'out_file', datasink, 'filtering')
    esw.connect(selectnode, 'out', datasink, 'filtering.@smoothed')
    esw.connect(dilatemask, 'out_file', datasink, 'filtering.@mask')

    return esw
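
A minimal usage sketch (paths and parameters are hypothetical; in_file takes a
list of functional runs and EPI_session_space a mean EPI image):

esw = create_extended_susan_workflow(name='esw', separate_masks=True)
esw.inputs.inputspec.in_file = ['/data/sub-01_run-01_bold.nii.gz']        # hypothetical
esw.inputs.inputspec.EPI_session_space = '/data/sub-01_mean_bold.nii.gz'  # hypothetical
esw.inputs.inputspec.fwhm = 5.0
esw.inputs.inputspec.output_directory = '/data/derivatives'               # hypothetical
esw.inputs.inputspec.sub_id = 'sub-01'
esw.run()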
Example n. 13

def create_Regressor(motionMatrix, outlierMatrix, CSFinput, WMinput):
    import os
    import pandas as pd
    df_motion = pd.read_table(motionMatrix)
    df_outlier = pd.read_table(outlierMatrix, sep='   ', engine='python')
    df_CSF = pd.read_table(CSFinput)
    df_WM = pd.read_table(WMinput)
    df_concat = pd.concat([df_motion,df_outlier,df_CSF,df_WM], ignore_index=False, axis=1)
    df_concat.to_csv('covarOut.txt', sep='\t', header=True, index=False)
    return os.path.abspath('covarOut.txt')

createRegressor = pe.Node(Function(
    input_names=['motionMatrix','outlierMatrix','CSFinput','WMinput'],
    output_names=['out_file'],
    function=create_Regressor),
    name='createRegressor')


# Smoothing workflow (SUSAN)
smoothWF = create_susan_smooth()
smoothWF.inputs.inputnode.fwhm = fwhmVal
smoothWF.inputs.inputnode.mask_file = MNI_2mm_brain_mask

def smoothOut(susanList):
    return susanList[0]

# Mask functional image with extracted brain mask
finalMask = pe.Node(fsl.maths.ApplyMask(
    mask_file=MNI_2mm_brain_mask,
    output_type='NIFTI_GZ'),
    name='finalMask')
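
# A minimal wiring sketch (hypothetical, not part of the original fragment):
# a parent workflow could unpack SUSAN's list output with smoothOut and feed
# the result to finalMask.
funcWF = pe.Workflow(name='funcWF')  # hypothetical parent workflow
funcWF.connect(smoothWF, ('outputnode.smoothed_files', smoothOut),
               finalMask, 'in_file')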

### Workflow connections ### 
### Anatomical sub workflow ###
anatWF = pe.Workflow(name='anatWF')

Example n. 14

def create_lvl1pipe_wf(options):
    '''
    Input [Mandatory]:
        ~~~~~~~~~~~ Set in command call:
        options: dictionary with the following entries
            remove_steadystateoutlier [boolean]:
                Should always be True. Remove steady state outliers from bold timecourse, specified in fmriprep confounds file.
            smooth [boolean]:
                If True, then /smooth subfolder created and populated with results. If False, then /nosmooth subfolder created and populated with results.
            censoring [string]:
                Either '' or 'despike', which implements nipype.interfaces.afni.Despike
            ICA_AROMA [boolean]:
                Use AROMA error components, from fmriprep confounds file.
            run_contrasts [boolean]:
                If False, then components related to contrasts and p values are removed from nipype.workflows.fmri.fsl.estimate.create_modelfit_workflow()
            keep_resid [boolean]:
                If False, then only sum of squares residuals will be outputted. If True, then timecourse residuals kept.
            poly_trend [integer. Use None to skip]:
                If given, polynomial trends will be added to run confounds, up to the order of the integer
                e.g. "0", gives an intercept, "1" gives intercept + linear trend,
                "2" gives intercept + linear trend + quadratic.
                DO NOT use in conjunction with high pass filters.
            dct_basis [integer. Use None to skip]:
                If given, adds a discrete cosine transform, with a length (in seconds) of the integer specified.
                    Adds unit scaled cosine basis functions to Design_Matrix columns,
                    based on spm-style discrete cosine transform for use in
                    high-pass filtering. Does not add intercept/constant.
                    DO NOT use in conjunction with high pass filters.
        ~~~~~~~~~~~ Set through inputs.inputspec
        input_dir [string]:
            path to folder containing fmriprep preprocessed data.
            e.g. model_wf.inputs.inputspec.input_dir = '/home/neuro/data'
        output_dir [string]:
            path to desired output folder. Workflow will create a new subfolder based on proj_name.
            e.g. model_wf.inputs.inputspec.output_dir = '/home/neuro/output'
        proj_name [string]:
            name for project subfolder within output_dir. Ideally something unique, or else workflow will write to an existing folder.
            e.g. model_wf.inputs.inputspec.proj_name = 'FSMAP_stress'
        design_col [string]:
            Name of column within events.tsv with values corresponding to entries specified in params.
            e.g. model_wf.inputs.inputspec.design_col = 'trial_type'
        params [list of strings]:
            values within events.tsv design_col that correspond to events to be modeled.
            e.g.  ['Instructions', 'Speech_prep', 'No_speech']
        conditions [list of either strings or lists]:
            each condition must be a string within the events.tsv design_col.
            These conditions correspond to event conditions to be modeled.
            Give a list, instead of a string, to model parametric terms.
            These parametric terms give an event condition, then a parametric term, which is another column in the events.tsv file.
            The parametric term can be centered and normed using entries 3 and 4 in the list.
            e.g. model_wf.inputs.inputspec.conditions = ['condition1',
                                                         'condition2',
                                                        ['condition1', 'parametric1', 'no_cent', 'no_norm'],
                                                        ['condition2', 'parametric2', 'cent', 'norm']]
                     entry 1 is a condition within the design_col column
                     entry 2 is a column in the events folder, which will be used for parametric weightings.
                     entry 3 is either 'no_cent', or 'cent', indicating whether to center the parametric variable.
                     entry 4 is either 'no_norm', or 'norm', indicating whether to normalize the parametric variable.
             Onsets and durations will be taken from the corresponding values for entry 1;
             the parametric weighting specified by entry 2 is scaled/centered as specified,
             then appended to the design matrix.
        contrasts [list of lists]:
            Specifies contrasts to be performed. using params selected above.
            e.g. model_wf.inputs.inputspec.contrasts =
                [['Instructions', 'T', ['Instructions'], [1]],
                 ['Speech_prep', 'T', ['Speech_prep'], [1]],
                 ['No_speech', 'T', ['No_speech'], [1]],
                 ['Speech_prep>No_speech', 'T', ['Speech_prep', 'No_speech'], [1, -1]]]
        noise_regressors [list of strings]:
            column names in confounds.tsv, specifying desired noise regressors for model.
            IF noise_transforms are to be applied to a regressor, add '*' to the name.
            e.g. model_wf.inputs.inputspec.noise_regressors = ['CSF', 'WhiteMatter', 'GlobalSignal', 'X*', 'Y*', 'Z*', 'RotX*', 'RotY*', 'RotZ*']
        noise_transforms [list of strings]:
            noise transforms to be applied to select noise_regressors above. Possible values are 'quad', 'tderiv', and 'quadtderiv', standing for quadratic function of value, temporal derivative of value, and quadratic function of temporal derivative.
            e.g. model_wf.inputs.inputspec.noise_transforms = ['quad', 'tderiv', 'quadtderiv']
        TR [float]:
            Scanner TR value in seconds.
            e.g. model_wf.inputs.inputspec.TR = 2.
        FILM_threshold [integer]:
            Cutoff value for modeling threshold. 1000: p <.001; 1: p <=1, i.e. unthresholded.
            e.g. model_wf.inputs.inputspec.FILM_threshold = 1
        hpf_cutoff [float]:
            high pass filter value. DO NOT USE THIS in conjunction with poly_trend or dct_basis.
            e.g. model_wf.inputs.inputspec.hpf_cutoff = 120.
        bases: (a dictionary with keys which are 'hrf' or 'fourier' or 'fourier_han' or 'gamma' or 'fir' and with values which are any value)
             dict {'name':{'basesparam1':val,...}}
             name : string
             Name of basis function (hrf, fourier, fourier_han, gamma, fir)
             hrf :
                 derivs : 2-element list
                    Model HRF Derivatives. No derivatives: [0,0],
                    Time derivatives : [1,0],
                    Time and Dispersion derivatives: [1,1]
             fourier, fourier_han, gamma, fir:
                 length : int
                    Post-stimulus window length (in seconds)
                 order : int
                    Number of basis functions
            e.g. model_wf.inputs.inputspec.bases = {'dgamma':{'derivs': False}}
        model_serial_correlations [boolean]:
            Allow prewhitening, with 5mm spatial smoothing.
            model_wf.inputs.inputspec.model_serial_correlations = True
        sinker_subs [list of tuples]:
            passed to nipype.interfaces.io.Datasink. Changes names when passing to output directory.
            e.g. model_wf.inputs.inputspec.sinker_subs =
                [('pe1', 'pe1_instructions'),
                 ('pe2', 'pe2_speech_prep'),
                 ('pe3', 'pe3_no_speech')]
        bold_template [dictionary with string entry]:
            Specifies path, with wildcard, to grab all relevant BOLD files. Each subject_list entry should uniquely identify the ONE relevant file.
            e.g. model_wf.inputs.inputspec.bold_template =
                {'bold': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_space-MNI152NLin2009cAsym_preproc.nii.gz'}
                 This would grab the functional run for all subjects, and when subject_id = 'sub-001', there is ONE file in the list that the ID could possibly correspond to.
                To handle multiple runs, list the run information in the subject_id. e.g. 'sub-01_task-trag_run-01'.
        mask_template [dictionary with string entry]:
            Specifies path, with wildcard, to grab all relevant MASK files, corresponding to functional images. Each subject_list entry should uniquely identify the ONE relevant file.
            e.g. model_wf.inputs.inputspec.mask_template =
            {'mask': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_space-MNI152NLin2009cAsym_brainmask.nii.gz'}
            See bold_template for more detail.
        task_template [dictionary with string entry]:
            Specifies path, with wildcard, to grab all relevant events.tsv files, corresponding to functional images. Each subject_list entry should uniquely identify the ONE relevant file.
            e.g. model_wf.inputs.inputspec.task_template =
            {'task': '/home/neuro/data/sub-*/func/sub-*_task-stress_events.tsv'}
            See bold_template for more detail.
        confound_template [dictionary with string entry]:
            Specifies path, with wildcard, to grab all relevant confounds.tsv files, corresponding to functional images. Each subject_list entry should uniquely identify the ONE relevant file.
            e.g. model_wf.inputs.inputspec.confound_template =
            {'confound': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_confounds.tsv'}
            See bold_template for more detail.
        smooth_gm_mask_template [dictionary with string entry]:
            Specifies path, with wildcard, to grab all relevant grey matter mask .nii.gz files, pulling from each subject's /anat folder. Each subject_list entry should uniquely identify the ONE relevant file (BUT SEE THE NOTE BELOW).
            e.g. model_wf.inputs.inputspec.smooth_gm_mask_template =
                {'gm_mask': '/scratch/data/sub-*/anat/sub-*_T1w_space-MNI152NLin2009cAsym_class-GM_probtissue.nii.gz'}
                NOTE: If the subject_id value has more information than just the ID (e.g. sub-01_task-trag_run-01), then JUST the sub-01 portion will be used to identify the grey matter mask. This is because multiple runs will have the same anatomical data. i.e. sub-01_run-01, sub-01_run-02, sub-01_run-03, all correspond to sub-01_T1w_space-MNI152NLin2009cAsym_class-GM_probtissue.nii.gz.
        fwhm [float]: Redundant if options['smooth'] is False.
            Determines smoothing kernel. Multiple kernels can be run in parallel by iterating through an outside workflow. Also see subject_id below for another example of iterables.
            e.g.
                model_wf.inputs.inputspec.fwhm = 1.5
            OR Iterable e.g.
                import nipype.pipeline.engine as pe
                fwhm_list = [1.5, 6]
                infosource = pe.Node(IdentityInterface(fields=['fwhm']),
                           name='infosource')
                infosource.iterables = [('fwhm', fwhm_list)]
                full_model_wf = pe.Workflow(name='full_model_wf')
                full_model_wf.connect([(infosource, model_wf, [('fwhm', 'inputspec.fwhm')])])
                full_model_wf.run()
        subject_id [string]:
            Identifies subject in conjunction with template. See bold_template note above.
            Can also be entered as an iterable from an outside workflow, in which case iterables are run in parallel to the extent that cpu cores are available.
            e.g.
                model_wf.inputs.inputspec.subject_id = 'sub-01'
            OR Iterable e.g.
                import nipype.pipeline.engine as pe
                subject_list = ['sub-001', 'sub-002']
                infosource = pe.Node(IdentityInterface(fields=['subject_id']),
                           name='infosource')
                infosource.iterables = [('subject_id', subject_list)]
                full_model_wf = pe.Workflow(name='full_model_wf')
                full_model_wf.connect([(infosource, model_wf, [('subject_id', 'inputspec.subject_id')])])
                full_model_wf.run()
    '''
    import nipype.pipeline.engine as pe # pypeline engine
    import nipype.interfaces.fsl as fsl
    import os
    from nipype import IdentityInterface, SelectFiles
    from nipype.interfaces.utility.wrappers import Function

    ##################  Setup workflow.
    lvl1pipe_wf = pe.Workflow(name='lvl_one_pipe')

    inputspec = pe.Node(IdentityInterface(
        fields=['input_dir',
                'output_dir',
                'design_col',
                'noise_regressors',
                'noise_transforms',
                'TR', # in seconds.
                'FILM_threshold',
                'hpf_cutoff',
                'conditions',
                'contrasts',
                'bases',
                'model_serial_correlations',
                'sinker_subs',
                'bold_template',
                'mask_template',
                'task_template',
                'confound_template',
                'smooth_gm_mask_template',
                'gmmask_args',
                'subject_id',
                'fwhm',
                'proj_name',
                ],
        mandatory_inputs=False),
                 name='inputspec')

    ################## Select Files
    def get_file(subj_id, template):
        import glob
        temp_list = []
        out_list = []
        if '_' in subj_id and '/anat/' in list(template.values())[0]:
            subj_id = subj_id[:subj_id.find('_')]
            # if looking for gmmask, and subj_id includes additional info (e.g. sub-001_task-trag_run-01) then just take the subject id component, as the run info will not be present for the anatomical data.
        for x in glob.glob(list(template.values())[0]):
            if subj_id in x:
                temp_list.append(x)
        for file in temp_list: # ensure no duplicate entries.
            if file not in out_list:
                out_list.append(file)
        assert len(out_list) == 1, \
            'Each combination of template and subject ID should return exactly one file; %d were found.' % len(out_list)
        out_file = out_list[0]
        return out_file
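
    # For example (hypothetical paths): with template
    # {'bold': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold.nii.gz'}
    # and subj_id 'sub-01', get_file globs the template, keeps the matches
    # containing 'sub-01', and returns the single resulting path.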

    get_bold = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file),
                        name='get_bold')
    get_mask = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file),
                        name='get_mask')
    get_task = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file),
                        name='get_task')
    get_confile = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file),
                        name='get_confile')
    # get_bold.inputs.subj_id # From inputspec
    # get_bold.inputs.templates # From inputspec
    if options['smooth']:
        get_gmmask = pe.Node(Function(
            input_names=['subj_id', 'template'],
            output_names=['out_file'],
            function=get_file),
                            name='get_gmmask')

        mod_gmmask = pe.Node(fsl.maths.MathsCommand(),
                                name='mod_gmmask')
        # mod_gmmask.inputs.in_file = # from get_gmmask
        # mod_gmmask.inputs.args = from inputspec
        def fit_mask(mask_file, ref_file):
            from nilearn.image import resample_img
            import nibabel as nib
            import os
            out_file = resample_img(nib.load(mask_file),
                                   target_affine=nib.load(ref_file).affine,
                                   target_shape=nib.load(ref_file).shape[0:3],
                                   interpolation='nearest')
            out_mask = os.path.join(os.getcwd(), mask_file.split('.nii')[0]+'_fit.nii.gz')
            nib.save(out_file, out_mask)
            return out_mask

        fit_mask = pe.Node(Function(
            input_names=['mask_file', 'ref_file'],
            output_names=['out_mask'],
            function=fit_mask),
                            name='fit_mask')

    ################## Setup confounds
    def get_terms(confound_file, noise_transforms, noise_regressors, TR, options):
        '''
        Gathers confounds (and transformations) into a pandas dataframe.
        Input [Mandatory]:
            confound_file [string]: path to confound.tsv file, given by fmriprep.
            noise_transforms [list of strings]:
                noise transforms to be applied to select noise_regressors above. Possible values are 'quad', 'tderiv', and 'quadtderiv', standing for quadratic function of value, temporal derivative of value, and quadratic function of temporal derivative.
                e.g. model_wf.inputs.inputspec.noise_transforms = ['quad', 'tderiv', 'quadtderiv']
            noise_regressors [list of strings]:
                column names in confounds.tsv, specifying desired noise regressors for model.
                IF noise_transforms are to be applied to a regressor, add '*' to the name.
                e.g. model_wf.inputs.inputspec.noise_regressors = ['CSF', 'WhiteMatter', 'GlobalSignal', 'X*', 'Y*', 'Z*', 'RotX*', 'RotY*', 'RotZ*']
            TR [float]:
                Scanner TR value in seconds.
            options: dictionary with the following entries
                remove_steadystateoutlier [boolean]:
                    Should always be True. Remove steady state outliers from bold timecourse, specified in fmriprep confounds file.
                ICA_AROMA [boolean]:
                    Use AROMA error components, from fmriprep confounds file.
                poly_trend [integer. Use None to skip]:
                    If given, polynomial trends will be added to run confounds, up to the order of the integer
                    e.g. "0", gives an intercept, "1" gives intercept + linear trend,
                    "2" gives intercept + linear trend + quadratic.
                dct_basis [integer. Use None to skip]:
                    If given, adds a discrete cosine transform, with a length (in seconds) of the integer specified.
                        Adds unit scaled cosine basis functions to Design_Matrix columns,
                        based on spm-style discrete cosine transform for use in
                        high-pass filtering. Does not add intercept/constant.
        '''
        import numpy as np
        import pandas as pd
        from nltools.data import Design_Matrix

        df_cf = pd.DataFrame(pd.read_csv(confound_file, sep='\t', parse_dates=False))
        transfrm_list = []
        for idx, entry in enumerate(noise_regressors): # get entries marked with *, indicating they should be transformed.
            if '*' in entry:
                transfrm_list.append(entry.replace('*', '')) # add entry to transformation list if it has *.
                noise_regressors[idx] = entry.replace('*', '')

        confounds = df_cf[noise_regressors]
        transfrmd_cnfds = df_cf[transfrm_list] # for transforms
        TR_time = pd.Series(np.arange(0.0, TR*transfrmd_cnfds.shape[0], TR)) # time series for derivatives.
        if 'quad' in noise_transforms:
            quad = np.square(transfrmd_cnfds)
            confounds = confounds.join(quad, rsuffix='_quad')
        if 'tderiv' in noise_transforms:
            tderiv = pd.DataFrame(pd.Series(np.gradient(transfrmd_cnfds[col]), TR_time)
                                  for col in transfrmd_cnfds).T
            tderiv.columns = transfrmd_cnfds.columns
            tderiv.index = confounds.index
            confounds = confounds.join(tderiv, rsuffix='_tderiv')
        if 'quadtderiv' in noise_transforms: # requires 'tderiv' as well, since it squares the temporal derivative computed above.
            quadtderiv = np.square(tderiv)
            confounds = confounds.join(quadtderiv, rsuffix='_quadtderiv')
        if options['remove_steadystateoutlier']:
            if not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^non_steady_state_outlier')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^non_steady_state_outlier')]])
            elif not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^NonSteadyStateOutlier')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^NonSteadyStateOutlier')]]) # old syntax
        if options['ICA_AROMA']:
            if not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^aroma_motion')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^aroma_motion')]])
            elif not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^AROMAAggrComp')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^AROMAAggrComp')]]) # old syntax
        confounds = Design_Matrix(confounds, sampling_freq=1/TR)
        if isinstance(options['poly_trend'], int):
            confounds = confounds.add_poly(order = options['poly_trend']) # these do not play nice with high pass filters.
        if isinstance(options['dct_basis'], int):
            confounds = confounds.add_dct_basis(duration=options['dct_basis']) # these do not play nice with high pass filters.
        return confounds

    get_confounds = pe.Node(Function(input_names=['confound_file', 'noise_transforms',
                                                  'noise_regressors', 'TR', 'options'],
                                 output_names=['confounds'],
                                  function=get_terms),
                         name='get_confounds')
    # get_confounds.inputs.confound_file =  # From get_confile
    # get_confounds.inputs.noise_transforms =  # From inputspec
    # get_confounds.inputs.noise_regressors =  # From inputspec
    # get_confounds.inputs.TR =  # From inputspec
    get_confounds.inputs.options = options

    ################## Create bunch to run FSL first level model.
    def get_subj_info(task_file, design_col, confounds, conditions):
        '''
        Makes a Bunch, giving all necessary data about conditions, onsets, and durations to
            FSL first level model. Needs a task file to run.
        Inputs:
            task file [string], path to the subject events.tsv file, as per BIDS format.
            design_col [string], column name within task file, identifying event conditions to model.
            confounds [pandas dataframe], pd.df of confounds, gathered from get_confounds node.
            conditions [list],
                e.g. ['condition1',
                      'condition2',
                     ['condition1', 'parametric1', 'no_cent', 'no_norm'],
                     ['condition2', 'parametric2', 'cent', 'norm']]
                     each string entry (e.g. 'condition1') specifies an event condition in the design_col column.
                     each list entry includes 4 strings:
                         entry 1 is a condition within the design_col column
                         entry 2 is a column in the events folder, which will be used for parametric weightings.
                         entry 3 is either 'no_cent', or 'cent', indicating whether to center the parametric variable.
                         entry 4 is either 'no_norm', or 'norm', indicating whether to normalize the parametric variable.
                 Onsets and durations will be taken from the corresponding values for entry 1;
                 the parametric weighting specified by entry 2 is scaled/centered as specified,
                 then appended to the design matrix.
        '''
        from nipype.interfaces.base import Bunch
        import pandas as pd
        import numpy as np
        from sklearn.preprocessing import scale

        onsets = []
        durations = []
        amplitudes = []
        df = pd.read_csv(task_file, sep='\t', parse_dates=False)
        for idx, cond in enumerate(conditions):
            if isinstance(cond, list):
                if cond[2] == 'no_cent': # determine whether to center/scale
                    c = False
                elif cond[2] == 'cent':
                    c = True
                if cond[3] == 'no_norm':
                    n = False
                elif cond[3] == 'norm':
                    n = True
                # grab parametric terms.
                onsets.append(list(df[df[design_col] == cond[0]].onset))
                durations.append(list(df[df[design_col] == cond[0]].duration))
                amp_temp = list(scale(df[df[design_col] == cond[0]][cond[1]].tolist(),
                                   with_mean=c, with_std=n)) # scale
                amp_temp = pd.Series(amp_temp, dtype=object).fillna(0).tolist() # fill na
                amplitudes.append(amp_temp) # append
                conditions[idx] = cond[0]+'_'+cond[1] # combine condition/parametric names and replace.
            elif isinstance(cond, str):
                onsets.append(list(df[df[design_col] == cond].onset))
                durations.append(list(df[df[design_col] == cond].duration))
                # dummy code 1's for non-parametric conditions.
                amplitudes.append(list(np.repeat(1, len(df[df[design_col] == cond].onset))))
            else:
                print('cannot identify condition:', cond)
        output = Bunch(conditions= conditions,
                           onsets=onsets,
                           durations=durations,
                           amplitudes=amplitudes,
                           tmod=None,
                           pmod=None,
                           regressor_names=confounds.columns.values,
                           regressors=confounds.T.values.tolist()) # movement regressors added here. List of lists.
        return output

    make_bunch = pe.Node(Function(input_names=['task_file', 'design_col', 'confounds', 'conditions'],
                                 output_names=['subject_info'],
                                  function=get_subj_info),
                         name='make_bunch')
    # make_bunch.inputs.task_file =  # From get_task
    # make_bunch.inputs.confounds =  # From get_confounds
    # make_bunch.inputs.design_col =  # From inputspec
    # make_bunch.inputs.conditions =  # From inputspec

    def mk_outdir(output_dir, options, proj_name):
        import os
        from time import gmtime, strftime
        prefix = proj_name
        if options['smooth']:
            new_out_dir = os.path.join(output_dir, prefix, 'smooth')
        else:
            new_out_dir = os.path.join(output_dir, prefix, 'nosmooth')
        if not os.path.isdir(new_out_dir):
            os.makedirs(new_out_dir)
        return new_out_dir

    make_outdir = pe.Node(Function(input_names=['output_dir', 'options', 'proj_name'],
                                   output_names=['new_out_dir'],
                                   function=mk_outdir),
                          name='make_outdir')
    # make_outdir.inputs.proj_name = from inputspec
    # make_outdir.inputs.output_dir = from inputspec
    make_outdir.inputs.options = options


    ################## Mask functional data.
    from jtnipyutil.util import mask_img
    maskBold = pe.Node(Function(input_names=['img_file', 'mask_file'],
                                output_names=['out_file'],
                                function=mask_img),
                      name='maskBold')
    # maskBold.inputs.img_file # From get_bold, or smooth_wf
    # maskBold.inputs.mask_file # From get_mask

    ################## Despike
    from nipype.interfaces.afni import Despike
    despike = pe.Node(Despike(),
                      name='despike')
    # despike.inputs.in_file = # From Mask
    despike.inputs.outputtype = 'NIFTI_GZ'

    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
    smooth_wf = create_susan_smooth()
    # smooth_wf.inputs.inputnode.in_files = # from maskBold
    # smooth_wf.inputs.inputnode.fwhm = # from inputspec

    ################## Model Generation.
    import nipype.algorithms.modelgen as model
    specify_model = pe.Node(interface=model.SpecifyModel(), name='specify_model')
    specify_model.inputs.input_units = 'secs'
    # specify_model.functional_runs # From maskBold, despike, or smooth_wf
    # specify_model.subject_info # From make_bunch.outputs.subject_info
    # specify_model.high_pass_filter_cutoff # From inputspec
    # specify_model.time_repetition # From inputspec

    ################## Estimate workflow
    from nipype.workflows.fmri.fsl import estimate # fsl workflow
    modelfit = estimate.create_modelfit_workflow()
    modelfit.base_dir = '.'
    # modelfit.inputs.inputspec.session_info = # From specify_model
    # modelfit.inputs.inputspec.functional_data = # from maskBold
    # modelfit.inputs.inputspec.interscan_interval = # From inputspec
    # modelfit.inputs.inputspec.film_threshold = # From inputspec
    # modelfit.inputs.inputspec.bases = # From inputspec
    # modelfit.inputs.inputspec.model_serial_correlations = # From inputspec
    # modelfit.inputs.inputspec.contrasts = # From inputspec

    if not options['run_contrasts']: # drop contrast part of modelfit if contrasts aren't required.
        modelestimate = modelfit.get_node('modelestimate')
        merge_contrasts = modelfit.get_node('merge_contrasts')
        ztop = modelfit.get_node('ztop')
        outputspec = modelfit.get_node('outputspec')
        modelfit.disconnect([(modelestimate, merge_contrasts, [('zstats', 'in1'),
                                                             ('zfstats', 'in2')]),
                             (merge_contrasts, ztop, [('out', 'in_file')]),
                             (merge_contrasts, outputspec, [('out', 'zfiles')]),
                             (ztop, outputspec, [('out_file', 'pfiles')])
                            ])
        modelfit.remove_nodes([merge_contrasts, ztop])

    ################## DataSink
    from nipype.interfaces.io import DataSink
    import os.path
    sinker = pe.Node(DataSink(), name='sinker')
    # sinker.inputs.substitutions = # From inputspec
    # sinker.inputs.base_directory = # frm make_outdir

    def negate(input):
        return not input

    def unlist(input):
        return input[0]

    lvl1pipe_wf.connect([
        # grab subject/run info
        (inputspec, get_bold, [('subject_id', 'subj_id'),
                                ('bold_template', 'template')]),
        (inputspec, get_mask, [('subject_id', 'subj_id'),
                                ('mask_template', 'template')]),
        (inputspec, get_task, [('subject_id', 'subj_id'),
                                ('task_template', 'template')]),
        (inputspec, get_confile, [('subject_id', 'subj_id'),
                                ('confound_template', 'template')]),
        (inputspec, get_confounds, [('noise_transforms', 'noise_transforms'),
                                     ('noise_regressors', 'noise_regressors'),
                                     ('TR', 'TR')]),
        (inputspec, make_bunch, [('design_col', 'design_col'),
                                  ('conditions', 'conditions')]),
        (inputspec, make_outdir, [('output_dir', 'output_dir'),
                                  ('proj_name', 'proj_name')]),
        (inputspec, specify_model, [('hpf_cutoff', 'high_pass_filter_cutoff'),
                                     ('TR', 'time_repetition')]),
        (inputspec, modelfit, [('TR', 'inputspec.interscan_interval'),
                                ('FILM_threshold', 'inputspec.film_threshold'),
                                ('bases', 'inputspec.bases'),
                                ('model_serial_correlations', 'inputspec.model_serial_correlations'),
                                (('model_serial_correlations', negate), 'modelestimate.autocorr_noestimate'),
                                ('contrasts', 'inputspec.contrasts')]),
        (get_confile, get_confounds, [('out_file', 'confound_file')]),
        (get_confounds, make_bunch, [('confounds', 'confounds')]),
        (get_task, make_bunch, [('out_file', 'task_file')]),
        (make_bunch, specify_model, [('subject_info', 'subject_info')]),
        (get_mask, maskBold, [('out_file', 'mask_file')]),
        ])

    if options['censoring'] == 'despike':
        lvl1pipe_wf.connect([
            (get_bold, despike, [('out_file', 'in_file')])
            ])
        if options['smooth']:
            lvl1pipe_wf.connect([
                (inputspec, smooth_wf, [('fwhm', 'inputnode.fwhm')]),
                (inputspec, get_gmmask, [('subject_id', 'subj_id'),
                                        ('smooth_gm_mask_template', 'template')]),
                (get_gmmask, mod_gmmask, [('out_file', 'in_file')]),
                (inputspec, mod_gmmask, [('gmmask_args', 'args')]),
                (mod_gmmask, fit_mask, [('out_file', 'mask_file')]),
                (get_bold, fit_mask, [('out_file', 'ref_file')]),
                (fit_mask, smooth_wf, [('out_mask', 'inputnode.mask_file')]),
                (fit_mask, sinker, [('out_mask', 'smoothing_mask')]),
                (despike, smooth_wf, [('out_file', 'inputnode.in_files')]),
                (smooth_wf, maskBold, [(('outputnode.smoothed_files', unlist), 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])
        else:
            lvl1pipe_wf.connect([
                (despike, specify_model, [('out_file', 'functional_runs')]),
                (despike, modelfit, [('out_file', 'inputspec.functional_data')]),
                (despike, sinker, [('out_file', 'despike')])
                ])
    else:
        if options['smooth']:
            lvl1pipe_wf.connect([
                (inputspec, smooth_wf, [('fwhm', 'inputnode.fwhm')]),
                (inputspec, get_gmmask, [('subject_id', 'subj_id'),
                                        ('smooth_gm_mask_template', 'template')]),
                (get_gmmask, mod_gmmask, [('out_file', 'in_file')]),
                (inputspec, mod_gmmask, [('gmmask_args', 'args')]),
                (mod_gmmask, fit_mask, [('out_file', 'mask_file')]),
                (get_bold, fit_mask, [('out_file', 'ref_file')]),
                (fit_mask, smooth_wf, [('out_mask', 'inputnode.mask_file')]),
                (fit_mask, sinker, [('out_mask', 'smoothing_mask')]),
                (get_bold, smooth_wf, [('out_file', 'inputnode.in_files')]),
                (smooth_wf, maskBold, [(('outputnode.smoothed_files', unlist), 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])
        else:
            lvl1pipe_wf.connect([
                (get_bold, maskBold, [('out_file', 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])

    lvl1pipe_wf.connect([
        (specify_model, modelfit, [('session_info', 'inputspec.session_info')]),
        (inputspec, sinker, [('subject_id','container'),
                              ('sinker_subs', 'substitutions')]), # creates folder for each subject.
        (make_outdir, sinker, [('new_out_dir', 'base_directory')]),
        (modelfit, sinker, [('outputspec.parameter_estimates', 'model'),
                            ('outputspec.dof_file','model.@dof'), #.@ puts this in the model folder.
                            ('outputspec.copes','model.@copes'),
                            ('outputspec.varcopes','model.@varcopes'),
                            ('outputspec.zfiles','stats'),
                            ('outputspec.pfiles', 'stats.@pfiles'),
                            ('level1design.ev_files', 'design'),
                            ('level1design.fsf_files', 'design.@fsf'),
                            ('modelgen.con_file', 'design.@confile'),
                            ('modelgen.fcon_file', 'design.@fconfile'),
                            ('modelgen.design_cov', 'design.@covmatriximg'),
                            ('modelgen.design_image', 'design.@designimg'),
                            ('modelgen.design_file', 'design.@designfile'),
                            ('modelestimate.logfile', 'design.@log'),
                            ('modelestimate.sigmasquareds', 'model.@resid_sum'),
                            ('modelestimate.fstats', 'stats.@fstats'),
                            ('modelestimate.thresholdac', 'model.@serial_corr'),
                           ])
        ])
    if options['keep_resid']:
        lvl1pipe_wf.connect([
            (modelfit, sinker, [('modelestimate.residual4d', 'model.@resid')
                               ])
            ])
    return lvl1pipe_wf
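
A minimal usage sketch for the workflow above (every path, ID, and option
value here is hypothetical; see the docstring for the full set of inputs):

options = {'remove_steadystateoutlier': True,
           'smooth': True,
           'censoring': '',
           'ICA_AROMA': False,
           'run_contrasts': True,
           'keep_resid': False,
           'poly_trend': None,
           'dct_basis': None}
model_wf = create_lvl1pipe_wf(options)
model_wf.inputs.inputspec.subject_id = 'sub-01'                # hypothetical
model_wf.inputs.inputspec.output_dir = '/home/neuro/output'    # hypothetical
model_wf.inputs.inputspec.proj_name = 'example_proj'           # hypothetical
# ... set the remaining inputspec fields (templates, TR, contrasts, etc.)
# as described in the docstring, then:
model_wf.run()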
Example n. 15
def create_preproc_func_pipeline():#ref_slice, n_skip=4, n_slices=30, tr=2.5, sparse=False):
    
#    if sparse:
#        real_tr = tr/2
#    else:
#        real_tr = tr
    
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', "struct", "TR", "sparse"]), name="inputnode")
    
    pre_ica = pe.Node(interface=fsl.MELODIC(), name="pre_ica")
    pre_ica.inputs.no_mask = True
    pre_ica.inputs.out_all = True
    pre_ica.inputs.report = True

    skip = pe.Node(interface=fsl.ExtractROI(), name="skip")
    skip.inputs.t_min = 4 #TODO
    skip.inputs.t_size = 100000
    
    realign = pe.Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True
    
    tr_convert = pe.Node(interface=util.Function(input_names=['tr', 'sparse'], 
                                                 output_names=['tr'], 
                                                 function=get_tr), name="tr_converter")
    ta = pe.Node(interface=util.Function(input_names=['real_tr', 'n_slices'], 
                                                 output_names=['ta'], 
                                                 function=get_ta), name="ta")
    
    slice_timing = pe.Node(interface=spm.SliceTiming(), name="slice_timing")
    #slice_timing.inputs.num_slices = n_slices
    #slice_timing.inputs.time_repetition = real_tr
    #slice_timing.inputs.time_acquisition = real_tr - real_tr/float(n_slices)
    #slice_timing.inputs.slice_order = range(1,n_slices+1,2) + range(2,n_slices+1,2)
    #slice_timing.inputs.ref_slice = ref_slice
    
    coregister = pe.Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype= "estimate"
    
    smooth = pe.Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = [8, 8, 8]
    susan_smooth = create_susan_smooth(name="susan_smooth")
    susan_smooth.inputs.inputnode.fwhm = 8
    
    art = pe.Node(interface=ra.ArtifactDetect(), name="art")
    art.inputs.use_differences      = [True,False]
    art.inputs.use_norm             = True
    art.inputs.norm_threshold       = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type            = 'file'
    art.inputs.parameter_source     = 'SPM'
    art.inputs.save_plot            = True
    art.inputs.plot_type            = 'pdf'
    
    compute_mask = pe.Node(interface=ComputeMask(), name="compute_mask")
    
    post_ica = pe.Node(interface=fsl.MELODIC(), name="post_ica")
    post_ica.inputs.out_all = True
    post_ica.inputs.report = True
    
    preproc_func = pe.Workflow(name="preproc_func")
    preproc_func.connect([
                          (inputnode,skip, [("func", "in_file")]),
#                          (inputnode, pre_ica, [("func", "in_files"),
#                                                ("TR", "tr_sec")]),
                          
                          (coregister, compute_mask, [('coregistered_source','mean_volume')]),
                          (skip, slice_timing, [("roi_file", "in_files"),
                                                      (('roi_file', get_n_slices), "num_slices"),
                                                      (('roi_file', get_slice_order), "slice_order"),
                                                      (('roi_file', get_ref_slice), "ref_slice")
                                                      ]),
                          (inputnode, tr_convert, [("sparse", "sparse"),
                                                   ("TR", "tr")]),
                          (tr_convert, slice_timing, [("tr", "time_repetition")]),
                          
                          (tr_convert, ta, [("tr", "real_tr")]),
                          (skip, ta, [(('roi_file', get_n_slices), "n_slices")]),
                          
                          (ta, slice_timing, [("ta", "time_acquisition")]),     
                          (slice_timing, realign, [("timecorrected_files", "in_files")]),
#                          (skip, realign, [("roi_file", "in_files")]),
                          
                          (inputnode, coregister, [("struct", "target")]),
                          (realign, coregister,[('mean_image', 'source'),
                                                ('realigned_files','apply_to_files')]),
                          
                          (coregister, susan_smooth, [("coregistered_files","inputnode.in_files")]),
                          (compute_mask, susan_smooth,[('brain_mask','inputnode.mask_file')]),
                          
#                          (susan_smooth, post_ica, [("outputnode.smoothed_files", "in_files")]),
#                          (inputnode, post_ica, [("TR", "tr_sec")]),
#                          (compute_mask,post_ica,[('brain_mask','mask')]),
                          
#                          (coregister, smooth, [("coregistered_files","in_files")]),
                          
                          (compute_mask,art,[('brain_mask','mask_file')]),
                          (realign,art,[('realignment_parameters','realignment_parameters')]),
                          (realign,art,[('realigned_files','realigned_files')]),
                          ])
    
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['preproced_files', 'realignment_parameters', 'outlier_files', 'mask_file']), name="outputnode")
    
    preproc_func.connect(realign, 'realignment_parameters', outputnode, 'realignment_parameters')
    preproc_func.connect(susan_smooth, 'outputnode.smoothed_files', outputnode, 'preproced_files')
    preproc_func.connect(art, 'outlier_files', outputnode, 'outlier_files')
    preproc_func.connect(compute_mask, 'brain_mask', outputnode, 'mask_file')
    
    return preproc_func
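
A minimal usage sketch (file names are hypothetical; the pipeline takes its
functional and structural images through the inputnode):

preproc = create_preproc_func_pipeline()
preproc.inputs.inputnode.func = '/data/sub-01_bold.nii'     # hypothetical
preproc.inputs.inputnode.struct = '/data/sub-01_T1w.nii'    # hypothetical
preproc.inputs.inputnode.TR = 2.5
preproc.inputs.inputnode.sparse = False
preproc.run()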
def create_workflow(undist):
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'funcs',
            'subject_id',
            'session_id',
            'refsubject_id',
            'fwhm',  # smoothing
            'highpass'
        ]),
        name="inputspec")

    if undist:
        ud_flag = '_undist_PLUS'
    else:
        ud_flag = ''

    # SelectFiles
    templates = {
        # EPI ========
        'ref_func':
        'reference-vols/sub-{refsubject_id}/func/'
        'sub-{subject_id}_ref_func_res-1x1x1' + ud_flag + '.nii.gz',
        'ref_funcmask':
        'reference-vols/sub-{refsubject_id}/func/'
        'sub-{subject_id}_ref_func_mask_res-1x1x1.nii.gz',

        # T1 ========
        # 0.5 mm iso ---
        'ref_t1':
        'reference-vols/sub-{refsubject_id}/anat/'
        'sub-{subject_id}_ref_anat_res-0.5x0.5x0.5.nii.gz',
        'ref_t1mask':
        'reference-vols/sub-{refsubject_id}/anat/'
        'sub-{subject_id}_ref_anat_mask_res-0.5x0.5x0.5.nii.gz',
    }

    inputfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                         name="input_files")

    featpreproc.connect([(inputnode, inputfiles, [
        ('subject_id', 'subject_id'),
        ('session_id', 'session_id'),
        ('refsubject_id', 'refsubject_id'),
    ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # Datasink
    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/featpreproc',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'motion_parameters',
        'motion_corrected',
        'motion_plots',
        'motion_outlier_files',
        'mask',
        'smoothed_files',
        'highpassed_files',
        'mean',
        'func_unwarp',
        'ref_func',
        'ref_funcmask',
        'ref_t1',
        'ref_t1mask',
    ]),
                         name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect([(inputfiles, transmanmask_mc, [
        ('subject_id', 'in.subject_id'),
        ('session_id', 'in.session_id'),
        ('refsubject_id', 'in.refsubject_id'),
    ])])

    #featpreproc.connect(inputfiles, 'ref_funcmask',
    #                    transmanmask_mc, 'in.manualmask')
    featpreproc.connect(inputfiles, 'ref_funcmask', transmanmask_mc,
                        'in.ref_funcmask')
    featpreproc.connect(inputnode, 'funcs', transmanmask_mc, 'in.funcs')

    featpreproc.connect(inputfiles, 'ref_func', transmanmask_mc, 'in.ref_func')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------

    # Compute the temporal median of each functional run; this median image
    # is then registered to the reference image.
    # (fsl.maths.MedianImage(dimension="T") wraps `fslmaths -Tmedian`)
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=['in_file'],
    )

    pre_mc = create_workflow_allin_slices(name='premotioncorrection')

    featpreproc.connect([
        (inputnode, median_func, [
            ('funcs', 'in_file'),
        ]),
        (median_func, pre_mc, [
            ('out_file', 'in.funcs'),
        ]),
        (
            inputfiles,
            pre_mc,
            [
                # the median functional image will be used as reference / base
                ('ref_func', 'in.ref_func'),
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            pre_mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),
                # use the transformed masks as alignment weights
                # (TODO: verify that this is correct)
            ]),
        (pre_mc, outputnode, [
            ('mc.out_file', 'pre_motion_corrected'),
            ('mc.oned_file', 'pre_motion_parameters_oned_file'),
            ('mc.oned_matrix_save', 'pre_motion_parameters_oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
                # motion parameters in ASCII (.1D)
                ('pre_motion_parameters_oned_file',
                 'pre_motion_corrected.oned_file'),
                # transformation matrices for each sub-brick
                ('pre_motion_parameters_oned_matrix_save',
                 'pre_motion_corrected.oned_matrix_save'),
            ]),
    ])


    mc = create_workflow_allin_slices(name='motioncorrection',
                                      iterfield=('in_file', 'ref_file',
                                                 'in_weight_file'))

    # - - - - - - Connections - - - - - - -
    featpreproc.connect([
        (inputnode, mc, [
            ('funcs', 'in.funcs'),
        ]),
        (
            pre_mc,
            mc,
            [
                # The median image realigned to the reference functional
                # serves as the reference, so motion correction is done
                # against an image more similar to the functionals.
                ('mc.out_file', 'in.ref_func'),
            ]),
        (
            inputfiles,
            mc,
            [
                # Check and make sure the ref func mask is close enough
                # to the registered median image.
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
            ]),
        (mc, outputnode, [
            ('mc.out_file', 'motion_corrected'),
            ('mc.oned_file', 'motion_parameters'),
            ('mc.oned_matrix_save', 'motion_parameters_oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('motion_corrected', 'motion_corrected.out_file'),
                # motion parameters in ASCII (.1D)
                ('motion_parameters', 'motion_corrected.oned_file'),
                # transformation matrices for each sub-brick
                ('motion_parameters_oned_matrix_save',
                 'motion_corrected.oned_matrix_save'),
            ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Motion correction is performed against an undistorted reference,
    # so b0_unwarp is currently not needed or used here.

    # Unwarping has been moved to a separate workflow that uses the
    # blip-up/blip-down method (reversed phase-encoding directions);
    # the new reference images were created the same way.

    featpreproc.connect([
        (inputfiles, outputfiles, [
            ('ref_func', 'reference/func'),
            ('ref_funcmask', 'reference/func_mask'),
        ]),
        (inputfiles, outputnode, [
            ('ref_func', 'ref_func'),
            ('ref_funcmask', 'ref_funcmask'),
        ]),
    ])

    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    # Dilate mask (-dilF: maximum filtering of all voxels)
    dilatemask = pe.Node(interface=fsl.ImageMaths(suffix='_dil',
                                                  op_string='-dilF'),
                         name='dilatemask')
    featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(fsl.BinaryMaths(operation='mul'),
                            iterfield=('in_file', 'operand_file'),
                            name='funcbrains')

    featpreproc.connect([
        (mc, funcbrains, [
            ('mc.out_file', 'in_file'),
        ]),
        (dilatemask, funcbrains, [
            ('out_file', 'operand_file'),
        ]),
        (funcbrains, outputfiles, [
            ('out_file', 'funcbrains'),
        ]),
    ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')
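    # Note (assumption): rapidart's outlier_files list the indices of the
    # flagged volumes; these are typically used as confound regressors in
    # the first-level model.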

    featpreproc.connect([
        (mc, outliers, [
            ('mc.oned_file', 'realignment_parameters'),
        ]),
        (funcbrains, outliers, [
            ('out_file', 'realigned_files'),
        ]),
        (dilatemask, outliers, [
            ('out_file', 'mask_file'),
        ]),
        (
            outliers,
            outputfiles,
            [
                ('outlier_files', 'motion_outliers.@outlier_files'),
                ('plot_files', 'motion_outliers.@plot_files'),
                ('displacement_files', 'motion_outliers.@displacement_files'),
                ('intensity_files', 'motion_outliers.@intensity_files'),
                ('mask_files', 'motion_outliers.@mask_files'),
                ('statistic_files', 'motion_outliers.@statistic_files'),
                # ('norm_files', 'outliers.@norm_files'),
            ]),
        (
            outliers,
            outputnode,
            [
                # plain IdentityInterface fields (no DataSink '@' syntax here)
                ('outlier_files', 'motion_outlier_files'),
                ('plot_files', 'motion_plots'),
            ])
    ])
    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')

    featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')
    """
    Threshold the first run of functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')

    featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')
    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]
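    # Illustrative example: if a run's (2nd, 98th) percentiles are
    # (100.0, 5000.0), getthreshop returns ['-thr 500.0000000000 -Tmin -bin'].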

    featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold,
                        'op_string')
    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')

    featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')
    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')
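    # op_string '-k %s -p 50' runs fslstats with the thresholded mask
    # ('%s' is filled in with mask_file) and returns the 50th percentile,
    # i.e. the median intensity within the mask.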

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
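    # separate_masks=False: the same mask is applied to every run instead of
    # pairing one mask per functional run.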

    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py
    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3,
                        'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2), name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]
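    # e.g. chooseindex(0) -> [0] (select the unsmoothed data);
    #      chooseindex(5.0) -> [1] (select the SUSAN-smoothed data)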

    # in1: the motion-corrected functional data (before SUSAN)
    featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')

    # in2: the masked functional data after SUSAN smoothing (maskfunc3)
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')
    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')
    """
    Scale the median value of the run is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')
    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                        'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
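    # The 'highpass' input is passed straight into '-bptf <hp_sigma> -1', so
    # it must already be a high-pass sigma expressed in volumes; '-1'
    # disables the low-pass filter. E.g. a 100 s cutoff at TR = 2 s would be
    # 100 / (2 * 2) = 25 volumes.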
    featpreproc.connect(inputnode, ('highpass', highpass_operand), highpass,
                        'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')

    # FSL >= 5.0.7 removes the mean during '-bptf' filtering; detect the
    # version so the mean can be added back below.
    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(highpass, 'out_file', outputnode,
                            'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(addmean, 'out_file', outputnode,
                            'highpassed_files')
    """
    Generate a mean functional image from the first run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles,
                        'highpassed_files')

    return featpreproc