Example #1
def create_non_uniformity_correct_4D_file(auto_clip=False, clip_low=7,
                                          clip_high=200, n_procs=12):
    """non_uniformity_correct_4D_file corrects functional files for nonuniformity on a timepoint by timepoint way.
    Internally it implements a workflow to split the in_file, correct each separately and then merge them back together.
    This is an ugly workaround as we have to find the output of the workflow's datasink somewhere, but it should work.

    Parameters
    ----------
    in_file : str
        Absolute path to nifti-file.
    auto_clip : bool (default: False)
        whether to let 3dUniformize decide on clipping boundaries
    clip_low : float (default: 7),
        lower clipping bound for 3dUniformize
    clip_high : float (default: 200),
        higher clipping bound for 3dUniformize
    n_procs : int (default: 12),
        the number of processes to run the internal workflow with

    Returns
    -------
    out_file : non-uniformity corrected file
        List of absolute paths to nifti-files.    """

    # nodes
    input_node = pe.Node(IdentityInterface(
        fields=['in_file',
                'auto_clip',
                'clip_low',
                'clip_high',
                'output_directory',
                'sub_id']), name='inputspec')
    split = pe.Node(Function(input_names='in_file', output_names=['out_files'],
                             function=split_4D_to_3D), name='split')

    uniformer = pe.MapNode(
        Uniformize(clip_high=clip_high, clip_low=clip_low, auto_clip=auto_clip,
                   outputtype='NIFTI_GZ'), name='uniformer',
        iterfield=['in_file'])
    merge = pe.MapNode(fsl.Merge(dimension='t'), name='merge',
                       iterfield=['in_files'])

    datasink = pe.Node(nio.DataSink(infields=['topup'], container=''),
                       name='sinker')
    datasink.inputs.parameterization = False

    # workflow
    nuc_wf = pe.Workflow(name='nuc')
    nuc_wf.connect(input_node, 'sub_id', datasink, 'container')
    nuc_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    nuc_wf.connect(input_node, 'in_file', split, 'in_file')
    nuc_wf.connect(split, 'out_files', uniformer, 'in_file')
    nuc_wf.connect(uniformer, 'out_file', merge, 'in_files')
    nuc_wf.connect(merge, 'merged_file', datasink, 'uni')

    # nuc_wf.run('MultiProc', plugin_args={'n_procs': n_procs})
    # out_file = glob.glob(os.path.join(td, 'uni', fn_base + '_0000*.nii.gz'))[0]

    return nuc_wf
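
A minimal usage sketch (all paths are hypothetical; assumes the same imports as the example above):

nuc_wf = create_non_uniformity_correct_4D_file(auto_clip=True, n_procs=4)
nuc_wf.inputs.inputspec.in_file = '/data/sub-01/func.nii.gz'      # hypothetical path
nuc_wf.inputs.inputspec.output_directory = '/data/derivatives'    # hypothetical path
nuc_wf.inputs.inputspec.sub_id = 'sub-01'
nuc_wf.run('MultiProc', plugin_args={'n_procs': 4})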
Example #2
def vol2png(qcname, tag="", overlay=True, overlayiterated=True):
    import PUMI.func_preproc.Onevol as onevol

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    inputspec = pe.Node(
        utility.IdentityInterface(fields=['bg_image', 'overlay_image']),
        name='inputspec')

    analysisflow = pe.Workflow(name=qcname + tag + '_qc')

    myonevol_bg = onevol.onevol_workflow(wf_name="onebg")
    analysisflow.connect(inputspec, 'bg_image', myonevol_bg, 'inputspec.func')

    if overlay and not overlayiterated:
        #myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        #analysisflow.connect(inputspec, 'overlay_image', myonevol_ol, 'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    # Create png images for quality check
    if overlay and overlayiterated:
        myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        analysisflow.connect(inputspec, 'overlay_image', myonevol_ol,
                             'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file', 'image_edges'],
                            name='slicer')
    if not overlay:
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    slicer.inputs.image_width = 2000
    slicer.inputs.out_file = qcname
    # set output all axial slices into one picture
    slicer.inputs.sample_axial = 5
    #slicer.inputs.middle_slices = True

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".ppm")]

    analysisflow.connect(myonevol_bg, 'outputspec.func1vol', slicer, 'in_file')
    if overlay and not overlayiterated:
        analysisflow.connect(inputspec, 'overlay_image', slicer, 'image_edges')
    if overlay and overlayiterated:
        analysisflow.connect(myonevol_ol, 'outputspec.func1vol', slicer,
                             'image_edges')
    analysisflow.connect(slicer, 'out_file', ds_qc, qcname)

    return analysisflow
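
A minimal wiring sketch for the QC subworkflow (bg_node and edge_node are placeholder upstream nodes, not part of the example):

myqc = vol2png("func2anat", tag="example", overlayiterated=False)
wf = pe.Workflow(name='parent')
wf.connect(bg_node, 'out_file', myqc, 'inputspec.bg_image')
wf.connect(edge_node, 'out_file', myqc, 'inputspec.overlay_image')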
Example #3
def fMRI2QC(qcname, tag="", SinkDir=".", QCDIR="QC", indiv_atlas=False):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import PUMI.plot.image as plot

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'atlas', 'confounds']),
        name='inputspec')
    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-3mm.nii.gz'

    if indiv_atlas:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds', 'atlaslabels'],
                              name="qc_fmri")
    else:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds'],
                              name="qc_fmri")

    plotfmri.inputs.output_file = "qc_fmri.png"
    # default atlas works only for standardized, 3mm-resolution data

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'func', plotfmri, 'func')
    analysisflow.connect(inputspec, 'atlas', plotfmri, 'atlaslabels')
    analysisflow.connect(inputspec, 'confounds', plotfmri, 'confounds')

    analysisflow.connect(plotfmri, 'plotfile', ds_qc, qcname)

    return analysisflow
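
A minimal wiring sketch (func_node and conf_node are placeholder upstream nodes; 'inputspec.atlas' already defaults to the 3mm HarvardOxford atlas):

fmriqc = fMRI2QC("carpet_plots", tag="mytag")
wf = pe.Workflow(name='parent')
wf.connect(func_node, 'out_file', fmriqc, 'inputspec.func')
wf.connect(conf_node, 'out_file', fmriqc, 'inputspec.confounds')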
Example #4
def create_anat_noise_roi_workflow(SinkTag="func_preproc",
                                   wf_name="create_noise_roi"):
    """
    Creates an anatomical noise ROI for use with compcor

    inputs are awaited from the (BBR-based) func2anat registration
    and are already transformed to functional space

    Tamas Spisak
    2018


    """
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.globals as globals

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['wm_mask', 'ventricle_mask']),
        name='inputspec')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['noise_roi']),
                         name='outputspec')

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    # erode WM mask in functional space
    erode_mask = pe.MapNode(fsl.ErodeImage(),
                            iterfield=['in_file'],
                            name="erode_wm_mask")
    wf.connect(inputspec, 'wm_mask', erode_mask, 'in_file')

    # add ventricle and eroded WM masks
    add_masks = pe.MapNode(fsl.ImageMaths(op_string=' -add'),
                           iterfield=['in_file', 'in_file2'],
                           name="addimgs")

    wf.connect(inputspec, 'ventricle_mask', add_masks, 'in_file')
    wf.connect(erode_mask, 'out_file', add_masks, 'in_file2')

    wf.connect(add_masks, 'out_file', outputspec, 'noise_roi')

    return wf
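
A minimal wiring sketch, assuming the masks in functional space come from the bbr_workflow shown in a later example (mybbr is a placeholder for that workflow instance):

mybbr = bbr_workflow()
noise_roi = create_anat_noise_roi_workflow()
wf = pe.Workflow(name='parent')
wf.connect(mybbr, 'outputspec.wm_mask_in_funcspace', noise_roi, 'inputspec.wm_mask')
wf.connect(mybbr, 'outputspec.ventricle_mask_in_funcspace', noise_roi, 'inputspec.ventricle_mask')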
Example #5
def test_savgol_filter_node():

    sg_node = pe.MapNode(interface=Savgol_filter,
                         name='savgol_filt',
                         iterfield=['in_file'])
    sg_node.inputs.in_file = func_data
    res = sg_node.run()

    for f in res.outputs.out_file:
        assert op.isfile(f)
Example #6
def create_melodic_workflow(name='melodic', template=None, varnorm=True):

    input_node = pe.Node(IdentityInterface(fields=['in_file']),
                         name='inputspec')

    output_node = pe.Node(IdentityInterface(fields=['out_dir']),
                          name='outputspec')

    if template is None:
        template = op.join(op.dirname(op.dirname(op.abspath(__file__))),
                           'data', 'fsf_templates', 'melodic_template.fsf')

    melodic4fix_node = pe.MapNode(interface=Melodic4fix,
                                  iterfield=['in_file', 'out_dir'],
                                  name='melodic4fix')

    # Don't know if this works. Could also set these defaults inside the
    # melodic4fix node definition...
    melodic4fix_node.inputs.template = template
    melodic4fix_node.inputs.varnorm = varnorm

    rename_ica = pe.MapNode(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=extract_task),
                            name='rename_ica',
                            iterfield=['in_file'])

    mel4fix_workflow = pe.Workflow(name=name)

    mel4fix_workflow.connect(input_node, 'in_file', melodic4fix_node,
                             'in_file')

    mel4fix_workflow.connect(input_node, 'in_file', rename_ica, 'in_file')

    mel4fix_workflow.connect(rename_ica, 'out_file', melodic4fix_node,
                             'out_dir')

    mel4fix_workflow.connect(melodic4fix_node, 'out_dir', output_node,
                             'out_dir')

    return mel4fix_workflow
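
A minimal usage sketch (hypothetical path; in_file is a list because melodic4fix_node and rename_ica are MapNodes):

mel_wf = create_melodic_workflow(name='melodic')
mel_wf.inputs.inputspec.in_file = ['/data/sub-01/func_run1.nii.gz']  # hypothetical path
mel_wf.run()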
Example #7
def create_motion_confound_workflow(order=2,
                                    fd_cutoff=.2,
                                    name='motion_confound'):

    input_node = pe.Node(interface=IdentityInterface(
        fields=['par_file', 'output_directory', 'sub_id']),
                         name='inputspec')

    output_node = pe.Node(
        interface=IdentityInterface(fields=['out_fd', 'out_ext_moco']),
        name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    extend_motion_parameters = pe.MapNode(Extend_motion_parameters,
                                          iterfield=['par_file'],
                                          name='extend_motion_parameters')
    extend_motion_parameters.inputs.order = order

    framewise_disp = pe.MapNode(FramewiseDisplacement(parameter_source='FSL'),
                                iterfield=['in_file'],
                                name='framewise_disp')

    mcf_wf = pe.Workflow(name=name)
    mcf_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    mcf_wf.connect(input_node, 'sub_id', datasink, 'container')
    mcf_wf.connect(input_node, 'par_file', extend_motion_parameters,
                   'par_file')
    mcf_wf.connect(input_node, 'par_file', framewise_disp, 'in_file')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', output_node,
                   'out_ext_moco')
    mcf_wf.connect(framewise_disp, 'out_file', output_node, 'out_fd')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', datasink, 'confounds')
    mcf_wf.connect(framewise_disp, 'out_file', datasink, 'confounds.@df')

    return mcf_wf
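
A minimal usage sketch (hypothetical paths; par_file is a list because the inner nodes are MapNodes):

mcf_wf = create_motion_confound_workflow(order=2)
mcf_wf.inputs.inputspec.par_file = ['/data/sub-01/mc.par']       # hypothetical path
mcf_wf.inputs.inputspec.output_directory = '/data/derivatives'   # hypothetical path
mcf_wf.inputs.inputspec.sub_id = 'sub-01'
mcf_wf.run()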
Example #8
def regTimeseriesQC(qcname, tag="", SinkDir=".", QCDIR="QC"):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import PUMI.plot.timeseries as plot

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'modules', 'atlas']),
        name='inputspec')
    inputspec.inputs.atlas = None

    plotregts = pe.MapNode(interface=Function(
        input_names=['timeseries', 'modules', 'output_file', 'atlas'],
        output_names=['plotfile'],
        function=plot.plot_carpet_ts),
                           iterfield=['timeseries'],
                           name="qc_timeseries")
    plotregts.inputs.output_file = "qc_timeseries.png"

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'timeseries', plotregts, 'timeseries')
    analysisflow.connect(inputspec, 'atlas', plotregts, 'atlas')
    analysisflow.connect(inputspec, 'modules', plotregts, 'modules')
    analysisflow.connect(plotregts, 'plotfile', ds_qc, qcname)

    return analysisflow
Example #9
def bbr_workflow(SinkTag="func_preproc", wf_name="func2anat"):
    """
        Modified version of CPAC.registration.registration:

        `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/registration/registration.html`


        BBR registration of functional image to anat.

        Workflow inputs:
            :param func: One volume of the 4D fMRI (The one which is the closest to the fieldmap recording in time should be chosen- e.g: if fieldmap was recorded after the fMRI the last volume of it should be chosen).
            :param skull: The oriented high res T1w image.
            :param anat_wm_segmentation: WM probability mask in .
            :param anat_csf_segmentation: CSF probability mask in
            :param bbr_schedule: Parameters which specifies BBR options.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

        Workflow outputs:




            :return: bbreg_workflow - workflow
                func="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz",
                 skull="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                 anat_wm_segmentation="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/anat_preproc/fast/fast__prob_2.nii.gz",



        Balint Kincses
        [email protected]
        2018


        """
    import os
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'func', 'skull', 'anat_wm_segmentation', 'anat_gm_segmentation',
        'anat_csf_segmentation', 'anat_ventricle_segmentation'
    ]),
                        name='inputspec')

    myonevol = onevol.onevol_workflow()

    # trilinear interpolation is used by default in linear registration for func to anat
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file', 'reference'],
                            name='linear_func_to_anat')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6
    linear_reg.inputs.out_matrix_file = "lin_mat"

    # WM probability map is thresholded and masked
    wm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='wm_bb_mask')
    wm_bb_mask.inputs.op_string = '-thr 0.5 -bin'
    # CSF probability map is thresholded and masked
    csf_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                             iterfield=['in_file'],
                             name='csf_bb_mask')
    csf_bb_mask.inputs.op_string = '-thr 0.5 -bin'

    # GM probability map is thresholded and masked
    gm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='gm_bb_mask')
    gm_bb_mask.inputs.op_string = '-thr 0.1 -bin'  # liberal mask to capture all gm signal

    # ventricle probability map is thresholded and masked
    vent_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                              iterfield=['in_file'],
                              name='vent_bb_mask')
    vent_bb_mask.inputs.op_string = '-thr 0.8 -bin -ero -dilM'  # stricter threshold and some morphology for compcor

    # add the CSF and WM masks
    #add_masks=pe.MapNode(interface=fsl.ImageMaths(),
    #                     iterfield=['in_file','in_file2'],
    #                     name='add_masks')
    #add_masks.inputs.op_string = ' -add'

    # A helper function builds the bbr argument string, which tells FLIRT to perform
    # BBR registration for each element of the list (due to the MapNode)
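    # For illustration (hypothetical path):
    #   bbreg_args('/path/wm_mask.nii.gz') -> '-cost bbr -wmseg /path/wm_mask.nii.gz'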
    def bbreg_args(bbreg_target):
        return '-cost bbr -wmseg ' + bbreg_target

    bbreg_arg_convert = pe.MapNode(interface=Function(
        input_names=["bbreg_target"],
        output_names=["arg"],
        function=bbreg_args),
                                   iterfield=['bbreg_target'],
                                   name="bbr_arg_converter")

    # BBR registration within the FLIRT node
    bbreg_func_to_anat = pe.MapNode(
        interface=fsl.FLIRT(),
        iterfield=['in_file', 'reference', 'in_matrix_file', 'args'],
        name='bbreg_func_to_anat')
    bbreg_func_to_anat.inputs.dof = 6

    # calculate the inverse of the transformation matrix (of func to anat)
    convertmatrix = pe.MapNode(interface=fsl.ConvertXFM(),
                               iterfield=['in_file'],
                               name="convertmatrix")
    convertmatrix.inputs.invert_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical CSF mask
    reg_anatmask_to_func1 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func1')

    # use the inverse registration (anat to func) to transform the anatomical WM mask
    reg_anatmask_to_func2 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func2')

    # use the inverse registration (anat to func) to transform the anatomical GM mask
    reg_anatmask_to_func3 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func3')

    # use the inverse registration (anat to func) to transform the anatomical ventricle mask
    reg_anatmask_to_func4 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func4')

    # Create png images for quality check
    myqc = qc.vol2png("func2anat")

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_sample2anat', 'example_func', 'func_to_anat_linear_xfm',
        'anat_to_func_linear_xfm', 'csf_mask_in_funcspace',
        'wm_mask_in_funcspace', 'gm_mask_in_funcspace',
        'ventricle_mask_in_funcspace'
    ]),
                         name='outputspec')

    analysisflow = pe.Workflow(name=wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
    analysisflow.connect(myonevol, 'outputspec.func1vol', linear_reg,
                         'in_file')
    analysisflow.connect(inputspec, 'skull', linear_reg, 'reference')
    analysisflow.connect(linear_reg, 'out_matrix_file', bbreg_func_to_anat,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol', bbreg_func_to_anat,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', bbreg_arg_convert,
                         'bbreg_target')
    analysisflow.connect(bbreg_arg_convert, 'arg', bbreg_func_to_anat, 'args')
    analysisflow.connect(inputspec, 'skull', bbreg_func_to_anat, 'reference')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', convertmatrix,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func1,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func1, 'reference')
    analysisflow.connect(csf_bb_mask, 'out_file', reg_anatmask_to_func1,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func2,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func2, 'reference')
    analysisflow.connect(wm_bb_mask, 'out_file', reg_anatmask_to_func2,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func3,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func3, 'reference')
    analysisflow.connect(gm_bb_mask, 'out_file', reg_anatmask_to_func3,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func4,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func4, 'reference')
    analysisflow.connect(vent_bb_mask, 'out_file', reg_anatmask_to_func4,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', wm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_csf_segmentation', csf_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_gm_segmentation', gm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_ventricle_segmentation',
                         vent_bb_mask, 'in_file')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', outputspec,
                         'func_sample2anat')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', outputspec,
                         'func_to_anat_linear_xfm')
    analysisflow.connect(reg_anatmask_to_func1, 'out_file', outputspec,
                         'csf_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func2, 'out_file', outputspec,
                         'wm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func3, 'out_file', outputspec,
                         'gm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func4, 'out_file', outputspec,
                         'ventricle_mask_in_funcspace')
    analysisflow.connect(myonevol, 'outputspec.func1vol', outputspec,
                         'example_func')
    analysisflow.connect(convertmatrix, 'out_file', outputspec,
                         'anat_to_func_linear_xfm')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', ds, "func2anat")
    analysisflow.connect(bbreg_func_to_anat, 'out_file', myqc,
                         'inputspec.bg_image')
    analysisflow.connect(wm_bb_mask, 'out_file', myqc,
                         'inputspec.overlay_image')

    return analysisflow
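
A minimal wiring sketch (func_source, anat_source and seg_source are placeholder upstream nodes; the gm, csf and ventricle segmentations are connected the same way as the wm one):

mybbr = bbr_workflow()
wf = pe.Workflow(name='parent')
wf.connect(func_source, 'out_file', mybbr, 'inputspec.func')
wf.connect(anat_source, 'out_file', mybbr, 'inputspec.skull')
wf.connect(seg_source, 'out_file', mybbr, 'inputspec.anat_wm_segmentation')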
Example #10
datagrab.inputs.field_template = dict(
    func=sys.argv[2],
    struct=sys.argv[1])  # specified by command line arguments
datagrab.inputs.sort_filelist = True

# sink: file - idx relationship!!
pop_id = pe.Node(interface=utils_convert.List2TxtFile, name='pop_id')
pop_id.inputs.rownum = 0
pop_id.inputs.out_file = "subject_IDs.txt"
ds_id = pe.Node(interface=nio.DataSink(), name='ds_pop_id')
ds_id.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "IDs.txt")]
ds_id.inputs.base_directory = globals._SinkDir_

# build the actual pipeline
reorient_struct = pe.MapNode(fsl.utils.Reorient2Std(),
                             iterfield=['in_file'],
                             name="reorient_struct")
reorient_func = pe.MapNode(fsl.utils.Reorient2Std(),
                           iterfield=['in_file'],
                           name="reorient_func")

myanatproc = anatproc.AnatProc(stdreg=_regtype_)
myanatproc.inputs.inputspec.bet_fract_int_thr = 0.3  # feel free to adjust, a nice bet is important!
myanatproc.inputs.inputspec.bet_vertical_gradient = -0.3  # feel free to adjust, a nice bet is important!
# try scripts/opt_bet.py to optimise these parameters

mybbr = bbr.bbr_workflow()
# Add an arbitrary number of nii images within the same space. The default is to add the csf and wm masks for anatcompcor calculation.
#myadding=adding.addimgs_workflow(numimgs=2)
add_masks = pe.MapNode(fsl.ImageMaths(op_string=' -add'),
                       iterfield=['in_file', 'in_file2'],
                       name="add_masks")  # name assumed; the original snippet breaks off here
Example #11
def func2mni(stdreg,
             carpet_plot="",
             wf_name='func2mni',
             SinkTag="func_preproc"):
    """
    stdreg: either globals._RegType_.ANTS or globals._RegType_.FSL (do default value to make sure the user has to decide explicitly)

    Transaform 4D functional image to MNI space.

    carpet_plot: string specifying the tag parameter for carpet plot of the standardized MRI measurement
            (default is "": no carpet plot)
            if not "", inputs atlaslabels and confounds should be defined (it might work with defaults, though)

    Workflow inputs:
    :param func
    :param linear_reg_mtrx
    :param nonlinear_reg_mtrx
    :param reference_brain
    :param atlas (optional)
    :param confounds (optional)
    :param confound_names (optional)


    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Balint Kincses
    [email protected]
    2018


    """
    import os
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.ants as ants
    from nipype.interfaces.c3 import C3dAffineTool
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'func',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'linear_reg_mtrx',
            'nonlinear_reg_mtrx',
            'reference_brain',
            'atlas',
            'confounds',
            'confound_names'
        ]),
        name='inputspec')

    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz'

    inputspec.inputs.reference_brain = globals._FSLDIR_ + "/data/standard/MNI152_T1_3mm_brain.nii.gz"  #3mm by default
    # TODO: this does not work with the iterfield definition for ref_file below:
    # TODO: it should be specified in a function argument whether it should be iterated
    # TODO_ready: ANTS
    # TODO: make the resampling voxel size for func parametrizable

    # apply transformation matrices
    if stdreg == globals._RegType_.FSL:
        applywarp = pe.MapNode(interface=fsl.ApplyWarp(interp="spline", ),
                               iterfield=['in_file', 'field_file', 'premat'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni", wf_name + "_FSL", overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # 500 # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 2mm template
    else:  #ANTs
        # the source file for C3dAffineTool must not be 4D, so we extract one example volume
        myonevol = onevol.onevol_workflow()
        # concat premat and ants transform
        bbr2ants = pe.MapNode(
            interface=C3dAffineTool(fsl2ras=True, itk_transform=True),
            iterfield=['source_file', 'transform_file',
                       'reference_file'],  # output: 'itk_transform'
            name="bbr2ants")
        #concat trfs into a list
        trflist = pe.MapNode(interface=Function(
            input_names=['trf_first', 'trf_second'],
            output_names=['trflist'],
            function=transformlist),
                             iterfield=['trf_first', 'trf_second'],
                             name="collect_trf")

        applywarp = pe.MapNode(interface=ants.ApplyTransforms(
            interpolation="BSpline", input_image_type=3),
                               iterfield=['input_image', 'transforms'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni",
                          wf_name + "_ANTS3",
                          overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # 500 # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 2mm template

    if carpet_plot:
        fmri_qc = qc.fMRI2QC("carpet_plots", tag=carpet_plot)

    outputspec = pe.Node(utility.IdentityInterface(fields=['func_std']),
                         name='outputspec')

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".nii.gz")]

    analysisflow = pe.Workflow(wf_name)
    analysisflow.base_dir = '.'
    if stdreg == globals._RegType_.FSL:
        analysisflow.connect(inputspec, 'func', applywarp, 'in_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', applywarp, 'premat')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', applywarp,
                             'field_file')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'ref_file')
        analysisflow.connect(applywarp, 'out_file', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'out_file', myqc, 'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'out_file', ds_nii, 'func2mni')
    else:  # ANTs
        analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', bbr2ants,
                             'source_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', bbr2ants,
                             'transform_file')
        analysisflow.connect(inputspec, 'anat', bbr2ants, 'reference_file')
        analysisflow.connect(bbr2ants, 'itk_transform', trflist, 'trf_first')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', trflist,
                             'trf_second')
        analysisflow.connect(trflist, 'trflist', applywarp, 'transforms')
        analysisflow.connect(inputspec, 'func', applywarp, 'input_image')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'reference_image')

        analysisflow.connect(applywarp, 'output_image', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'output_image', myqc,
                             'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'output_image', ds_nii, 'func2mni')

    if carpet_plot:
        if stdreg == globals._RegType_.FSL:
            analysisflow.connect(applywarp, 'out_file', fmri_qc,
                                 'inputspec.func')
        else:  # ANTs
            analysisflow.connect(applywarp, 'output_image', fmri_qc,
                                 'inputspec.func')

        analysisflow.connect(inputspec, 'atlas', fmri_qc, 'inputspec.atlas')
        analysisflow.connect(inputspec, 'confounds', fmri_qc,
                             'inputspec.confounds')

    return analysisflow
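
A minimal usage sketch with FSL-based standardization (hypothetical paths; the inputs are lists because the inner nodes are MapNodes; reference_brain already has a 3mm default):

f2mni = func2mni(stdreg=globals._RegType_.FSL, wf_name='func2mni_std')
f2mni.inputs.inputspec.func = ['/data/sub-01/func_mc.nii.gz']            # hypothetical path
f2mni.inputs.inputspec.linear_reg_mtrx = ['/data/sub-01/func2anat.mat']  # hypothetical path
f2mni.inputs.inputspec.nonlinear_reg_mtrx = ['/data/sub-01/anat2mni_warpfield.nii.gz']  # hypothetical path
f2mni.run()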
Example #12
def extract_timeseries(SinkTag="connectivity",
                       wf_name="extract_timeseries",
                       modularise=True):
    ########################################################################
    # Extract timeseries
    ########################################################################

    import nipype.interfaces.nilearn as learn
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc
    import os

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'std_func',
            'atlas_file',  # nii labelmap (or 4D probmaps)
            'labels',  # list of short names to regions
            'modules'  # list of modules of regions
        ]),
        name='inputspec')
    # re-label atlas, so that regions corresponding to the same modules follow each other
    if modularise:
        relabel_atls = pe.Node(interface=Function(
            input_names=['atlas_file', 'modules', 'labels'],
            output_names=[
                'relabelled_atlas_file', 'reordered_modules',
                'reordered_labels', 'newlabels_file'
            ],
            function=relabel_atlas),
                               name='relabel_atlas')
        # Save outputs which are important
        ds_nii = pe.Node(interface=io.DataSink(), name='ds_relabeled_atlas')
        ds_nii.inputs.base_directory = SinkDir
        ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

        # Save outputs which are important
        ds_newlabels = pe.Node(interface=io.DataSink(), name='ds_newlabels')
        ds_newlabels.inputs.base_directory = SinkDir
        ds_newlabels.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    extract_timeseries = pe.MapNode(
        interface=learn.SignalExtraction(detrend=False),
        iterfield=['in_file'],
        name='extract_timeseries')

    # Save outputs which are important
    ds_txt = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_txt.inputs.base_directory = SinkDir
    ds_txt.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".tsv")]

    #QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)

    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'timeseries_file', 'relabelled_atlas_file', 'reordered_modules',
        'reordered_labels'
    ]),
                         name='outputspec')

    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    analysisflow.connect(inputspec, 'std_func', extract_timeseries, 'in_file')
    if modularise:
        analysisflow.connect(inputspec, 'atlas_file', relabel_atls,
                             'atlas_file')
        analysisflow.connect(inputspec, 'modules', relabel_atls, 'modules')
        analysisflow.connect(inputspec, 'labels', relabel_atls, 'labels')

        analysisflow.connect(relabel_atls, 'relabelled_atlas_file',
                             extract_timeseries, 'label_files')
        analysisflow.connect(relabel_atls, 'reordered_labels',
                             extract_timeseries, 'class_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', timeseries_qc,
                             'inputspec.modules')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file',
                             timeseries_qc, 'inputspec.atlas')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', ds_nii,
                             'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'newlabels_file', ds_newlabels,
                             'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', outputspec,
                             'relabelled_atlas_file')
        analysisflow.connect(relabel_atls, 'reordered_labels', outputspec,
                             'reordered_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', outputspec,
                             'reordered_modules')
    else:
        analysisflow.connect(inputspec, 'atlas_file', extract_timeseries,
                             'label_files')
        analysisflow.connect(inputspec, 'labels', extract_timeseries,
                             'class_labels')
        analysisflow.connect(inputspec, 'modules', timeseries_qc,
                             'inputspec.modules')
        analysisflow.connect(inputspec, 'atlas_file', timeseries_qc,
                             'inputspec.atlas')
        analysisflow.connect(inputspec, 'atlas_file', outputspec,
                             'relabelled_atlas_file')
        analysisflow.connect(inputspec, 'labels', outputspec,
                             'reordered_labels')
        analysisflow.connect(inputspec, 'modules', outputspec,
                             'reordered_modules')

    analysisflow.connect(extract_timeseries, 'out_file', ds_txt,
                         'regional_timeseries')
    analysisflow.connect(extract_timeseries, 'out_file', timeseries_qc,
                         'inputspec.timeseries')

    analysisflow.connect(extract_timeseries, 'out_file', outputspec,
                         'timeseries_file')

    return analysisflow
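
A minimal usage sketch (hypothetical paths and labels; std_func is a list because extraction runs in a MapNode):

ts_wf = extract_timeseries(modularise=True)
ts_wf.inputs.inputspec.std_func = ['/data/sub-01/func_std.nii.gz']  # hypothetical path
ts_wf.inputs.inputspec.atlas_file = '/data/atlas_labels.nii.gz'     # hypothetical path
ts_wf.inputs.inputspec.labels = ['regionA', 'regionB']              # hypothetical labels
ts_wf.inputs.inputspec.modules = ['module1', 'module1']             # hypothetical modules
ts_wf.run()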
Example #13
def create_retroicor_workflow(name='retroicor', order_or_timing='order'):
    """
    Creates RETROICOR regressors.

    Inputs::
        inputspec.in_files - The EPI nifti files
        inputspec.phys_files - The .log files acquired together with the EPI sequence
    Outputs::
        outputspec.evs - The RETROICOR regressor files
    """
    
    # Define nodes:
    input_node = pe.Node(niu.IdentityInterface(fields=['in_files',
                                                    'phys_files',
                                                    'nr_dummies',
                                                    'MB_factor', 
                                                    'tr',
                                                    'slice_direction',
                                                    'phys_sample_rate',
                                                    'slice_timing',
                                                    'slice_order',
                                                    'hr_rvt',
                                                    ]), name='inputspec')

    # the slice time preprocessing node before we go into popp (PreparePNM)
    slice_times_from_gradients = pe.MapNode(niu.Function(
        input_names=['in_file', 'phys_file', 'nr_dummies', 'MB_factor', 'sample_rate'],
        output_names=['out_file', 'fig_file'],
        function=_distill_slice_times_from_gradients),
        name='slice_times_from_gradients', iterfield=['in_file', 'phys_file'])

    slice_times_to_txt_file = pe.Node(niu.Function(
        input_names=['slice_times'],
        output_names=['out_file'],
        function=_slice_times_to_txt_file), name='slice_times_to_txt_file')

    pnm_prefixer = pe.MapNode(niu.Function(
        input_names=['filename'],
        output_names=['out_string'],
        function=_preprocess_nii_files_to_pnm_evs_prefix),
        name='pnm_prefixer', iterfield=['filename'])

    prepare_pnm = pe.MapNode(PreparePNM(), name='prepare_pnm', iterfield=['in_file'])

    pnm_evs = pe.MapNode(PNMtoEVs(), name='pnm_evs',
                         iterfield=['functional_epi', 'cardiac', 'resp', 'hr', 'rvt', 'prefix'])

    # Define output node
    output_node = pe.Node(niu.IdentityInterface(fields=['new_phys', 'fig_file', 'evs']), name='outputspec')

    ########################################################################################
    # workflow
    ########################################################################################

    retroicor_workflow = pe.Workflow(name=name)
    
    # align phys-log data to nifti 
    retroicor_workflow.connect(input_node, 'in_files', slice_times_from_gradients, 'in_file')
    retroicor_workflow.connect(input_node, 'phys_files', slice_times_from_gradients, 'phys_file')
    retroicor_workflow.connect(input_node, 'nr_dummies', slice_times_from_gradients, 'nr_dummies')
    retroicor_workflow.connect(input_node, 'MB_factor', slice_times_from_gradients, 'MB_factor')
    retroicor_workflow.connect(input_node, 'phys_sample_rate', slice_times_from_gradients, 'sample_rate')

    # conditional here, for the creation of a separate slice timing file if order_or_timing is 'timing'
    # order_or_timing can also be 'order'
    if order_or_timing == 'timing':
        retroicor_workflow.connect(input_node, 'slice_timing', slice_times_to_txt_file, 'slice_times')
    
    # prepare pnm:
    retroicor_workflow.connect(input_node, 'phys_sample_rate', prepare_pnm, 'sampling_rate')
    retroicor_workflow.connect(input_node, 'tr', prepare_pnm, 'tr')
    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', prepare_pnm, 'in_file')
    retroicor_workflow.connect(input_node, 'hr_rvt', prepare_pnm, 'hr_rvt')
    
    # pnm evs:
    retroicor_workflow.connect(input_node, 'in_files', pnm_prefixer, 'filename')
    retroicor_workflow.connect(pnm_prefixer, 'out_string', pnm_evs, 'prefix')
    retroicor_workflow.connect(input_node, 'in_files', pnm_evs, 'functional_epi')
    retroicor_workflow.connect(input_node, 'slice_direction', pnm_evs, 'slice_dir')
    retroicor_workflow.connect(input_node, 'tr', pnm_evs, 'tr')
    if order_or_timing == 'timing':
        retroicor_workflow.connect(slice_times_to_txt_file, 'out_file', pnm_evs, 'slice_timing')
    elif order_or_timing == 'order':
        retroicor_workflow.connect(input_node, 'slice_order', pnm_evs, 'slice_order')
    retroicor_workflow.connect(prepare_pnm, 'card', pnm_evs, 'cardiac')
    retroicor_workflow.connect(prepare_pnm, 'resp', pnm_evs, 'resp')
    retroicor_workflow.connect(prepare_pnm, 'hr', pnm_evs, 'hr')
    retroicor_workflow.connect(prepare_pnm, 'rvt', pnm_evs, 'rvt')

    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', output_node, 'new_phys')
    retroicor_workflow.connect(slice_times_from_gradients, 'fig_file', output_node, 'fig_file')
    retroicor_workflow.connect(pnm_evs, 'evs', output_node, 'evs')

    return retroicor_workflow
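
A minimal usage sketch (all values are hypothetical placeholders for a given acquisition):

retro_wf = create_retroicor_workflow(order_or_timing='order')
retro_wf.inputs.inputspec.in_files = ['/data/sub-01/func.nii.gz']  # hypothetical path
retro_wf.inputs.inputspec.phys_files = ['/data/sub-01/phys.log']   # hypothetical path
retro_wf.inputs.inputspec.nr_dummies = 5                           # hypothetical value
retro_wf.inputs.inputspec.MB_factor = 3                            # hypothetical value
retro_wf.inputs.inputspec.tr = 2.0                                 # hypothetical value
retro_wf.inputs.inputspec.phys_sample_rate = 496                   # hypothetical value
retro_wf.inputs.inputspec.slice_order = 'up'                       # hypothetical value
retro_wf.inputs.inputspec.hr_rvt = True
retro_wf.run()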
Example #14
def create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti'):
    """Creates the preprocessing workflow for the VWM-anti experiment.

    Parameters
    ----------
    analysis_info : dict
        Analysis parameters (e.g. 'sgfilter_polyorder', 'sgfilter_deriv',
        'sgfilter_window_length', 'RepetitionTime', 'psc_function',
        'nuisance_columns', 'MNI_mask_threshold').
    name : str, optional
        Name of the workflow.

    Returns
    -------
    VWM_anti_pp_workflow : nipype Workflow
    """
    import os.path as op
    import nipype.pipeline as pe
    import tempfile
    import glob
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from nipype.interfaces.ants import ApplyTransforms
    from bids.grabbids import BIDSLayout

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.filtering.nodes import Savgol_filter, Savgol_filter_confounds
    from spynoza.conversion.nodes import psc
    from spynoza.utils import get_scaninfo, pickfirst
    from utils import mask_nii_2_hdf5, nistats_confound_glm, mask_to_tsv

    input_node = pe.Node(IdentityInterface(
        fields=['bids_directory', 'fmriprep_directory', 'output_directory', 'mask_directory', 'sub_id']), name='inputspec')

    BIDSNiiGrabber = pe.Node(Function(function=get_niftis, input_names=["subject_id",
                                                                        "data_dir", "task", "space"],
                                      output_names=["nii_files"]), name="BIDSNiiGrabber")
    BIDSNiiGrabber.inputs.space = 'mni'

    BIDSEventsGrabber = pe.Node(Function(function=get_events, input_names=["subject_id",
                                                                           "data_dir", "task"],
                                         output_names=["event_files"]), name="BIDSEventsGrabber")
    
    BIDSConfoundsGrabber = pe.Node(Function(function=get_confounds, input_names=["subject_id",
                                                                                 "data_dir", "task"],
                                            output_names=["confounds_tsv_files"]), name="BIDSConfoundsGrabber")
    
    MaskGrabber = pe.Node(Function(function=get_masks, input_names=["mask_directory"],
                                   output_names=["mask_files"]), name="MaskGrabber")

    HDF5PSCMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_psc_masker')
    HDF5PSCMasker.inputs.folder_alias = 'psc'
    HDF5PSCMasker.inputs.hdf5_file = op.join(tempfile.mkdtemp(), 'roi.h5')

    HDF5PSCNuisMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                         function=mask_nii_2_hdf5),
                                name='hdf5_psc_nuis_masker')
    HDF5PSCNuisMasker.inputs.folder_alias = 'psc_nuis'

    # HDF5StatsMasker = pe.Node(Function(input_names = ['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names = ['hdf5_file'],
    #                             function = mask_nii_2_hdf5),
    #                             name = 'hdf5_stats_masker')
    # HDF5StatsMasker.inputs.folder_alias = 'stats'

    HDF5ROIMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_roi_masker')
    HDF5ROIMasker.inputs.folder_alias = 'rois'

    ConfoundGLM = pe.MapNode(Function(input_names=['nifti_file', 'confounds_file', 'which_confounds'], output_names=['output_pdf', 'output_nifti'],
                                      function=nistats_confound_glm),
                             name='nistats_confound_glm', iterfield=["nifti_file", "confounds_file"])
    ConfoundGLM.inputs.which_confounds = analysis_info['nuisance_columns']

    # VolTransNode = pe.MapNode(interface=fsl.preprocess.ApplyXFM(apply_xfm=False, apply_isoxfm=True, interp='sinc'),
    #                                                     name='vol_trans', iterfield = ['in_file'])

    # VolTransNode = pe.MapNode(interface=ApplyTransforms(transforms='identity', interpolation='LanczosWindowedSinc'),
    #                                                     name='vol_trans', iterfield = ['input_image'])

    ThreshNode = pe.MapNode(fsl.Threshold(thresh=analysis_info['MNI_mask_threshold'], args='-bin', output_datatype='int'),
                            name='thresh', iterfield=['in_file'])

    TSVMasker = pe.MapNode(Function(input_names=['in_file', 'mask_files'],
                                    output_names=['out_file'],
                                    function=mask_to_tsv),
                           iterfield=['in_file'],
                           name='tsv_masker')

    ROIResampler = pe.Node(Function(input_names=['mni_roi_files', 'mni_epi_space_file'], output_names=['output_roi_files'],
                                    function=resample_rois),
                           name='roi_resampler')

    sgfilter = pe.MapNode(interface=Savgol_filter,
                          name='sgfilter',
                          iterfield=['in_file'])
    sgfilter_confounds = pe.MapNode(interface=Savgol_filter_confounds,
                                    name='sgfilter_confounds',
                                    iterfield=['confounds'])

    # Both fmri data and nuisances are filtered with identical parameters
    sgfilter.inputs.polyorder = sgfilter_confounds.inputs.polyorder = analysis_info[
        'sgfilter_polyorder']
    sgfilter.inputs.deriv = sgfilter_confounds.inputs.deriv = analysis_info['sgfilter_deriv']
    sgfilter.inputs.window_length = sgfilter_confounds.inputs.window_length = analysis_info[
        'sgfilter_window_length']
    sgfilter.inputs.tr = sgfilter_confounds.inputs.tr = analysis_info['RepetitionTime']

    # set the psc function
    psc.inputs.func = analysis_info['psc_function']

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    VWM_anti_pp_workflow = pe.Workflow(name=name)

    # data source
    VWM_anti_pp_workflow.connect(
        input_node, 'bids_directory', BIDSEventsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSEventsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSNiiGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSNiiGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSConfoundsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSConfoundsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'mask_directory', MaskGrabber, 'mask_directory')

    # filter and psc
    VWM_anti_pp_workflow.connect(BIDSNiiGrabber, 'nii_files', sgfilter, 'in_file')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    # do the same filtering on confounds
    VWM_anti_pp_workflow.connect(BIDSConfoundsGrabber, 'confounds_tsv_files', sgfilter_confounds, 'confounds')

    # cleanup GLM
    VWM_anti_pp_workflow.connect(psc, 'out_file', ConfoundGLM, 'nifti_file')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', ConfoundGLM, 'confounds_file')

    # preparing masks, ANTS and fsl not working correctly
    # ANTs
    # pearl_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference_image')
    # pearl_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'input_image')
    # fsl
    # pearl_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference')
    # pearl_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'in_file')
    # pearl_pp_workflow.connect(VolTransNode, 'output_image', ThreshNode, 'in_file')

    VWM_anti_pp_workflow.connect(
        BIDSNiiGrabber, ('nii_files', pickfirst), ROIResampler, 'mni_epi_space_file')
    VWM_anti_pp_workflow.connect(
        MaskGrabber, 'mask_files', ROIResampler, 'mni_roi_files')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', ThreshNode, 'in_file')

    # masking data
    VWM_anti_pp_workflow.connect(psc, 'out_file', HDF5PSCMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5PSCMasker, 'mask_files')

    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', HDF5PSCNuisMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5PSCNuisMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCMasker, 'hdf5_file', HDF5PSCNuisMasker, 'hdf5_file')

    # needs stats before we do a masker....
    # pearl_pp_workflow.connect(VolTransNode, 'out_file', HDF5StatsMasker, 'in_files')
    # pearl_pp_workflow.connect(ThreshNode, 'out_file', HDF5StatsMasker, 'mask_files')
    # pearl_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', HDF5StatsMasker, 'hdf5_file')

    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', HDF5ROIMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5ROIMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCNuisMasker, 'hdf5_file', HDF5ROIMasker, 'hdf5_file')

    # mask to .tsv, for one timecourse per roi
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', TSVMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', TSVMasker, 'in_file')

    # set up output folder
    VWM_anti_pp_workflow.connect(
        input_node, 'output_directory', datasink, 'base_directory')

    # connect all outputs to datasink
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', datasink, 'confound_glm')
    VWM_anti_pp_workflow.connect(
        BIDSEventsGrabber, 'event_files', datasink, 'events')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', datasink, 'sg_filter')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', datasink, 'sg_filter_confound')
    VWM_anti_pp_workflow.connect(psc, 'out_file', datasink, 'psc')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', datasink, 'masks_f')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file', datasink, 'masks_b')
    VWM_anti_pp_workflow.connect(TSVMasker, 'out_file', datasink, 'tsv')
    VWM_anti_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', datasink, 'h5')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_pdf', datasink, 'confound_glm_report')

    return VWM_anti_pp_workflow
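
A minimal invocation sketch (all paths and values are hypothetical placeholders; the analysis_info keys shown are the ones the workflow reads):

analysis_info = {'sgfilter_polyorder': 3, 'sgfilter_deriv': 0,
                 'sgfilter_window_length': 120, 'RepetitionTime': 2.0,
                 'psc_function': 'median', 'nuisance_columns': ['X', 'Y', 'Z'],
                 'MNI_mask_threshold': 0.5}
wf = create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti')
wf.inputs.inputspec.bids_directory = '/data/bids'              # hypothetical path
wf.inputs.inputspec.fmriprep_directory = '/data/fmriprep'      # hypothetical path
wf.inputs.inputspec.mask_directory = '/data/masks'             # hypothetical path
wf.inputs.inputspec.output_directory = '/data/derivatives'     # hypothetical path
wf.inputs.inputspec.sub_id = 'sub-01'
wf.run()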
Example #15
    data_filt = data - data_filt + data_filt.mean(axis=-1)[:, np.newaxis]
    data_filt = data_filt.reshape(dims)
    img = nib.Nifti1Image(data_filt, affine=affine, header=header)
    new_name = os.path.basename(in_file).split('.')[:-2][0] + '_sg.nii.gz'
    out_file = os.path.abspath(new_name)
    nib.save(img, out_file)
    return out_file


Savgol_filter = Function(
    function=savgol_filter,
    input_names=['in_file', 'polyorder', 'deriv', 'window_length', 'tr'],
    output_names=['out_file'])

sgfilter = pe.MapNode(interface=Savgol_filter,
                      name='sgfilter',
                      iterfield=['in_file'])
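
A minimal run sketch for the node above (hypothetical path; the filter settings mirror the defaults of savgol_filter_confounds below):

sgfilter.inputs.in_file = ['/data/sub-01/func.nii.gz']  # hypothetical path
sgfilter.inputs.polyorder = 3
sgfilter.inputs.deriv = 0
sgfilter.inputs.window_length = 120
sgfilter.inputs.tr = 2.0  # hypothetical repetition time in seconds
res = sgfilter.run()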


def savgol_filter_confounds(confounds,
                            tr,
                            polyorder=3,
                            deriv=0,
                            window_length=120):
    import pandas as pd
    from scipy.signal import savgol_filter
    import numpy as np
    import os

    confounds_table = pd.read_table(confounds)
Example #16
def fast_workflow(SinkTag="anat_preproc", wf_name="tissue_segmentation"):
    """
    Borrowed from the PUMI project: https://github.com/spisakt/PUMI
    Balint Kincses
    [email protected]
    2019
     Modified version of CPAC.seg_preproc.seg_preproc

     `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/seg_preproc/seg_preproc.html`


        Do the segmentation of a brain extracted T1w image.


        Workflow inputs:
            :param brain: The brain extracted image, the output of the better_workflow.
            :param init_transform: The standard to anat linear transformation matrix (which is calculated in the Anat2MNI.py script). Beware of the resolution of the reference (standard) image, the default value is 2mm.
            :param priorprob: A list of tissue probability maps in the prior (=reference=standard) space. By default it must be 3 element(in T1w images the CSF, GM, WM order is valid)
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

        Workflow outputs:




            :return: fast_workflow - workflow




        Balint Kincses
        [email protected]
        2018


        """

    # This is a Nipype generator. Warning, here be dragons.
    import sys
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    #import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals  # needed below for _SinkDir_ and _FSLDIR_

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    #Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['brain', 'stand2anat_xfm', 'priorprob']),
                        name='inputspec')
    # inputspec.inputs.stand2anat_xfm='/home/analyser/Documents/PAINTER/probewith2subj/preprocess_solvetodos/anat2mni_fsl/inv_linear_reg0_xfm/mapflow/_inv_linear_reg0_xfm0/anat_brain_flirt_inv.mat'

    #TODO_ready set standard mask to 2mm

    inputspec.inputs.priorprob = [
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_csf.hdr',
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_gray.hdr',
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_white.hdr'
    ]

    # TODO_ready: use prior probability maps
    # Wraps command **fast**
    fast = pe.MapNode(interface=fsl.FAST(),
                      iterfield=['in_files', 'init_transform'],
                      name='fast')
    fast.inputs.img_type = 1
    fast.inputs.segments = True
    fast.inputs.probability_maps = True
    fast.inputs.out_basename = 'fast_'

    #myqc = qc.vol2png("tissue_segmentation", overlay=False)
    #myqc.inputs.slicer.colour_map = globals._FSLDIR_ + '/etc/luts/renderjet.lut'

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'probmap_csf', 'probmap_gm', 'probmap_wm', 'mixeltype', 'parvol_csf',
        'parvol_gm', 'parvol_wm', 'partial_volume_map'
    ]),
                         name='outputspec')

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    def pickindex(vec, i):
        # helper: pick the i-th item from each element of a MapNode result list
        return [x[i] for x in vec]

    #Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'brain', fast, 'in_files')
    analysisflow.connect(inputspec, 'stand2anat_xfm', fast, 'init_transform')
    analysisflow.connect(inputspec, 'priorprob', fast, 'other_priors')
    # analysisflow.connect(inputspec, 'stand_csf' ,fast,('other_priors', pickindex, 0))
    # analysisflow.connect(inputspec, 'stand_gm' ,fast,('other_priors', pickindex, 1))
    # analysisflow.connect(inputspec, 'stand_wm' ,fast,('other_priors', pickindex, 2))

    # analysisflow.connect(fast, 'probability_maps', outputspec, 'probability_maps')
    analysisflow.connect(fast, ('probability_maps', pickindex, 0), outputspec,
                         'probmap_csf')
    analysisflow.connect(fast, ('probability_maps', pickindex, 1), outputspec,
                         'probmap_gm')
    analysisflow.connect(fast, ('probability_maps', pickindex, 2), outputspec,
                         'probmap_wm')
    analysisflow.connect(fast, 'mixeltype', outputspec, 'mixeltype')
    #analysisflow.connect(fast, 'partial_volume_files', outputspec, 'partial_volume_files')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 0),
                         outputspec, 'parvol_csf')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 1),
                         outputspec, 'parvol_gm')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 2),
                         outputspec, 'parvol_wm')
    analysisflow.connect(fast, 'partial_volume_map', outputspec,
                         'partial_volume_map')
    analysisflow.connect(fast, ('probability_maps', pickindex, 0), ds,
                         'fast_csf')
    analysisflow.connect(fast, ('probability_maps', pickindex, 1), ds,
                         'fast_gm')
    analysisflow.connect(fast, ('probability_maps', pickindex, 2), ds,
                         'fast_wm')
    #analysisflow.connect(fast, 'partial_volume_map', myqc, 'inputspec.bg_image')

    return analysisflow
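
# a minimal usage sketch (hypothetical file names; PUMI globals such as
# _SinkDir_ and _FSLDIR_ must be configured before building the workflow):
seg = fast_workflow(wf_name="tissue_segmentation")
seg.inputs.inputspec.brain = ['sub-01_T1w_brain.nii.gz']           # e.g. from bet_workflow
seg.inputs.inputspec.stand2anat_xfm = ['inv_linear_reg0_xfm.mat']  # e.g. from Anat2MNI
seg.run()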
Example No. 17

import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.afni as afni
import nipype.interfaces.utility as utility

#Basic interface class generates identity mappings (fields inferred from the
#connections below)
NodeHash_24ff4a0 = pe.Node(utility.IdentityInterface(fields=['sub_id', 'run_id']),
                           name='NodeName_24ff4a0')

#Generic datagrabber module that wraps around glob in an intelligent way
NodeHash_1e88370 = pe.Node(io.S3DataGrabber(infields=['sub_id', 'run_id'],
                                            outfields=['func']),
                           name='NodeName_1e88370')
NodeHash_1e88370.inputs.bucket = 'openneuro'
NodeHash_1e88370.inputs.sort_filelist = True
NodeHash_1e88370.inputs.template = '%s/func/%s_task-simon_%s_bold.nii.gz'
NodeHash_1e88370.inputs.anon = True
NodeHash_1e88370.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_1e88370.inputs.local_directory = '/tmp'
NodeHash_1e88370.inputs.template_args = dict(
    func=[['sub_id', 'sub_id', 'run_id']])

#Wraps command **3dvolreg**
NodeHash_19153b0 = pe.MapNode(interface=afni.Volreg(),
                              name='NodeName_19153b0',
                              iterfield=['in_file'])
NodeHash_19153b0.inputs.outputtype = 'NIFTI_GZ'

#Generic datasink module to store structured outputs
NodeHash_2b96290 = pe.Node(interface=io.DataSink(), name='NodeName_2b96290')
NodeHash_2b96290.inputs.base_directory = '/tmp'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_24ff4a0, 'sub_id', NodeHash_2b96290, 'container')
analysisflow.connect(NodeHash_24ff4a0, 'run_id', NodeHash_1e88370, 'run_id')
analysisflow.connect(NodeHash_24ff4a0, 'sub_id', NodeHash_1e88370, 'sub_id')
analysisflow.connect(NodeHash_1e88370, 'func', NodeHash_19153b0, 'in_file')
analysisflow.connect(NodeHash_19153b0, 'oned_file', NodeHash_2b96290,
                     'moco.moco_params')
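
# a minimal sketch of executing the generated pipeline (subject and run values
# are hypothetical):
NodeHash_24ff4a0.inputs.sub_id = 'sub-01'
NodeHash_24ff4a0.inputs.run_id = 'run-1'
analysisflow.run(plugin='MultiProc', plugin_args={'n_procs': 2})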
Example No. 18
import os
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as nio
import nipype.interfaces.fsl as fsl
import PUMI.func_preproc.FieldMapper as fm

datagrab = pe.Node(nio.DataGrabber(
    outfields=['func', 'phase', 'magnitude', 'TE1', 'TE2', 'dwelltime']),
                   name='data_grabber')

datagrab.inputs.base_directory = os.getcwd()  # do we need this?
datagrab.inputs.template = "*/*"  # do we need this?
datagrab.inputs.field_template = dict(func=sys.argv[1],
                                      phase=sys.argv[2],
                                      magnitude=sys.argv[3])
datagrab.inputs.sort_filelist = True

reorient_func = pe.MapNode(fsl.utils.Reorient2Std(),
                           iterfield=['in_file'],
                           name="reorient_func")

myfm = fm.fieldmapper(TE1=4.9,
                      TE2=7.3,
                      dwell_time=0.00035,
                      unwarp_direction="y-")

totalWorkflow = nipype.Workflow('fm_probe')
totalWorkflow.base_dir = '.'

totalWorkflow.connect([
    (datagrab, reorient_func, [('func', 'in_file')]),
    (reorient_func, myfm, [('out_file', 'inputspec.in_file')]),
    (datagrab, myfm, [('phase', 'inputspec.phase')]),
    (datagrab, myfm, [('magnitude', 'inputspec.magnitude')]),
])
Example No. 19
def create_confound_workflow(name='confound'):
    # imports assumed for this snippet; the helper workflows/interfaces
    # (create_motion_confound_workflow, create_compcor_workflow,
    # Concat_confound_files) are defined elsewhere in the same package
    import nipype.pipeline as pe
    from nipype.interfaces.utility import IdentityInterface
    from nipype.interfaces.io import DataSink
    from nipype.algorithms.confounds import ComputeDVARS

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'par_file', 'fast_files', 'highres2epi_mat',
        'n_comp_tcompcor', 'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(fields=[
        'all_confounds',
    ]),
                          name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    compute_DVARS = pe.MapNode(ComputeDVARS(save_all=True,
                                            remove_zerovariance=True),
                               iterfield=['in_file', 'in_mask'],
                               name='compute_DVARS')

    motion_wf = create_motion_confound_workflow(order=2)

    confound_wf = pe.Workflow(name=name)
    confound_wf.connect(input_node, 'par_file', motion_wf,
                        'inputspec.par_file')
    confound_wf.connect(input_node, 'sub_id', motion_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', motion_wf,
                        'inputspec.output_directory')

    compcor_wf = create_compcor_workflow()
    confound_wf.connect(input_node, 'in_file', compcor_wf, 'inputspec.in_file')
    confound_wf.connect(input_node, 'fast_files', compcor_wf,
                        'inputspec.fast_files')
    confound_wf.connect(input_node, 'highres2epi_mat', compcor_wf,
                        'inputspec.highres2epi_mat')
    confound_wf.connect(input_node, 'n_comp_tcompcor', compcor_wf,
                        'inputspec.n_comp_tcompcor')
    confound_wf.connect(input_node, 'n_comp_acompcor', compcor_wf,
                        'inputspec.n_comp_acompcor')
    confound_wf.connect(input_node, 'sub_id', compcor_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', compcor_wf,
                        'inputspec.output_directory')

    confound_wf.connect(compcor_wf, 'outputspec.epi_mask', compute_DVARS,
                        'in_mask')
    confound_wf.connect(input_node, 'in_file', compute_DVARS, 'in_file')

    concat = pe.MapNode(Concat_confound_files,
                        iterfield=['ext_par_file', 'fd_file', 'dvars_file'],
                        name='concat')

    confound_wf.connect(motion_wf, 'outputspec.out_ext_moco', concat,
                        'ext_par_file')
    confound_wf.connect(motion_wf, 'outputspec.out_fd', concat, 'fd_file')
    confound_wf.connect(compcor_wf, 'outputspec.acompcor_file', concat,
                        'acompcor_file')
    #confound_wf.connect(compcor_wf, 'outputspec.tcompcor_file', concat,
    #                    'tcompcor_file')
    confound_wf.connect(compute_DVARS, 'out_all', concat, 'dvars_file')
    confound_wf.connect(input_node, 'sub_id', datasink, 'container')
    confound_wf.connect(input_node, 'output_directory', datasink,
                        'base_directory')
    confound_wf.connect(concat, 'out_file', datasink, 'confounds')

    return confound_wf
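
# a minimal usage sketch (all inputs hypothetical; the FAST segmentations and
# the highres-to-EPI matrix come from earlier anatomical steps):
cwf = create_confound_workflow()
cwf.inputs.inputspec.in_file = ['sub-01_bold_mcf.nii.gz']
cwf.inputs.inputspec.par_file = ['sub-01_bold_mcf.par']
cwf.inputs.inputspec.fast_files = ['sub-01_T1w_brain_seg.nii.gz']
cwf.inputs.inputspec.highres2epi_mat = 'highres2epi.mat'
cwf.inputs.inputspec.n_comp_tcompcor = 6
cwf.inputs.inputspec.n_comp_acompcor = 6
cwf.inputs.inputspec.sub_id = 'sub-01'
cwf.inputs.inputspec.output_directory = '/derivatives/preproc'
cwf.run()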
Example No. 20
def extract_timeseries_nativespace(SinkTag="connectivity",
                                   wf_name="extract_timeseries_nativespace",
                                   global_signal=True):
    # this workflow transforms atlas back to native space and uses TsExtractor

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import PUMI.func_preproc.func2standard as transform
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'atlas',
            'labels',
            'modules',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'inv_linear_reg_mtrx',
            'inv_nonlinear_reg_mtrx',
            'func',
            'gm_mask',
            'confounds',
            'confound_names'
        ]),
        name="inputspec")

    # transform atlas back to native EPI spaces!
    atlas2native = transform.atlas2func(stdreg=globals._regType_)
    wf.connect(inputspec, 'atlas', atlas2native, 'inputspec.atlas')
    wf.connect(inputspec, 'anat', atlas2native, 'inputspec.anat')
    wf.connect(inputspec, 'inv_linear_reg_mtrx', atlas2native,
               'inputspec.inv_linear_reg_mtrx')
    wf.connect(inputspec, 'inv_nonlinear_reg_mtrx', atlas2native,
               'inputspec.inv_nonlinear_reg_mtrx')
    wf.connect(inputspec, 'func', atlas2native, 'inputspec.func')
    wf.connect(inputspec, 'gm_mask', atlas2native, 'inputspec.example_func')
    wf.connect(inputspec, 'confounds', atlas2native, 'inputspec.confounds')
    wf.connect(inputspec, 'confound_names', atlas2native,
               'inputspec.confound_names')

    # extract timeseries
    extract_timeseries = pe.MapNode(interface=utility.Function(
        input_names=['labels', 'labelmap', 'func', 'mask', 'global_signal'],
        output_names=['out_file', 'labels', 'out_gm_label'],
        function=TsExtractor),
                                    iterfield=['labelmap', 'func', 'mask'],
                                    name='extract_timeseries')
    extract_timeseries.inputs.global_signal = global_signal
    wf.connect(atlas2native, 'outputspec.atlas2func', extract_timeseries,
               'labelmap')
    wf.connect(inputspec, 'labels', extract_timeseries, 'labels')
    wf.connect(inputspec, 'gm_mask', extract_timeseries, 'mask')
    wf.connect(inputspec, 'func', extract_timeseries, 'func')

    # Save outputs which are important
    ds_regts = pe.Node(interface=io.DataSink(), name='ds_regts')
    ds_regts.inputs.base_directory = globals._SinkDir_
    ds_regts.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".tsv")]
    wf.connect(extract_timeseries, 'out_file', ds_regts, 'regional_timeseries')

    # QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)
    wf.connect(inputspec, 'modules', timeseries_qc, 'inputspec.modules')
    wf.connect(inputspec, 'atlas', timeseries_qc, 'inputspec.atlas')
    wf.connect(extract_timeseries, 'out_file', timeseries_qc,
               'inputspec.timeseries')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'out_gm_label']),
        name='outputspec')
    wf.connect(extract_timeseries, 'out_file', outputspec, 'timeseries')
    wf.connect(extract_timeseries, 'out_gm_label', outputspec, 'out_gm_label')

    return wf
Example No. 21
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import PUMI.func_preproc.info.info_get as info_get
import PUMI.utils.utils_convert as utils_convert
import nipype.interfaces.afni as afni
import nipype.interfaces.io as io

OutJSON = SinkDir + "/outputs.JSON"
WorkingDirectory = "."

#Basic interface class generates identity mappings
NodeHash_6040006ae640 = pe.Node(utility.IdentityInterface(fields=['func', 'slicetiming_txt']),
                                name='NodeName_6040006ae640')
NodeHash_6040006ae640.inputs.func = func
NodeHash_6040006ae640.inputs.slicetiming_txt = slicetiming_txt

#Custom interface wrapping function TR
NodeHash_6000004b9860 = pe.MapNode(interface=info_get.TR,
                                   name='NodeName_6000004b9860',
                                   iterfield=['in_file'])

#Custom interface wrapping function Str2Float
NodeHash_6040006ae9a0 = pe.MapNode(interface=utils_convert.Str2Float,
                                   name='NodeName_6040006ae9a0',
                                   iterfield=['str'])

#Custom interface wrapping function Float2Str
NodeHash_6040004aee80 = pe.MapNode(interface=utils_convert.Float2Str,
                                   name='NodeName_6040004aee80',
                                   iterfield=['float'])

#Wraps command **3dTshift**
NodeHash_6040004ad140 = pe.MapNode(interface=afni.TShift(),
                                   name='NodeName_6040004ad140',
                                   iterfield=['in_file', 'tr'])
NodeHash_6040004ad140.inputs.rltplus = True
NodeHash_6040004ad140.inputs.outputtype = "NIFTI_GZ"
NodeHash_6040004ad140.inputs.terminal_output = 'allatonce'

#Generic datasink module to store structured outputs
NodeHash_6080008b3d40 = pe.Node(interface=io.DataSink(),
                                name='NodeName_6080008b3d40')
Example No. 22
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import firstlevelhelpers
import nipype.algorithms.modelgen as modelgen

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Basic interface class generates identity mappings
NodeHash_2c4dda0 = pe.Node(utility.IdentityInterface(fields=['sub_id']),
                           name='NodeName_2c4dda0')
NodeHash_2c4dda0.inputs.sub_id = [
    'sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub-06', 'sub-07', 'sub-08',
    'sub-09', 'sub-10', 'sub-11', 'sub-12', 'sub-13', 'sub-14', 'sub-15',
    'sub-16', 'sub-17', 'sub-18', 'sub-19', 'sub-20', 'sub-21'
]

#Generic datagrabber module that wraps around glob in an intelligent way
NodeHash_17173a00 = pe.MapNode(io.S3DataGrabber(infields=['field_template', 'sub_id'],
                                                outfields=['func', 'events', 'anat']),
                               name='NodeName_17173a00',
                               iterfield=['sub_id'])
NodeHash_17173a00.inputs.anon = True
NodeHash_17173a00.inputs.bucket = 'openneuro'
NodeHash_17173a00.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_17173a00.inputs.local_directory = '/tmp'
NodeHash_17173a00.inputs.sort_filelist = True
NodeHash_17173a00.inputs.template = '*'
NodeHash_17173a00.inputs.template_args = dict(func=[['sub_id', 'sub_id']],
                                              events=[['sub_id', 'sub_id']],
                                              anat=[['sub_id', 'sub_id']])
NodeHash_17173a00.inputs.field_template = dict(
    func='%s/func/%s_task-simon_run-1_bold.nii.gz',
    events='%s/func/%s_task-simon_run-1_events.tsv',
    anat='%s/anat/%s_T1w.nii.gz')

#Wraps command **bet**
NodeHash_20af2180 = pe.MapNode(interface=fsl.BET(),
                               name='NodeName_20af2180',
                               iterfield=['in_file'])
NodeHash_20af2180.inputs.frac = 0.3
NodeHash_20af2180.inputs.robust = True

#Wraps command **fast**
Example No. 23
def compcor_workflow(SinkTag="func_preproc", wf_name="compcor"):
    """


               `source: -`


               Component based noise reduction method (Behzadi et al.,2007): Regressing out principal components from noise ROIs.
               Here the aCompCor is used.

               Workflow inputs:
                   :param func_aligned: The reoriented and realigned functional image.
                   :param mask_files: Mask files which determine ROI(s). The default mask is the
                   :param components_file
                   :param num_componenets:
                   :param pre_filter: Detrend time series prior to component extraction.
                   :param TR
                   :param SinkDir:
                   :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow.

               Workflow outputs:




                   :return: slt_workflow - workflow




               Balint Kincses
               [email protected]
               2018


     """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.algorithms.confounds as cnf
    import PUMI.func_preproc.info.info_get as info_get
    import PUMI.utils.utils_convert as utils_convert
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func_aligned', 'mask_file']),
        name='inputspec')

    myqc = qc.vol2png("compcor_noiseroi")

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    # standardize timeseries prior to compcor. added by tspisak
    scale = pe.MapNode(interface=utility.Function(input_names=['in_file'],
                                                  output_names=['scaled_file'],
                                                  function=scale_vol),
                       iterfield=['in_file'],
                       name='scale_func')

    # Calculate compcor files
    compcor = pe.MapNode(
        interface=cnf.ACompCor(pre_filter='polynomial',
                               header_prefix="",
                               num_components=5),
        iterfield=['realigned_file', 'repetition_time', 'mask_files'],
        name='compcor')

    # Custom interface wrapping function Str2Float
    func_str2float = pe.MapNode(interface=utils_convert.Str2Float,
                                iterfield=['str'],
                                name='func_str2float')
    # Drop first line of the Acompcor function output
    drop_firstline = pe.MapNode(interface=utils_convert.DropFirstLine,
                                iterfield=['txt'],
                                name='drop_firstline')
    # Custom interface wrapping function TR
    TRvalue = pe.MapNode(interface=info_get.TR,
                         iterfield=['in_file'],
                         name='TRvalue')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['components_file']),
                         name='outputspec')

    # save data out with Datasink
    ds_text = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_text.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".txt")]
    ds_text.inputs.base_directory = SinkDir

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func_aligned', scale, 'in_file')
    analysisflow.connect(scale, 'scaled_file', compcor, 'realigned_file')
    analysisflow.connect(inputspec, 'func_aligned', TRvalue, 'in_file')
    analysisflow.connect(TRvalue, 'TR', func_str2float, 'str')
    analysisflow.connect(func_str2float, 'float', compcor, 'repetition_time')
    #analysisflow.connect(TRvalue, 'TR', compcor, 'repetition_time')
    analysisflow.connect(inputspec, 'mask_file', compcor, 'mask_files')
    analysisflow.connect(compcor, 'components_file', drop_firstline, 'txt')
    analysisflow.connect(drop_firstline, 'droppedtxtfloat', outputspec,
                         'components_file')
    analysisflow.connect(compcor, 'components_file', ds_text, 'compcor_noise')

    analysisflow.connect(inputspec, 'func_aligned', myqc, 'inputspec.bg_image')
    analysisflow.connect(inputspec, 'mask_file', myqc,
                         'inputspec.overlay_image')

    analysisflow.connect(inputspec, 'mask_file', ds_nii, 'compcor_noise_mask')

    return analysisflow
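
# a minimal usage sketch (hypothetical files; the noise ROI mask is typically
# an eroded WM/CSF segmentation):
ccwf = compcor_workflow()
ccwf.inputs.inputspec.func_aligned = ['sub-01_bold_mcf.nii.gz']
ccwf.inputs.inputspec.mask_file = ['sub-01_noise_roi.nii.gz']
ccwf.run()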
Example No. 24
# sink: record the file-to-index relationship
pop_id = pe.Node(interface=utils_convert.List2TxtFile,
                 name='pop_id')
pop_id.inputs.rownum = 0
pop_id.inputs.out_file = "subject_IDs.txt"
totalWorkflow.connect(datagrab, 'bold', pop_id, 'in_list')

ds_id = pe.Node(interface=io.DataSink(), name='ds_pop_id')
ds_id.inputs.regexp_substitutions = [("(\/)[^\/]*$", "IDs.txt")]
ds_id.inputs.base_directory = globals._SinkDir_
totalWorkflow.connect(pop_id, 'txt_file', ds_id, 'subjects')

# build the actual pipeline
reorient_struct = pe.MapNode(fsl.utils.Reorient2Std(output_type='NIFTI_GZ'),
                             iterfield=['in_file'],
                             name="reorient_struct")
totalWorkflow.connect(datagrab, 'T1w', reorient_struct, 'in_file')

reorient_func = pe.MapNode(fsl.utils.Reorient2Std(output_type='NIFTI_GZ'),
                           iterfield=['in_file'],
                           name="reorient_func")
totalWorkflow.connect(datagrab, 'bold', reorient_func, 'in_file')

# prior probmaps for FAST are now switched off by default in PUMI
# ToDo: make settable
myanatproc = anatproc.AnatProc(stdreg=globals._regType_)
# a nice bet is important: adjust these (defaults 0.3 and -0.3) as needed, or
# try scripts/opt_bet.py to optimise them
myanatproc.inputs.inputspec.bet_fract_int_thr = opts.bet_fract_int_thr
myanatproc.inputs.inputspec.bet_vertical_gradient = opts.bet_vertical_gradient
totalWorkflow.connect(reorient_struct, 'out_file', myanatproc, 'inputspec.anat')
Example No. 25
def onevol_workflow(SinkTag="anat_preproc", wf_name="get_example_vol"):
    '''
    This function receives the raw functional image and returns its last volume, for registration purposes.
    MORE: It also returns information from the header file.

    Workflow inputs:
        :param func: Functional image.
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

    Workflow outputs:
        :return: onevol_workflow - workflow

    Balint Kincses
    [email protected]
    2018
    '''

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.func_preproc.info.info_get as info_get
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=['func']),
                        name='inputspec')
    #inputspec.inputs.func = "/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz"

    # Get dimension infos
    idx = pe.MapNode(interface=info_get.tMinMax,
                     iterfield=['in_files'],
                     name='idx')

    # Get the last volume of the func image
    fslroi = pe.MapNode(fsl.ExtractROI(),
                        iterfield=['in_file', 't_min'],
                        name='fslroi')
    fslroi.inputs.t_size = 1

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['func1vol']),
                         name='outputspec')

    # Generic datasink module to store structured outputs
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func', idx, 'in_files')
    analysisflow.connect(inputspec, 'func', fslroi, 'in_file')
    analysisflow.connect(idx, 'refvolidx', fslroi, 't_min')
    analysisflow.connect(fslroi, 'roi_file', ds, 'funclastvol')
    analysisflow.connect(fslroi, 'roi_file', outputspec, 'func1vol')

    return analysisflow
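
# a minimal usage sketch (hypothetical file; PUMI globals must be configured):
ov = onevol_workflow()
ov.inputs.inputspec.func = ['sub-01_bold.nii.gz']
ov.run()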
Example No. 26
def create_B0_workflow(name='b0_unwarping', scanner='philips'):
    """ Does B0 field unwarping

    Example
    -------
    >>> nipype_epicorrect = create_unwarping_workflow('unwarp',)
    >>> unwarp.inputs.input_node.in_file = 'subj1_run1_bold.nii.gz'
    >>> unwarp.inputs.input_node.fieldmap_mag = 'subj1_run1_mag.nii.gz'
    >>> unwarp.inputs.input_node.fieldmap_pha = 'subj1_run1_phas.nii.gz'
    >>> unwarp.inputs.input_node.wfs = 12.223
    >>> unwarp.inputs.input_node.epi_factor = 35.0
    >>> unwarp.inputs.input_node.acceleration = 3.0
    >>> unwarp.inputs.input_node.te_diff = 0.005
    >>> unwarp.inputs.input_node.phase_encoding_direction = 'y'
    >>> nipype_epicorrect.run()

    Inputs::
        input_node.in_file - Volume acquired with EPI sequence
        input_node.fieldmap_mag - Magnitude of the fieldmap
        input_node.fieldmap_pha - Phase difference of the fieldmap
        input_node.wfs - Water-fat-shift in pixels
        input_node.epi_factor - EPI factor
        input_node.acceleration - Acceleration factor used for EPI parallel imaging (SENSE)
        input_node.te_diff - Time difference between TE in seconds.
        input_node.phase_encoding_direction - Unwarp direction (default should be "y")
    Outputs::
        outputnode.epi_corrected
    """

    # Nodes:
    # ------

    # Define input and workflow:
    input_node = pe.Node(name='inputspec',
                         interface=IdentityInterface(fields=[
                             'in_files', 'fieldmap_mag', 'fieldmap_pha', 'wfs',
                             'epi_factor', 'acceleration', 'echo_spacing',
                             'te_diff', 'phase_encoding_direction'
                         ]))

    # Normalize phase difference of the fieldmap phase to be [-pi, pi)
    norm_pha = pe.Node(interface=Prepare_phasediff, name='normalize_phasediff')

    # Mask the magnitude of the fieldmap
    mask_mag = pe.Node(fsl.BET(mask=True), name='mask_magnitude')
    mask_mag_dil = pe.Node(interface=Dilate_mask, name='mask_dilate')

    # Unwrap fieldmap phase using FSL PRELUDE
    prelude = pe.Node(fsl.PRELUDE(process3d=True), name='phase_unwrap')

    # Convert unwrapped fieldmap phase to radians per second:
    radials_per_second = pe.Node(interface=Radials_per_second,
                                 name='radials_ps')

    # in case of SIEMENS scanner:
    prepare_fieldmap = pe.Node(PrepareFieldmap(), name='prepare_fieldmap')

    # Register unwrapped fieldmap (rad/s) to epi, using the magnitude of the fieldmap
    registration = pe.MapNode(fsl.FLIRT(bins=256,
                                        cost='corratio',
                                        dof=6,
                                        interp='trilinear',
                                        searchr_x=[-10, 10],
                                        searchr_y=[-10, 10],
                                        searchr_z=[-10, 10]),
                              iterfield=['reference'],
                              name='registration')

    # transform unwrapped fieldmap (rad/s)
    applyxfm = pe.MapNode(fsl.ApplyXFM(interp='trilinear'),
                          iterfield=['reference', 'in_matrix_file'],
                          name='apply_xfm')

    # compute effective echospacing:
    echo_spacing_philips = pe.Node(interface=Compute_echo_spacing_philips,
                                   name='echo_spacing_philips')
    echo_spacing_siemens = pe.Node(interface=Compute_echo_spacing_siemens,
                                   name='echo_spacing_siemens')
    te_diff_in_ms = pe.Node(interface=TE_diff_ms, name='te_diff_in_ms')

    # Unwarp with FSL Fugue
    fugue = pe.MapNode(interface=fsl.FUGUE(median_2dfilter=True),
                       iterfield=['in_file', 'unwarped_file', 'fmap_in_file'],
                       name='fugue')

    # Generate output filenames for the unwarped functional runs:
    out_file = pe.MapNode(interface=Make_output_filename,
                          iterfield=['in_file'],
                          name='out_file')

    # Define output node
    outputnode = pe.Node(
        IdentityInterface(fields=['out_files', 'field_coefs']),
        name='outputspec')

    # Workflow:
    # ---------

    unwarp_workflow = pe.Workflow(name=name)
    unwarp_workflow.connect(input_node, 'in_files', out_file, 'in_file')

    # registration:
    unwarp_workflow.connect(input_node, 'fieldmap_mag', mask_mag, 'in_file')
    unwarp_workflow.connect(mask_mag, 'mask_file', mask_mag_dil, 'in_file')
    unwarp_workflow.connect(mask_mag, 'out_file', registration, 'in_file')
    unwarp_workflow.connect(input_node, 'in_files', registration, 'reference')

    if scanner == 'philips':

        # prepare fieldmap:
        unwarp_workflow.connect(input_node, 'fieldmap_pha', norm_pha,
                                'in_file')
        unwarp_workflow.connect(input_node, 'fieldmap_mag', prelude,
                                'magnitude_file')
        unwarp_workflow.connect(norm_pha, 'out_file', prelude, 'phase_file')
        unwarp_workflow.connect(mask_mag_dil, 'out_file', prelude, 'mask_file')
        unwarp_workflow.connect(prelude, 'unwrapped_phase_file',
                                radials_per_second, 'in_file')
        unwarp_workflow.connect(input_node, 'te_diff', radials_per_second,
                                'asym')

        # transform fieldmap:
        unwarp_workflow.connect(radials_per_second, 'out_file', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'wfs', echo_spacing_philips, 'wfs')
        unwarp_workflow.connect(input_node, 'epi_factor', echo_spacing_philips,
                                'epi_factor')
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_philips, 'acceleration')
        unwarp_workflow.connect(echo_spacing_philips, 'echo_spacing', fugue,
                                'dwell_time')

    elif scanner == 'siemens':

        unwarp_workflow.connect(input_node, 'te_diff', te_diff_in_ms,
                                'te_diff')

        # prepare fieldmap:
        unwarp_workflow.connect(mask_mag, 'out_file', prepare_fieldmap,
                                'in_magnitude')
        unwarp_workflow.connect(input_node, 'fieldmap_pha', prepare_fieldmap,
                                'in_phase')
        unwarp_workflow.connect(te_diff_in_ms, 'te_diff', prepare_fieldmap,
                                'delta_TE')

        # transform fieldmap:
        unwarp_workflow.connect(prepare_fieldmap, 'out_fieldmap', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_siemens, 'acceleration')
        unwarp_workflow.connect(input_node, 'echo_spacing',
                                echo_spacing_siemens, 'echo_spacing')
        unwarp_workflow.connect(echo_spacing_siemens, 'echo_spacing', fugue,
                                'dwell_time')

    unwarp_workflow.connect(input_node, 'in_files', fugue, 'in_file')
    unwarp_workflow.connect(out_file, 'out_file', fugue, 'unwarped_file')
    unwarp_workflow.connect(applyxfm, 'out_file', fugue, 'fmap_in_file')
    unwarp_workflow.connect(input_node, 'te_diff', fugue, 'asym_se_time')
    unwarp_workflow.connect(input_node, 'phase_encoding_direction', fugue,
                            'unwarp_direction')
    unwarp_workflow.connect(fugue, 'unwarped_file', outputnode, 'out_files')
    unwarp_workflow.connect(applyxfm, 'out_file', outputnode, 'field_coefs')


    return unwarp_workflow
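
# The Philips branch above derives the FUGUE dwell time from the water-fat
# shift. A sketch of such a computation (compute_echo_spacing_philips is an
# illustrative stand-in for the Compute_echo_spacing_philips interface; the
# 434.215 Hz water-fat shift constant assumes a 3T scanner):
def compute_echo_spacing_philips(wfs, epi_factor, acceleration):
    """Effective echo spacing in seconds from the water-fat shift in pixels."""
    return wfs / (434.215 * (epi_factor + 1)) / acceleration

# e.g. wfs=12.223 px, epi_factor=35, SENSE factor 3 -> ~2.6e-4 s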
Example No. 27
def bet_workflow(Robust=True,
                 fmri=False,
                 SinkTag="anat_preproc",
                 wf_name="brain_extraction"):
    """
    Modified version of CPAC.anat_preproc.anat_preproc:

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/anat_preproc/anat_preproc.html`


    Creates a brain extracted image and its mask from a T1w anatomical image.

    Workflow inputs:
        :param anat: The reoriented anatomical file.
        :param SinkDir:
        :param SinkTag: The output directiry in which the returned images (see workflow outputs) could be found.

    Workflow outputs:




        :return: bet_workflow - workflow




    Balint Kincses
    [email protected]
    2018


    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    #Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'in_file',
            'opt_R',
            'fract_int_thr',  # optional
            'vertical_gradient'
        ]),  # optional
        name='inputspec')
    inputspec.inputs.opt_R = Robust
    if fmri:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_func_
    else:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_anat_

    inputspec.inputs.vertical_gradient = globals._fsl_bet_vertical_gradient_

    #Wraps command **bet**
    bet = pe.MapNode(interface=fsl.BET(), iterfield=['in_file'], name='bet')
    bet.inputs.mask = True
    # bet.inputs.robust=Robust
    if fmri:
        bet.inputs.functional = True
        myonevol = onevol.onevol_workflow(wf_name="onevol")
        applymask = pe.MapNode(fsl.ApplyMask(),
                               iterfield=['in_file', 'mask_file'],
                               name="apply_mask")

    myqc = qc.vol2png(wf_name, overlay=True)

    #Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['brain', 'brain_mask']),
        name='outputspec')

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    #Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(
        wf_name)  # the name here determines the folder of the workspace
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'in_file', bet, 'in_file')
    analysisflow.connect(inputspec, 'opt_R', bet, 'robust')
    analysisflow.connect(inputspec, 'fract_int_thr', bet, 'frac')
    analysisflow.connect(inputspec, 'vertical_gradient', bet,
                         'vertical_gradient')
    analysisflow.connect(bet, 'mask_file', outputspec, 'brain_mask')
    if fmri:

        analysisflow.connect(bet, 'mask_file', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', applymask,
                             'mask_file')
        analysisflow.connect(inputspec, 'in_file', applymask, 'in_file')
        analysisflow.connect(applymask, 'out_file', outputspec, 'brain')
    else:
        analysisflow.connect(bet, 'out_file', outputspec, 'brain')
    analysisflow.connect(bet, 'out_file', ds, 'bet_brain')
    analysisflow.connect(bet, 'mask_file', ds, 'brain_mask')

    analysisflow.connect(inputspec, 'in_file', myqc, 'inputspec.bg_image')
    analysisflow.connect(bet, 'out_file', myqc, 'inputspec.overlay_image')

    return analysisflow
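
# a minimal usage sketch for anatomical brain extraction (hypothetical file;
# pass fmri=True for functional images):
bwf = bet_workflow()
bwf.inputs.inputspec.in_file = ['sub-01_T1w_reoriented.nii.gz']
bwf.run()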
Example No. 28
def addimgs_workflow(numimgs=2,
                     SinkDir=".",
                     SinkTag="func_preproc",
                     WorkingDirectory="."):
    """
    `source: -`

    Adds any number of images which are in the same space. The input files must be NIFTI files.

    Workflow inputs:
        :param any number of .nii(.gz) files.
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found in a subdirectory specific to this workflow.

    Workflow outputs:
        :return: addimgs_workflow - workflow

    Balint Kincses
    [email protected]
    2018
    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl

    SinkDir = os.path.abspath(SinkDir + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    inputs = []
    for i in range(1, numimgs + 1):
        inputs.append("par" + str(i))


    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=inputs),
                        name='inputspec')

    # Merge the individual input images into one list
    merge_imgs = pe.Node(utility.Merge(numimgs - 1), name='merge_imgs')

    # Add the images voxelwise with FSL: par1 -add par2 -add ... -add parN
    add_imgs = pe.Node(fsl.MultiImageMaths(), name="addimgs")
    add_imgs.inputs.op_string = " ".join(["-add %s"] * (numimgs - 1))

    outputspec = pe.Node(utility.IdentityInterface(fields=['added_imgs']),
                         name='outputspec')

    # Create workflow
    analysisflow = nipype.Workflow('addimgsWorkflow')
    analysisflow.base_dir = '.'
    # connect: the first image is the base, the rest are operands
    analysisflow.connect(inputspec, 'par1', add_imgs, 'in_file')
    for i in range(2, numimgs + 1):
        analysisflow.connect(inputspec, 'par' + str(i),
                             merge_imgs, 'in' + str(i - 1))
    analysisflow.connect(merge_imgs, 'out', add_imgs, 'operand_files')
    analysisflow.connect(add_imgs, 'out_file', outputspec, 'added_imgs')


    return analysisflow
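
# a minimal usage sketch, assuming two co-registered NIfTI images
# (hypothetical paths):
awf = addimgs_workflow(numimgs=2)
awf.inputs.inputspec.par1 = 'gm_mask.nii.gz'
awf.inputs.inputspec.par2 = 'wm_mask.nii.gz'
awf.run()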
Example No. 29
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as io
import PUMI.utils.utils_math as utils_math  # assumed: provides SubTwo and Abs

OutJSON = SinkDir + "/outputs.JSON"

#Basic interface class generates identity mappings
NodeHash_604000eb5d20 = pe.Node(utility.IdentityInterface(
    fields=['func', 'magnitude', 'phase', 'TE1', 'TE2', 'dwell_time',
            'unwarp_direction']),
                                name='NodeName_604000eb5d20')
NodeHash_604000eb5d20.inputs.func = func
NodeHash_604000eb5d20.inputs.magnitude = magnitude
NodeHash_604000eb5d20.inputs.phase = phase
NodeHash_604000eb5d20.inputs.TE1 = TE1
NodeHash_604000eb5d20.inputs.TE2 = TE2
NodeHash_604000eb5d20.inputs.dwell_time = dwell_time
NodeHash_604000eb5d20.inputs.unwarp_direction = unwarp_direction

#Wraps command **bet**
NodeHash_604000cba700 = pe.MapNode(interface=fsl.BET(),
                                   name='NodeName_604000cba700',
                                   iterfield=['in_file'])
NodeHash_604000cba700.inputs.mask = True

#Wraps command **fslmaths**
NodeHash_600001ab26c0 = pe.MapNode(interface=fsl.ErodeImage(),
                                   name='NodeName_600001ab26c0',
                                   iterfield=['in_file'])

#Wraps command **fslmaths**
NodeHash_60c0018a6e40 = pe.MapNode(interface=fsl.ErodeImage(),
                                   name='NodeName_60c0018a6e40',
                                   iterfield=['in_file'])

#Custom interface wrapping function SubTwo
NodeHash_60c0018a4860 = pe.Node(interface=utils_math.SubTwo,
                                name='NodeName_60c0018a4860')

#Custom interface wrapping function Abs
NodeHash_600001eab220 = pe.Node(interface=utils_math.Abs,
                                name='NodeName_600001eab220')

#Wraps command **fsl_prepare_fieldmap**
Example No. 30
def create_preprocessing_workflow(analysis_params, name='yesno_3T'):
    import os.path as op
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from IPython import embed as shell

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.utils import get_scaninfo, pickfirst, average_over_runs, set_nifti_intercept_slope
    from spynoza.uniformization.workflows import create_non_uniformity_correct_4D_file
    from spynoza.unwarping.b0.workflows import create_B0_workflow
    from spynoza.motion_correction.workflows import create_motion_correction_workflow
    from spynoza.registration.workflows import create_registration_workflow
    from spynoza.filtering.nodes import sgfilter
    from spynoza.conversion.nodes import psc
    from spynoza.denoising.retroicor.workflows import create_retroicor_workflow
    from spynoza.masking.workflows import create_masks_from_surface_workflow
    from spynoza.glm.nodes import fit_nuisances

    ########################################################################################
    # nodes
    ########################################################################################

    input_node = pe.Node(
        IdentityInterface(fields=[
            'task',  # main
            'sub_id',  # main
            'ses_id',  # main
            'raw_data_dir',  # main
            'output_directory',  # main
            'sub_FS_id',  # main
            'FS_subject_dir',  # motion correction
            'RepetitionTime',  # motion correction
            'which_file_is_EPI_space',  # motion correction
            'standard_file',  # registration
            'topup_conf_file',  # unwarping
            'EchoTimeDiff',  # unwarping
            'EpiFactor',  # unwarping
            'SenseFactor',  # unwarping
            'WaterFatShift',  # unwarping
            'PhaseEncodingDirection',  # unwarping
            'EchoSpacing',  # unwarping
            'psc_func',  # percent signal change
            'sg_filter_window_length',  # temporal filtering
            'sg_filter_order',  # temporal filtering
            'SliceEncodingDirection',  # retroicor
            'PhysiologySampleRate',  # retroicor
            'SliceTiming',  # retroicor
            'SliceOrder',  # retroicor
            'NumberDummyScans',  # retroicor
            'MultiBandFactor',  # retroicor
            'hr_rvt',  # retroicor
            'av_func',  # extra
            'EchoTime',  # extra
            'bd_design_matrix_file',  # extra
        ]),
        name='inputspec')

    for param in analysis_params:
        setattr(input_node.inputs, param, analysis_params[param])

    # i/o node
    datasource_templates = dict(
        func='{sub_id}/{ses_id}/func/{sub_id}_{ses_id}_task-{task}*_bold.nii.gz',
        magnitude='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*magnitude.nii.gz',
        phasediff='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*phasediff.nii.gz',
        #physio='{sub_id}/{ses_id}/func/*{task}*physio.*',
        #events='{sub_id}/{ses_id}/func/*{task}*_events.pickle',
        #eye='{sub_id}/{ses_id}/func/*{task}*_eyedata.edf'
    )
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    output_node = pe.Node(IdentityInterface(
        fields=(['temporal_filtered_files', 'percent_signal_change_files'])),
                          name='outputspec')

    # nodes for setting the slope/intercept of incoming niftis to (1, 0)
    # this is apparently necessary for the B0 map files
    int_slope_B0_magnitude = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_magnitude')
    int_slope_B0_phasediff = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_phasediff')

    # reorient nodes
    reorient_epi = pe.MapNode(interface=fsl.Reorient2Std(),
                              name='reorient_epi',
                              iterfield=['in_file'])
    reorient_B0_magnitude = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_magnitude')
    reorient_B0_phasediff = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_phasediff')

    # bet_epi = pe.MapNode(interface=
    #     fsl.BET(frac=analysis_parameters['bet_f_value'], vertical_gradient = analysis_parameters['bet_g_value'],
    #             functional=True, mask = True), name='bet_epi', iterfield=['in_file'])

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    preprocessing_workflow = pe.Workflow(name=name)
    preprocessing_workflow.base_dir = op.join(analysis_params['base_dir'],
                                              'temp/')

    # data source
    preprocessing_workflow.connect(input_node, 'raw_data_dir', datasource,
                                   'base_directory')
    preprocessing_workflow.connect(input_node, 'sub_id', datasource, 'sub_id')
    preprocessing_workflow.connect(input_node, 'ses_id', datasource, 'ses_id')
    preprocessing_workflow.connect(input_node, 'task', datasource, 'task')

    # and data sink
    preprocessing_workflow.connect(input_node, 'output_directory', datasink,
                                   'base_directory')

    # BET (we don't do this, because we expect the raw data in the bids folder to be betted
    # already for anonymization purposes)
    # preprocessing_workflow.connect(datasource, 'func', bet_epi, 'in_file')

    # non-uniformity correction
    # preprocessing_workflow.connect(bet_epi, 'out_file', nuc, 'in_file')
    # preprocessing_workflow.connect(datasource, 'func', nuc, 'in_file')

    # reorient images
    preprocessing_workflow.connect(datasource, 'func', reorient_epi, 'in_file')
    preprocessing_workflow.connect(datasource, 'magnitude',
                                   reorient_B0_magnitude, 'in_file')
    preprocessing_workflow.connect(datasource, 'phasediff',
                                   reorient_B0_phasediff, 'in_file')
    preprocessing_workflow.connect(reorient_epi, 'out_file', datasink,
                                   'reorient')

    #B0 field correction:
    if analysis_params['B0_or_topup'] == 'B0':
        # set slope/intercept to unity for B0 map
        preprocessing_workflow.connect(reorient_B0_magnitude, 'out_file',
                                       int_slope_B0_magnitude, 'in_file')
        preprocessing_workflow.connect(reorient_B0_phasediff, 'out_file',
                                       int_slope_B0_phasediff, 'in_file')
        #B0 field correction:
        if 'EchoSpacing' in analysis_params:
            B0_wf = create_B0_workflow(name='B0', scanner='siemens')
            preprocessing_workflow.connect(input_node, 'EchoSpacing', B0_wf,
                                           'inputspec.echo_spacing')
        else:
            B0_wf = create_B0_workflow(name='B0', scanner='philips')
            preprocessing_workflow.connect(input_node, 'WaterFatShift', B0_wf,
                                           'inputspec.wfs')
            preprocessing_workflow.connect(input_node, 'EpiFactor', B0_wf,
                                           'inputspec.epi_factor')
        preprocessing_workflow.connect(input_node, 'SenseFactor', B0_wf,
                                       'inputspec.acceleration')
        preprocessing_workflow.connect(reorient_epi, 'out_file', B0_wf,
                                       'inputspec.in_files')
        preprocessing_workflow.connect(int_slope_B0_magnitude, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_mag')
        preprocessing_workflow.connect(int_slope_B0_phasediff, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_pha')
        preprocessing_workflow.connect(input_node, 'EchoTimeDiff', B0_wf,
                                       'inputspec.te_diff')
        preprocessing_workflow.connect(input_node, 'PhaseEncodingDirection',
                                       B0_wf,
                                       'inputspec.phase_encoding_direction')
        preprocessing_workflow.connect(B0_wf, 'outputspec.field_coefs',
                                       datasink, 'B0.fieldcoef')
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files', datasink,
                                       'B0')

    # motion correction
    motion_proc = create_motion_correction_workflow(
        'moco', method=analysis_params['moco_method'])
    if analysis_params['B0_or_topup'] == 'B0':
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files',
                                       motion_proc, 'inputspec.in_files')
    elif analysis_params['B0_or_topup'] == 'neither':
        # bet_epi is disabled above (raw data are assumed already betted), so
        # feed the reoriented EPIs straight into motion correction
        preprocessing_workflow.connect(reorient_epi, 'out_file', motion_proc,
                                       'inputspec.in_files')
    preprocessing_workflow.connect(input_node, 'RepetitionTime', motion_proc,
                                   'inputspec.tr')
    preprocessing_workflow.connect(input_node, 'output_directory', motion_proc,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(input_node, 'which_file_is_EPI_space',
                                   motion_proc,
                                   'inputspec.which_file_is_EPI_space')

    # registration
    reg = create_registration_workflow(analysis_params, name='reg')
    preprocessing_workflow.connect(input_node, 'output_directory', reg,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file',
                                   reg, 'inputspec.EPI_space_file')
    preprocessing_workflow.connect(input_node, 'sub_FS_id', reg,
                                   'inputspec.freesurfer_subject_ID')
    preprocessing_workflow.connect(input_node, 'FS_subject_dir', reg,
                                   'inputspec.freesurfer_subject_dir')
    preprocessing_workflow.connect(input_node, 'standard_file', reg,
                                   'inputspec.standard_file')

    # temporal filtering
    preprocessing_workflow.connect(input_node, 'sg_filter_window_length',
                                   sgfilter, 'window_length')
    preprocessing_workflow.connect(input_node, 'sg_filter_order', sgfilter,
                                   'polyorder')
    preprocessing_workflow.connect(motion_proc,
                                   'outputspec.motion_corrected_files',
                                   sgfilter, 'in_file')
    preprocessing_workflow.connect(sgfilter, 'out_file', datasink, 'tf')

    # percent signal change
    preprocessing_workflow.connect(input_node, 'psc_func', psc, 'func')
    preprocessing_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    preprocessing_workflow.connect(psc, 'out_file', datasink, 'psc')
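
    # Percent signal change expresses each voxel's time course relative to
    # its baseline; the 'func' input above suggests the baseline statistic
    # is configurable. A hedged numpy sketch using the mean (the helper name
    # is hypothetical):
    #
    # import numpy as np
    # def _percent_signal_change(data):
    #     baseline = data.mean(axis=-1, keepdims=True)
    #     return 100.0 * (data - baseline) / baseline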

    # # retroicor functionality
    # if analysis_params['perform_physio'] == 1:
    #     retr = create_retroicor_workflow(name='retroicor', order_or_timing=analysis_params['retroicor_order_or_timing'])
    #
    #     # retroicor can take the crudest form of EPI file, so that it proceeds quickly
    #     preprocessing_workflow.connect(datasource, 'func', retr, 'inputspec.in_files')
    #     preprocessing_workflow.connect(datasource, 'physio', retr, 'inputspec.phys_files')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.nr_dummies', retr, 'inputspec.nr_dummies')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.MultiBandFactor', retr, 'inputspec.MB_factor')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.tr', retr, 'inputspec.tr')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceEncodingDirection', retr, 'inputspec.slice_direction')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceTiming', retr, 'inputspec.slice_timing')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceOrder', retr, 'inputspec.slice_order')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.PhysiologySampleRate', retr, 'inputspec.phys_sample_rate')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.hr_rvt', retr, 'inputspec.hr_rvt')
    #
    #     # fit nuisances from retroicor
    #     # preprocessing_workflow.connect(retr, 'outputspec.evs', fit_nuis, 'slice_regressor_list')
    #     # preprocessing_workflow.connect(motion_proc, 'outputspec.extended_motion_correction_parameters', fit_nuis, 'vol_regressors')
    #     # preprocessing_workflow.connect(psc, 'out_file', fit_nuis, 'in_file')
    #
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', av_r, 'in_files')
    #
    #     preprocessing_workflow.connect(retr, 'outputspec.new_phys', datasink, 'phys.log')
    #     preprocessing_workflow.connect(retr, 'outputspec.fig_file', datasink, 'phys.figs')
    #     preprocessing_workflow.connect(retr, 'outputspec.evs', datasink, 'phys.evs')
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', datasink, 'phys.res')
    #     # preprocessing_workflow.connect(fit_nuis, 'rsq_file', datasink, 'phys.rsq')
    #     # preprocessing_workflow.connect(fit_nuis, 'beta_file', datasink, 'phys.betas')
    #
    #     # preprocessing_workflow.connect(av_r, 'out_file', datasink, 'av_r')

    #
    # ########################################################################################
    # # surface-based masking, when running an MRI analysis
    # ########################################################################################
    #
    #     all_mask_opds = ['dc'] + analysis_params['avg_subject_RS_label_folders']
    #     all_mask_lds = [''] + analysis_params['avg_subject_RS_label_folders']
    #
    #     # loop across different folders to mask
    #     # untested as yet.
    #     masking_list = []
    #     dilate_list = []
    #     for opd, label_directory in zip(all_mask_opds, all_mask_lds):
    #         dilate_list.append(
    #             pe.MapNode(interface=fsl.maths.DilateImage(
    #                 operation='mean', kernel_shape='sphere', kernel_size=analysis_params['dilate_kernel_size']),
    #                 name='dilate_'+label_directory, iterfield=['in_file']))
    #
    #         masking_list.append(create_masks_from_surface_workflow(name='masks_from_surface_' + label_directory))
    #
    #         masking_list[-1].inputs.inputspec.label_directory = label_directory
    #         masking_list[-1].inputs.inputspec.fill_thresh = 0.005
    #         masking_list[-1].inputs.inputspec.re = '*.label'
    #
    #         preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', masking_list[-1], 'inputspec.EPI_space_file')
    #         preprocessing_workflow.connect(input_node, 'output_directory', masking_list[-1], 'inputspec.output_directory')
    #         preprocessing_workflow.connect(input_node, 'FS_subject_dir', masking_list[-1], 'inputspec.freesurfer_subject_dir')
    #         preprocessing_workflow.connect(input_node, 'sub_FS_id', masking_list[-1], 'inputspec.freesurfer_subject_ID')
    #         preprocessing_workflow.connect(reg, 'rename_register.out_file', masking_list[-1], 'inputspec.reg_file')
    #
    #         preprocessing_workflow.connect(masking_list[-1], 'outputspec.masks', dilate_list[-1], 'in_file')
    #         preprocessing_workflow.connect(dilate_list[-1], 'out_file', datasink, 'masks.' + opd)
    #
    #     # # surface-based label import in to EPI space, but now for RS labels
    #     # these should have been imported to the subject's FS folder,
    #     # see scripts/annot_conversion.sh
    #     RS_masks_from_surface = create_masks_from_surface_workflow(name='RS_masks_from_surface')
    #     RS_masks_from_surface.inputs.inputspec.label_directory = analysis_params['avg_subject_label_folder']
    #     RS_masks_from_surface.inputs.inputspec.fill_thresh = 0.005
    #     RS_masks_from_surface.inputs.inputspec.re = '*.label'
    #
    #     preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', RS_masks_from_surface, 'inputspec.EPI_space_file')
    #     preprocessing_workflow.connect(input_node, 'output_directory', RS_masks_from_surface, 'inputspec.output_directory')
    #     preprocessing_workflow.connect(input_node, 'FS_subject_dir', RS_masks_from_surface, 'inputspec.freesurfer_subject_dir')
    #     preprocessing_workflow.connect(input_node, 'sub_FS_id', RS_masks_from_surface, 'inputspec.freesurfer_subject_ID')
    #     preprocessing_workflow.connect(reg, 'rename_register.out_file', RS_masks_from_surface, 'inputspec.reg_file')
    #
    #     preprocessing_workflow.connect(RS_masks_from_surface, 'outputspec.masks', RS_dilate_cortex, 'in_file')
    #     preprocessing_workflow.connect(RS_dilate_cortex, 'out_file', datasink, 'masks.' + analysis_params['avg_subject_label_folder'])

    ########################################################################################
    # wrapping up, sending data to datasink
    ########################################################################################

    # preprocessing_workflow.connect(bet_epi, 'out_file', datasink, 'bet.epi')
    # preprocessing_workflow.connect(bet_epi, 'mask_file', datasink, 'bet.epimask')
    # preprocessing_workflow.connect(bet_topup, 'out_file', datasink, 'bet.topup')
    # preprocessing_workflow.connect(bet_topup, 'mask_file', datasink, 'bet.topupmask')

    # preprocessing_workflow.connect(nuc, 'out_file', datasink, 'nuc')
    # preprocessing_workflow.connect(datasource, 'physio', datasink, 'phys')
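
    # DataSink writes each connection under its base_directory/container,
    # with dots in the destination string becoming subfolders (e.g. the
    # 'bet.epi' line above would land in <base_directory>/<container>/bet/epi).
    # A hedged example of tidying output filenames; the substitution values
    # are illustrative, not part of this pipeline:
    #
    # datasink.inputs.substitutions = [('_flirt', ''), ('_brain', '_bet')]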

    return preprocessing_workflow