Example #1
def create_non_uniformity_correct_4D_file(auto_clip=False, clip_low=7,
                                          clip_high=200, n_procs=12):
    """non_uniformity_correct_4D_file corrects functional files for nonuniformity on a timepoint by timepoint way.
    Internally it implements a workflow to split the in_file, correct each separately and then merge them back together.
    This is an ugly workaround as we have to find the output of the workflow's datasink somewhere, but it should work.

    Parameters
    ----------
    in_file : str
        Absolute path to nifti-file.
    auto_clip : bool (default: False)
        whether to let 3dUniformize decide on clipping boundaries
    clip_low : float (default: 7),
        lower clipping bound for 3dUniformize
    clip_high : float (default: 200),
        higher clipping bound for 3dUniformize
    n_procs : int (default: 12),
        the number of processes to run the internal workflow with

    Returns
    -------
    out_file : non-uniformity corrected file
        List of absolute paths to nifti-files.    """

    # nodes
    input_node = pe.Node(IdentityInterface(
        fields=['in_file',
                'auto_clip',
                'clip_low',
                'clip_high',
                'output_directory',
                'sub_id']), name='inputspec')
    split = pe.Node(Function(input_names='in_file', output_names=['out_files'],
                             function=split_4D_to_3D), name='split')

    uniformer = pe.MapNode(
        Uniformize(clip_high=clip_high, clip_low=clip_low, auto_clip=auto_clip,
                   outputtype='NIFTI_GZ'), name='uniformer',
        iterfield=['in_file'])
    merge = pe.MapNode(fsl.Merge(dimension='t'), name='merge',
                       iterfield=['in_files'])

    datasink = pe.Node(nio.DataSink(infields=['topup'], container=''),
                       name='sinker')
    datasink.inputs.parameterization = False

    # workflow
    nuc_wf = pe.Workflow(name='nuc')
    nuc_wf.connect(input_node, 'sub_id', datasink, 'container')
    nuc_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    nuc_wf.connect(input_node, 'in_file', split, 'in_file')
    nuc_wf.connect(split, 'out_files', uniformer, 'in_file')
    nuc_wf.connect(uniformer, 'out_file', merge, 'in_files')
    nuc_wf.connect(merge, 'merged_file', datasink, 'uni')

    # nuc_wf.run('MultiProc', plugin_args={'n_procs': n_procs})
    # out_file = glob.glob(os.path.join(td, 'uni', fn_base + '_0000*.nii.gz'))[0]

    return nuc_wf
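
# A minimal usage sketch, not part of the original source: it assumes the
# module-level imports/helpers used above (pe, IdentityInterface, Function,
# fsl, nio, split_4D_to_3D, Uniformize) are available; the paths and the
# subject id are hypothetical.
if __name__ == '__main__':
    nuc_wf = create_non_uniformity_correct_4D_file(n_procs=4)
    nuc_wf.inputs.inputspec.in_file = '/data/sub-01_bold.nii.gz'
    nuc_wf.inputs.inputspec.output_directory = '/data/derivatives'
    nuc_wf.inputs.inputspec.sub_id = 'sub-01'
    nuc_wf.run('MultiProc', plugin_args={'n_procs': 4})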
Example #2
def vol2png(qcname, tag="", overlay=True, overlayiterated=True):
    import os
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.globals as globals

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    inputspec = pe.Node(
        utility.IdentityInterface(fields=['bg_image', 'overlay_image']),
        name='inputspec')

    analysisflow = pe.Workflow(name=qcname + tag + '_qc')

    myonevol_bg = onevol.onevol_workflow(wf_name="onebg")
    analysisflow.connect(inputspec, 'bg_image', myonevol_bg, 'inputspec.func')

    if overlay and not overlayiterated:
        #myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        #analysisflow.connect(inputspec, 'overlay_image', myonevol_ol, 'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    # Create png images for quality check
    if overlay and overlayiterated:
        myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        analysisflow.connect(inputspec, 'overlay_image', myonevol_ol,
                             'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file', 'image_edges'],
                            name='slicer')
    if not overlay:
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    slicer.inputs.image_width = 2000
    slicer.inputs.out_file = qcname
    # set output all axial slices into one picture
    slicer.inputs.sample_axial = 5
    #slicer.inputs.middle_slices = True

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".ppm")]

    analysisflow.connect(myonevol_bg, 'outputspec.func1vol', slicer, 'in_file')
    if overlay and not overlayiterated:
        analysisflow.connect(inputspec, 'overlay_image', slicer, 'image_edges')
    if overlay and overlayiterated:
        analysisflow.connect(myonevol_ol, 'outputspec.func1vol', slicer,
                             'image_edges')
    analysisflow.connect(slicer, 'out_file', ds_qc, qcname)

    return analysisflow
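
# A hedged usage sketch: vol2png is designed to be embedded in a parent
# workflow, as the compcor example further below does; 'parent_wf',
# 'func_node' and 'mask_node' are hypothetical names.
#
# myqc = vol2png("compcor_noiseroi")
# parent_wf.connect(func_node, 'out_file', myqc, 'inputspec.bg_image')
# parent_wf.connect(mask_node, 'out_file', myqc, 'inputspec.overlay_image')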
Example #3
def fMRI2QC(qcname, tag="", SinkDir=".", QCDIR="QC", indiv_atlas=False):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.plot.image as plot
    import PUMI.utils.globals as globals

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'atlas', 'confounds']),
        name='inputspec')
    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-3mm.nii.gz'

    if indiv_atlas:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds', 'atlaslabels'],
                              name="qc_fmri")
    else:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds'],
                              name="qc_fmri")

    plotfmri.inputs.output_file = "qc_fmri.png"
    # the default atlas works only for standardized, 3mm-resolution data

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'func', plotfmri, 'func')
    analysisflow.connect(inputspec, 'atlas', plotfmri, 'atlaslabels')
    analysisflow.connect(inputspec, 'confounds', plotfmri, 'confounds')

    analysisflow.connect(plotfmri, 'plotfile', ds_qc, qcname)

    return analysisflow
Example #4
def create_anat_noise_roi_workflow(SinkTag="func_preproc",
                                   wf_name="create_noise_roi"):
    """
    Creates an anatomical noise ROI for use with compcor

    inputs are awaited from the (BBR-based) func2anat registration
    and are already transformed to functional space

    Tamas Spisak
    2018


    """
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.globals as globals

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['wm_mask', 'ventricle_mask']),
        name='inputspec')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['noise_roi']),
                         name='outputspec')

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    # erode WM mask in functional space
    erode_mask = pe.MapNode(fsl.ErodeImage(),
                            iterfield=['in_file'],
                            name="erode_wm_mask")
    wf.connect(inputspec, 'wm_mask', erode_mask, 'in_file')

    # add ventricle and eroded WM masks
    add_masks = pe.MapNode(fsl.ImageMaths(op_string=' -add'),
                           iterfield=['in_file', 'in_file2'],
                           name="addimgs")

    wf.connect(inputspec, 'ventricle_mask', add_masks, 'in_file')
    wf.connect(erode_mask, 'out_file', add_masks, 'in_file2')

    wf.connect(add_masks, 'out_file', outputspec, 'noise_roi')

    return wf
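
# A hedged usage sketch: per the docstring, the inputs come from the
# (BBR-based) func2anat registration (see bbr_workflow further below);
# 'parent_wf' is a hypothetical enclosing workflow.
#
# mybbr = bbr_workflow()
# noiseroi = create_anat_noise_roi_workflow()
# parent_wf.connect(mybbr, 'outputspec.wm_mask_in_funcspace',
#                   noiseroi, 'inputspec.wm_mask')
# parent_wf.connect(mybbr, 'outputspec.ventricle_mask_in_funcspace',
#                   noiseroi, 'inputspec.ventricle_mask')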
Example #5
def regTimeseriesQC(qcname, tag="", SinkDir=".", QCDIR="QC"):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.plot.timeseries as plot
    import PUMI.utils.globals as globals

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'modules', 'atlas']),
        name='inputspec')
    inputspec.inputs.atlas = None

    plotregts = pe.MapNode(interface=Function(
        input_names=['timeseries', 'modules', 'output_file', 'atlas'],
        output_names=['plotfile'],
        function=plot.plot_carpet_ts),
                           iterfield=['timeseries'],
                           name="qc_timeseries")
    plotregts.inputs.output_file = "qc_timeseries.png"

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'timeseries', plotregts, 'timeseries')
    analysisflow.connect(inputspec, 'atlas', plotregts, 'atlas')
    analysisflow.connect(inputspec, 'modules', plotregts, 'modules')
    analysisflow.connect(plotregts, 'plotfile', ds_qc, qcname)

    return analysisflow
Example #6
def create_melodic_workflow(name='melodic', template=None, varnorm=True):

    input_node = pe.Node(IdentityInterface(fields=['in_file']),
                         name='inputspec')

    output_node = pe.Node(IdentityInterface(fields=['out_dir']),
                          name='outputspec')

    if template is None:
        template = op.join(op.dirname(op.dirname(op.abspath(__file__))),
                           'data', 'fsf_templates', 'melodic_template.fsf')

    melodic4fix_node = pe.MapNode(interface=Melodic4fix,
                                  iterfield=['in_file', 'out_dir'],
                                  name='melodic4fix')

    # Don't know if this works. Could also set these defaults inside the
    # melodic4fix node definition...
    melodic4fix_node.inputs.template = template
    melodic4fix_node.inputs.varnorm = varnorm

    rename_ica = pe.MapNode(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=extract_task),
                            name='rename_ica',
                            iterfield=['in_file'])

    mel4fix_workflow = pe.Workflow(name=name)

    mel4fix_workflow.connect(input_node, 'in_file', melodic4fix_node,
                             'in_file')

    mel4fix_workflow.connect(input_node, 'in_file', rename_ica, 'in_file')

    mel4fix_workflow.connect(rename_ica, 'out_file', melodic4fix_node,
                             'out_dir')

    mel4fix_workflow.connect(melodic4fix_node, 'out_dir', output_node,
                             'out_dir')

    return mel4fix_workflow
Example #7
def create_motion_confound_workflow(order=2,
                                    fd_cutoff=.2,
                                    name='motion_confound'):

    input_node = pe.Node(interface=IdentityInterface(
        fields=['par_file', 'output_directory', 'sub_id']),
                         name='inputspec')

    output_node = pe.Node(
        interface=IdentityInterface(fields=['out_fd', 'out_ext_moco']),
        name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    extend_motion_parameters = pe.MapNode(Extend_motion_parameters,
                                          iterfield=['par_file'],
                                          name='extend_motion_parameters')
    extend_motion_parameters.inputs.order = order

    framewise_disp = pe.MapNode(FramewiseDisplacement(parameter_source='FSL'),
                                iterfield=['in_file'],
                                name='framewise_disp')

    mcf_wf = pe.Workflow(name=name)
    mcf_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    mcf_wf.connect(input_node, 'sub_id', datasink, 'container')
    mcf_wf.connect(input_node, 'par_file', extend_motion_parameters,
                   'par_file')
    mcf_wf.connect(input_node, 'par_file', framewise_disp, 'in_file')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', output_node,
                   'out_ext_moco')
    mcf_wf.connect(framewise_disp, 'out_file', output_node, 'out_fd')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', datasink, 'confounds')
    mcf_wf.connect(framewise_disp, 'out_file', datasink, 'confounds.@df')

    return mcf_wf
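
# A minimal usage sketch, not part of the original source; the .par path and
# the subject id are hypothetical MCFLIRT outputs.
if __name__ == '__main__':
    mcf_wf = create_motion_confound_workflow(order=2, fd_cutoff=.2)
    mcf_wf.inputs.inputspec.par_file = ['/data/sub-01_bold_mcf.par']
    mcf_wf.inputs.inputspec.output_directory = '/data/derivatives'
    mcf_wf.inputs.inputspec.sub_id = 'sub-01'
    mcf_wf.run()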
Example #8
def create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti'):
    """Summary
    
    Parameters
    ----------
    analysis_info : TYPE
        Description
    name : str, optional
        Description
    
    Returns
    -------
    TYPE
        Description
    """
    import os.path as op
    import nipype.pipeline as pe
    import tempfile
    import glob
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from nipype.interfaces.ants import ApplyTransforms
    from bids.grabbids import BIDSLayout

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.filtering.nodes import Savgol_filter, Savgol_filter_confounds
    from spynoza.conversion.nodes import psc
    from spynoza.utils import get_scaninfo, pickfirst
    from utils import mask_nii_2_hdf5, nistats_confound_glm, mask_to_tsv

    input_node = pe.Node(IdentityInterface(
        fields=['bids_directory', 'fmriprep_directory', 'output_directory', 'mask_directory', 'sub_id']), name='inputspec')

    BIDSNiiGrabber = pe.Node(Function(function=get_niftis, input_names=["subject_id",
                                                                        "data_dir", "task", "space"],
                                      output_names=["nii_files"]), name="BIDSNiiGrabber")
    BIDSNiiGrabber.inputs.space = 'mni'

    BIDSEventsGrabber = pe.Node(Function(function=get_events, input_names=["subject_id",
                                                                           "data_dir", "task"],
                                         output_names=["event_files"]), name="BIDSEventsGrabber")
    
    BIDSConfoundsGrabber = pe.Node(Function(function=get_confounds, input_names=["subject_id",
                                                                                 "data_dir", "task"],
                                            output_names=["confounds_tsv_files"]), name="BIDSConfoundsGrabber")
    
    MaskGrabber = pe.Node(Function(function=get_masks, input_names=["mask_directory"],
                                   output_names=["mask_files"]), name="MaskGrabber")

    HDF5PSCMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_psc_masker')
    HDF5PSCMasker.inputs.folder_alias = 'psc'
    HDF5PSCMasker.inputs.hdf5_file = op.join(tempfile.mkdtemp(), 'roi.h5')

    HDF5PSCNuisMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                         function=mask_nii_2_hdf5),
                                name='hdf5_psc_nuis_masker')
    HDF5PSCNuisMasker.inputs.folder_alias = 'psc_nuis'

    # HDF5StatsMasker = pe.Node(Function(input_names = ['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names = ['hdf5_file'],
    #                             function = mask_nii_2_hdf5),
    #                             name = 'hdf5_stats_masker')
    # HDF5StatsMasker.inputs.folder_alias = 'stats'

    HDF5ROIMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_roi_masker')
    HDF5ROIMasker.inputs.folder_alias = 'rois'

    ConfoundGLM = pe.MapNode(Function(input_names=['nifti_file', 'confounds_file', 'which_confounds'], output_names=['output_pdf', 'output_nifti'],
                                      function=nistats_confound_glm),
                             name='nistats_confound_glm', iterfield=["nifti_file", "confounds_file"])
    ConfoundGLM.inputs.which_confounds = analysis_info['nuisance_columns']

    # VolTransNode = pe.MapNode(interface=fsl.preprocess.ApplyXFM(apply_xfm=False, apply_isoxfm=True, interp='sinc'),
    #                                                     name='vol_trans', iterfield = ['in_file'])

    # VolTransNode = pe.MapNode(interface=ApplyTransforms(transforms='identity', interpolation='LanczosWindowedSinc'),
    #                                                     name='vol_trans', iterfield = ['input_image'])

    ThreshNode = pe.MapNode(fsl.Threshold(thresh=analysis_info['MNI_mask_threshold'], args='-bin', output_datatype='int'),
                            name='thresh', iterfield=['in_file'])

    TSVMasker = pe.MapNode(Function(input_names=['in_file', 'mask_files'],
                                    output_names=['out_file'],
                                    function=mask_to_tsv),
                           iterfield=['in_file'],
                           name='tsv_masker')

    ROIResampler = pe.Node(Function(input_names=['mni_roi_files', 'mni_epi_space_file'], output_names=['output_roi_files'],
                                    function=resample_rois),
                           name='roi_resampler')

    sgfilter = pe.MapNode(interface=Savgol_filter,
                          name='sgfilter',
                          iterfield=['in_file'])
    sgfilter_confounds = pe.MapNode(interface=Savgol_filter_confounds,
                                    name='sgfilter_confounds',
                                    iterfield=['confounds'])

    # Both fmri data and nuisances are filtered with identical parameters
    sgfilter.inputs.polyorder = sgfilter_confounds.inputs.polyorder = analysis_info[
        'sgfilter_polyorder']
    sgfilter.inputs.deriv = sgfilter_confounds.inputs.deriv = analysis_info['sgfilter_deriv']
    sgfilter.inputs.window_length = sgfilter_confounds.inputs.window_length = analysis_info[
        'sgfilter_window_length']
    sgfilter.inputs.tr = sgfilter_confounds.inputs.tr = analysis_info['RepetitionTime']

    # set the psc function
    psc.inputs.func = analysis_info['psc_function']

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    VWM_anti_pp_workflow = pe.Workflow(name=name)

    # data source
    VWM_anti_pp_workflow.connect(
        input_node, 'bids_directory', BIDSEventsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                                 BIDSEventsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSNiiGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                                 BIDSNiiGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSConfoundsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                                 BIDSConfoundsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'mask_directory', MaskGrabber, 'mask_directory')

    # filter and psc
    VWM_anti_pp_workflow.connect(BIDSNiiGrabber, 'nii_files', sgfilter, 'in_file')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    # do the same filtering on confounds
    VWM_anti_pp_workflow.connect(BIDSConfoundsGrabber, 'confounds_tsv_files', sgfilter_confounds, 'confounds')

    # cleanup GLM
    VWM_anti_pp_workflow.connect(psc, 'out_file', ConfoundGLM, 'nifti_file')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', ConfoundGLM, 'confounds_file')

    # preparing masks, ANTS and fsl not working correctly
    # ANTs
    # pearl_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference_image')
    # pearl_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'input_image')
    # fsl
    # pearl_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference')
    # pearl_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'in_file')
    # pearl_pp_workflow.connect(VolTransNode, 'output_image', ThreshNode, 'in_file')

    VWM_anti_pp_workflow.connect(
        BIDSNiiGrabber, ('nii_files', pickfirst), ROIResampler, 'mni_epi_space_file')
    VWM_anti_pp_workflow.connect(
        MaskGrabber, 'mask_files', ROIResampler, 'mni_roi_files')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', ThreshNode, 'in_file')

    # masking data
    VWM_anti_pp_workflow.connect(psc, 'out_file', HDF5PSCMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                                 HDF5PSCMasker, 'mask_files')

    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', HDF5PSCNuisMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                                 HDF5PSCNuisMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCMasker, 'hdf5_file', HDF5PSCNuisMasker, 'hdf5_file')

    # needs stats before we do a masker....
    # pearl_pp_workflow.connect(VolTransNode, 'out_file', HDF5StatsMasker, 'in_files')
    # pearl_pp_workflow.connect(ThreshNode, 'out_file', HDF5StatsMasker, 'mask_files')
    # pearl_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', HDF5StatsMasker, 'hdf5_file')

    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', HDF5ROIMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                                 HDF5ROIMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCNuisMasker, 'hdf5_file', HDF5ROIMasker, 'hdf5_file')

    # mask to .tsv, for one timecourse per roi
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', TSVMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', TSVMasker, 'in_file')

    # set up output folder
    VWM_anti_pp_workflow.connect(
        input_node, 'output_directory', datasink, 'base_directory')

    # connect all outputs to datasink
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', datasink, 'confound_glm')
    VWM_anti_pp_workflow.connect(
        BIDSEventsGrabber, 'event_files', datasink, 'events')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', datasink, 'sg_filter')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', datasink, 'sg_filter_confound')
    VWM_anti_pp_workflow.connect(psc, 'out_file', datasink, 'psc')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', datasink, 'masks_f')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file', datasink, 'masks_b')
    VWM_anti_pp_workflow.connect(TSVMasker, 'out_file', datasink, 'tsv')
    VWM_anti_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', datasink, 'h5')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_pdf', datasink, 'confound_glm_report')

    return VWM_anti_pp_workflow
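
# A hedged sketch of the analysis_info dict this workflow expects, inferred
# from the keys accessed above; the values are made-up placeholders.
if __name__ == '__main__':
    analysis_info = {
        'sgfilter_polyorder': 3,
        'sgfilter_deriv': 0,
        'sgfilter_window_length': 120,
        'RepetitionTime': 2.0,
        'psc_function': 'median',
        'nuisance_columns': ['X', 'Y', 'Z', 'RotX', 'RotY', 'RotZ'],
        'MNI_mask_threshold': 0.5,
    }
    wf = create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti')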
Example #9
#This is a Nipype generator. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Generic datagrabber module that wraps around glob in an arbitrary way
NodeHash_30f69e0 = pe.Node(io.S3DataGrabber(outfields=['outfiles']),
                           name='NodeName_30f69e0')
NodeHash_30f69e0.inputs.bucket = 'openneuro'
NodeHash_30f69e0.inputs.sort_filelist = True
NodeHash_30f69e0.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
NodeHash_30f69e0.inputs.anon = True
NodeHash_30f69e0.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_30f69e0.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
NodeHash_1d000c0 = pe.Node(interface=fsl.SliceTimer(), name='NodeName_1d000c0')

#Wraps command **mcflirt**
NodeHash_22f2e80 = pe.Node(interface=fsl.MCFLIRT(), name='NodeName_22f2e80')

#Computes the time-course SNR for a time series
NodeHash_50c02c0 = pe.Node(interface=confounds.TSNR(), name='NodeName_50c02c0')
NodeHash_50c02c0.inputs.regress_poly = 3

#Wraps command **fslstats**
Example #10
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import PUMI.func_preproc.info.info_get as info_get
import PUMI.utils.utils_convert as utils_convert
import nipype.interfaces.afni as afni
import nipype.interfaces.io as io

OutJSON = SinkDir + "/outputs.JSON"
WorkingDirectory = "."

#Basic interface class generates identity mappings
NodeHash_6040006ae640 = pe.Node(utility.IdentityInterface(fields=['func', 'slicetiming_txt']), name='NodeName_6040006ae640')
NodeHash_6040006ae640.inputs.func = func
NodeHash_6040006ae640.inputs.slicetiming_txt = slicetiming_txt

#Custom interface wrapping function TR
NodeHash_6000004b9860 = pe.MapNode(interface=info_get.TR, name='NodeName_6000004b9860', iterfield=['in_file'])

#Custom interface wrapping function Str2Float
NodeHash_6040006ae9a0 = pe.MapNode(interface=utils_convert.Str2Float, name='NodeName_6040006ae9a0', iterfield=['str'])

#Custom interface wrapping function Float2Str
NodeHash_6040004aee80 = pe.MapNode(interface=utils_convert.Float2Str, name='NodeName_6040004aee80', iterfield=['float'])

#Wraps command **3dTshift**
NodeHash_6040004ad140 = pe.MapNode(interface=afni.TShift(), name='NodeName_6040004ad140', iterfield=['in_file', 'tr'])
NodeHash_6040004ad140.inputs.rltplus = True
Example #11
def compcor_workflow(SinkTag="func_preproc", wf_name="compcor"):
    """


               `source: -`


               Component based noise reduction method (Behzadi et al.,2007): Regressing out principal components from noise ROIs.
               Here the aCompCor is used.

               Workflow inputs:
                   :param func_aligned: The reoriented and realigned functional image.
                   :param mask_files: Mask files which determine ROI(s). The default mask is the
                   :param components_file
                   :param num_componenets:
                   :param pre_filter: Detrend time series prior to component extraction.
                   :param TR
                   :param SinkDir:
                   :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow.

               Workflow outputs:




                   :return: slt_workflow - workflow




               Balint Kincses
               [email protected]
               2018


     """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.algorithms.confounds as cnf
    import PUMI.func_preproc.info.info_get as info_get
    import PUMI.utils.utils_convert as utils_convert
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func_aligned', 'mask_file']),
        name='inputspec')

    myqc = qc.vol2png("compcor_noiseroi")

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # standardize timeseries prior to compcor. added by tspisak
    scale = pe.MapNode(interface=utility.Function(input_names=['in_file'],
                                                  output_names=['scaled_file'],
                                                  function=scale_vol),
                       iterfield=['in_file'],
                       name='scale_func')

    # Calculate compcor files
    compcor = pe.MapNode(
        interface=cnf.ACompCor(pre_filter='polynomial',
                               header_prefix="",
                               num_components=5),
        iterfield=['realigned_file', 'repetition_time', 'mask_files'],
        name='compcor')

    # Custom interface wrapping function Str2Float
    func_str2float = pe.MapNode(interface=utils_convert.Str2Float,
                                iterfield=['str'],
                                name='func_str2float')
    # Drop first line of the ACompCor function output
    drop_firstline = pe.MapNode(interface=utils_convert.DropFirstLine,
                                iterfield=['txt'],
                                name='drop_firstline')
    # Custom interface wrapping function TR
    TRvalue = pe.MapNode(interface=info_get.TR,
                         iterfield=['in_file'],
                         name='TRvalue')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['components_file']),
                         name='outputspec')

    # save data out with Datasink
    ds_text = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_text.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".txt")]
    ds_text.inputs.base_directory = SinkDir

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func_aligned', scale, 'in_file')
    analysisflow.connect(scale, 'scaled_file', compcor, 'realigned_file')
    analysisflow.connect(inputspec, 'func_aligned', TRvalue, 'in_file')
    analysisflow.connect(TRvalue, 'TR', func_str2float, 'str')
    analysisflow.connect(func_str2float, 'float', compcor, 'repetition_time')
    #analysisflow.connect(TRvalue, 'TR', compcor, 'repetition_time')
    analysisflow.connect(inputspec, 'mask_file', compcor, 'mask_files')
    analysisflow.connect(compcor, 'components_file', drop_firstline, 'txt')
    analysisflow.connect(drop_firstline, 'droppedtxtfloat', outputspec,
                         'components_file')
    analysisflow.connect(compcor, 'components_file', ds_text, 'compcor_noise')

    analysisflow.connect(inputspec, 'func_aligned', myqc, 'inputspec.bg_image')
    analysisflow.connect(inputspec, 'mask_file', myqc,
                         'inputspec.overlay_image')

    analysisflow.connect(inputspec, 'mask_file', ds_nii, 'compcor_noise_mask')

    return analysisflow
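
# A hedged usage sketch: in a parent workflow the noise ROI (e.g. from
# create_anat_noise_roi_workflow above) would be fed into compcor;
# 'parent_wf' and 'func_node' are hypothetical names.
#
# cc = compcor_workflow()
# parent_wf.connect(func_node, 'out_file', cc, 'inputspec.func_aligned')
# parent_wf.connect(noiseroi, 'outputspec.noise_roi', cc, 'inputspec.mask_file')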
Example #12
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as io

#Wraps command **bet**
my_fsl_BET = pe.Node(interface=fsl.BET(), name='my_fsl_BET')

#Generic datagrabber module that wraps around glob in an arbitrary way
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["out_file", "func"]),
                              name='my_io_S3DataGrabber')

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface=io.DataSink(), name='my_io_DataSink')

#Wraps command **epi_reg**
my_fsl_EpiReg = pe.Node(interface=fsl.EpiReg(), name='my_fsl_EpiReg')

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(my_io_S3DataGrabber, "out_file", my_fsl_BET, "in_file")
analysisflow.connect(my_fsl_BET, "out_file", my_fsl_EpiReg, "t1_brain")
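
#The generated script stops after wiring the nodes; a run call would
#presumably follow (assumption), once the S3DataGrabber template inputs
#are configured:
#analysisflow.run()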
Example #13
def bbr_workflow(SinkTag="func_preproc", wf_name="func2anat"):
    """
        Modified version of CPAC.registration.registration:

        `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/registration/registration.html`


        BBR registration of functional image to anat.

        Workflow inputs:
            :param func: One volume of the 4D fMRI (The one which is the closest to the fieldmap recording in time should be chosen- e.g: if fieldmap was recorded after the fMRI the last volume of it should be chosen).
            :param skull: The oriented high res T1w image.
            :param anat_wm_segmentation: WM probability mask in .
            :param anat_csf_segmentation: CSF probability mask in
            :param bbr_schedule: Parameters which specifies BBR options.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

        Workflow outputs:




            :return: bbreg_workflow - workflow
                func="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz",
                 skull="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                 anat_wm_segmentation="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/anat_preproc/fast/fast__prob_2.nii.gz",



        Balint Kincses
        [email protected]
        2018


        """
    import os
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'func', 'skull', 'anat_wm_segmentation', 'anat_gm_segmentation',
        'anat_csf_segmentation', 'anat_ventricle_segmentation'
    ]),
                        name='inputspec')

    myonevol = onevol.onevol_workflow()

    # trilinear interpolation is used by default in linear registration for func to anat
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file', 'reference'],
                            name='linear_func_to_anat')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6
    linear_reg.inputs.out_matrix_file = "lin_mat"

    # WM probability map is thresholded and masked
    wm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='wm_bb_mask')
    wm_bb_mask.inputs.op_string = '-thr 0.5 -bin'
    # CSF probability map is thresholded and masked
    csf_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                             iterfield=['in_file'],
                             name='csf_bb_mask')
    csf_bb_mask.inputs.op_string = '-thr 0.5 -bin'

    # GM probability map is thresholded and masked
    gm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='gm_bb_mask')
    gm_bb_mask.inputs.op_string = '-thr 0.1 -bin'  # liberal mask to capture all gm signal

    # ventricle probability map is thresholded and masked
    vent_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                              iterfield=['in_file'],
                              name='vent_bb_mask')
    vent_bb_mask.inputs.op_string = '-thr 0.8 -bin -ero -dilM'  # stricter threshold and some morphology for compcor

    # add the CSF and WM masks
    #add_masks=pe.MapNode(interface=fsl.ImageMaths(),
    #                     iterfield=['in_file','in_file2'],
    #                     name='add_masks')
    #add_masks.inputs.op_string = ' -add'

    # A helper function builds the bbr argument that tells flirt to perform
    # BBR registration; it is applied to each element of the list via MapNode
    def bbreg_args(bbreg_target):
        return '-cost bbr -wmseg ' + bbreg_target

    bbreg_arg_convert = pe.MapNode(interface=Function(
        input_names=["bbreg_target"],
        output_names=["arg"],
        function=bbreg_args),
                                   iterfield=['bbreg_target'],
                                   name="bbr_arg_converter")

    # BBR registration within the FLIRT node
    bbreg_func_to_anat = pe.MapNode(
        interface=fsl.FLIRT(),
        iterfield=['in_file', 'reference', 'in_matrix_file', 'args'],
        name='bbreg_func_to_anat')
    bbreg_func_to_anat.inputs.dof = 6

    # calculate the inverse of the transformation matrix (of func to anat)
    convertmatrix = pe.MapNode(interface=fsl.ConvertXFM(),
                               iterfield=['in_file'],
                               name="convertmatrix")
    convertmatrix.inputs.invert_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical csf mask
    reg_anatmask_to_func1 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func1')
    #reg_anatmask_to_func1.inputs.apply_xfm = True
    # use the inverse registration (anat to func) to transform the anatomical wm mask
    reg_anatmask_to_func2 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func2')
    #reg_anatmask_to_func2.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical gm mask
    reg_anatmask_to_func3 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func3')
    # reg_anatmask_to_func2.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical ventricle mask
    reg_anatmask_to_func4 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func4')
    # reg_anatmask_to_func2.inputs.apply_xfm = True

    # Create png images for quality check
    myqc = qc.vol2png("func2anat")

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_sample2anat', 'example_func', 'func_to_anat_linear_xfm',
        'anat_to_func_linear_xfm', 'csf_mask_in_funcspace',
        'wm_mask_in_funcspace', 'gm_mask_in_funcspace',
        'ventricle_mask_in_funcspace'
    ]),
                         name='outputspec')

    analysisflow = pe.Workflow(name=wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
    analysisflow.connect(myonevol, 'outputspec.func1vol', linear_reg,
                         'in_file')
    analysisflow.connect(inputspec, 'skull', linear_reg, 'reference')
    analysisflow.connect(linear_reg, 'out_matrix_file', bbreg_func_to_anat,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol', bbreg_func_to_anat,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', bbreg_arg_convert,
                         'bbreg_target')
    analysisflow.connect(bbreg_arg_convert, 'arg', bbreg_func_to_anat, 'args')
    analysisflow.connect(inputspec, 'skull', bbreg_func_to_anat, 'reference')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', convertmatrix,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func1,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func1, 'reference')
    analysisflow.connect(csf_bb_mask, 'out_file', reg_anatmask_to_func1,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func2,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func2, 'reference')
    analysisflow.connect(wm_bb_mask, 'out_file', reg_anatmask_to_func2,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func3,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func3, 'reference')
    analysisflow.connect(gm_bb_mask, 'out_file', reg_anatmask_to_func3,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func4,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func4, 'reference')
    analysisflow.connect(vent_bb_mask, 'out_file', reg_anatmask_to_func4,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', wm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_csf_segmentation', csf_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_gm_segmentation', gm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_ventricle_segmentation',
                         vent_bb_mask, 'in_file')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', outputspec,
                         'func_sample2anat')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', outputspec,
                         'func_to_anat_linear_xfm')
    analysisflow.connect(reg_anatmask_to_func1, 'out_file', outputspec,
                         'csf_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func2, 'out_file', outputspec,
                         'wm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func3, 'out_file', outputspec,
                         'gm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func4, 'out_file', outputspec,
                         'ventricle_mask_in_funcspace')
    analysisflow.connect(myonevol, 'outputspec.func1vol', outputspec,
                         'example_func')
    analysisflow.connect(convertmatrix, 'out_file', outputspec,
                         'anat_to_func_linear_xfm')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', ds, "func2anat")
    analysisflow.connect(bbreg_func_to_anat, 'out_file', myqc,
                         'inputspec.bg_image')
    analysisflow.connect(wm_bb_mask, 'out_file', myqc,
                         'inputspec.overlay_image')

    return analysisflow
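
# A hedged usage sketch (all node names hypothetical): the inputspec fields
# would be wired from anatomical preprocessing, e.g. FSL FAST tissue maps.
#
# bbr = bbr_workflow()
# parent_wf.connect(func_node, 'out_file', bbr, 'inputspec.func')
# parent_wf.connect(anat_node, 'out_file', bbr, 'inputspec.skull')
# parent_wf.connect(fast_node, 'wm_prob', bbr, 'inputspec.anat_wm_segmentation')
# parent_wf.connect(fast_node, 'gm_prob', bbr, 'inputspec.anat_gm_segmentation')
# parent_wf.connect(fast_node, 'csf_prob', bbr, 'inputspec.anat_csf_segmentation')
# parent_wf.connect(vent_node, 'out_file', bbr, 'inputspec.anat_ventricle_segmentation')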
Example #14
def create_all_calcarine_reward_2_h5_workflow(
        analysis_info, name='all_calcarine_reward_nii_2_h5'):
    import os.path as op
    import tempfile
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from spynoza.nodes.utils import get_scaninfo, dyns_min_1, topup_scan_params, apply_scan_params
    from nipype.interfaces.io import SelectFiles, DataSink

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from utils.utils import mask_nii_2_hdf5, combine_eye_hdfs_to_nii_hdf

    input_node = pe.Node(
        IdentityInterface(fields=['sub_id', 'preprocessed_data_dir']),
        name='inputspec')

    # i/o node
    datasource_templates = dict(mcf='{sub_id}/mcf/*.nii.gz',
                                psc='{sub_id}/psc/*.nii.gz',
                                tf='{sub_id}/tf/*.nii.gz',
                                GLM='{sub_id}/GLM/*.nii.gz',
                                eye='{sub_id}/eye/h5/*.h5',
                                rois='{sub_id}/roi/*_vol.nii.gz')
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    hdf5_psc_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_psc_masker')
    hdf5_psc_masker.inputs.folder_alias = 'psc'
    hdf5_psc_masker.inputs.hdf5_file = op.join(tempfile.mkdtemp(), 'roi.h5')

    hdf5_tf_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                             name='hdf5_tf_masker')
    hdf5_tf_masker.inputs.folder_alias = 'tf'
    # no hdf5_file is set here: it is passed on from the psc masker via a
    # workflow connection below

    hdf5_mcf_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_mcf_masker')
    hdf5_mcf_masker.inputs.folder_alias = 'mcf'

    hdf5_GLM_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_GLM_masker')
    hdf5_GLM_masker.inputs.folder_alias = 'GLM'

    eye_hdfs_to_nii_masker = pe.Node(Function(
        input_names=['nii_hdf5_file', 'eye_hdf_filelist', 'new_alias'],
        output_names=['nii_hdf5_file'],
        function=combine_eye_hdfs_to_nii_hdf),
                                     name='eye_hdfs_to_nii_masker')
    eye_hdfs_to_nii_masker.inputs.new_alias = 'eye'

    # node for datasinking
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    all_calcarine_reward_nii_2_h5_workflow = pe.Workflow(name=name)

    all_calcarine_reward_nii_2_h5_workflow.connect(input_node,
                                                   'preprocessed_data_dir',
                                                   datasink, 'base_directory')
    all_calcarine_reward_nii_2_h5_workflow.connect(input_node, 'sub_id',
                                                   datasink, 'container')

    all_calcarine_reward_nii_2_h5_workflow.connect(input_node,
                                                   'preprocessed_data_dir',
                                                   datasource,
                                                   'base_directory')
    all_calcarine_reward_nii_2_h5_workflow.connect(input_node, 'sub_id',
                                                   datasource, 'sub_id')

    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'psc',
                                                   hdf5_psc_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_psc_masker,
                                                   'mask_files')

    # the hdf5_file is created by the psc node, and then passed from masker to masker on into the datasink.
    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_psc_masker,
                                                   'hdf5_file', hdf5_tf_masker,
                                                   'hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'tf',
                                                   hdf5_tf_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_tf_masker,
                                                   'mask_files')

    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_tf_masker, 'hdf5_file',
                                                   hdf5_mcf_masker,
                                                   'hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'mcf',
                                                   hdf5_mcf_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_mcf_masker,
                                                   'mask_files')

    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'GLM',
                                                   hdf5_GLM_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_GLM_masker,
                                                   'mask_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_mcf_masker,
                                                   'hdf5_file',
                                                   hdf5_GLM_masker,
                                                   'hdf5_file')

    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_GLM_masker,
                                                   'hdf5_file',
                                                   eye_hdfs_to_nii_masker,
                                                   'nii_hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'eye',
                                                   eye_hdfs_to_nii_masker,
                                                   'eye_hdf_filelist')

    all_calcarine_reward_nii_2_h5_workflow.connect(eye_hdfs_to_nii_masker,
                                                   'nii_hdf5_file', datasink,
                                                   'h5')

    return all_calcarine_reward_nii_2_h5_workflow
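
# A minimal usage sketch, not part of the original source; the directory and
# the subject id are hypothetical.
if __name__ == '__main__':
    wf = create_all_calcarine_reward_2_h5_workflow(analysis_info={})
    wf.inputs.inputspec.preprocessed_data_dir = '/data/preprocessed'
    wf.inputs.inputspec.sub_id = 'sub-01'
    wf.run()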
Example #15
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds
import nipype.interfaces.utility as utility

#Generic datagrabber module that wraps around glob in an arbitrary way
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                              name='my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
my_fsl_SliceTimer = pe.Node(interface=fsl.SliceTimer(),
                            name='my_fsl_SliceTimer')

#Wraps command **mcflirt**
my_fsl_MCFLIRT = pe.Node(interface=fsl.MCFLIRT(),
                         name='my_fsl_MCFLIRT')
Example #16
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.utility as utility
import nipype.interfaces.fsl as fsl

#Generic datagrabber module that wraps around glob in an arbitrary way
io_DataGrabber = pe.Node(io.DataGrabber(
    infields=["subj_id", "field_template", "session", "run_no"],
    outfields=["func", "struct"]),
                         name='io_DataGrabber')
io_DataGrabber.inputs.sort_filelist = True
io_DataGrabber.inputs.template = '*'
io_DataGrabber.inputs.base_directory = '/project/3018028.06/LEX_ELLEN/data/'
io_DataGrabber.inputs.template_args = dict(
    func=[['subj_id', 'subj_id', 'session', 'run_no']], struct=[['subj_id']])
io_DataGrabber.inputs.field_template = dict(
    func='%02d/func/%02d_s%d_r%d.nii',
    struct='%02d/anat/ses-mri01_t1_mprage_*.nii')

#Basic interface class generates identity mappings
utility_IdentityInterface = pe.Node(
    utility.IdentityInterface(fields=["subject", "session", "run_no"]),
    name='utility_IdentityInterface')
utility_IdentityInterface.iterables = [('subject', [50, 01]),
Example #17
def bet_workflow(Robust=True,
                 fmri=False,
                 SinkTag="anat_preproc",
                 wf_name="brain_extraction"):
    """
    Modified version of CPAC.anat_preproc.anat_preproc:

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/anat_preproc/anat_preproc.html`


    Creates a brain extracted image and its mask from a T1w anatomical image.

    Workflow inputs:
        :param anat: The reoriented anatomical file.
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

    Workflow outputs:
        :return: bet_workflow - workflow

    Balint Kincses
    [email protected]
    2018
    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    #Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'in_file',
            'opt_R',
            'fract_int_thr',  # optional
            'vertical_gradient'
        ]),  # optional
        name='inputspec')
    inputspec.inputs.opt_R = Robust
    if fmri:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_func_
    else:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_anat_

    inputspec.inputs.vertical_gradient = globals._fsl_bet_vertical_gradient_

    #Wraps command **bet**
    bet = pe.MapNode(interface=fsl.BET(), iterfield=['in_file'], name='bet')
    bet.inputs.mask = True
    # bet.inputs.robust=Robust
    if fmri:
        bet.inputs.functional = True
        myonevol = onevol.onevol_workflow(wf_name="onevol")
        applymask = pe.MapNode(fsl.ApplyMask(),
                               iterfield=['in_file', 'mask_file'],
                               name="apply_mask")

    myqc = qc.vol2png(wf_name, overlay=True)

    #Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['brain', 'brain_mask']),
        name='outputspec')

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    #Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(
        wf_name)  # the name here determines the folder of the workspace
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'in_file', bet, 'in_file')
    analysisflow.connect(inputspec, 'opt_R', bet, 'robust')
    analysisflow.connect(inputspec, 'fract_int_thr', bet, 'frac')
    analysisflow.connect(inputspec, 'vertical_gradient', bet,
                         'vertical_gradient')
    analysisflow.connect(bet, 'mask_file', outputspec, 'brain_mask')
    if fmri:

        analysisflow.connect(bet, 'mask_file', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', applymask,
                             'mask_file')
        analysisflow.connect(inputspec, 'in_file', applymask, 'in_file')
        analysisflow.connect(applymask, 'out_file', outputspec, 'brain')
    else:
        analysisflow.connect(bet, 'out_file', outputspec, 'brain')
    analysisflow.connect(bet, 'out_file', ds, 'bet_brain')
    analysisflow.connect(bet, 'mask_file', ds, 'brain_mask')

    analysisflow.connect(inputspec, 'in_file', myqc, 'inputspec.bg_image')
    analysisflow.connect(bet, 'out_file', myqc, 'inputspec.overlay_image')

    return analysisflow
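
# A minimal usage sketch for bet_workflow above (hypothetical input path;
# assumes PUMI's globals, e.g. _SinkDir_, point at a writable sink directory):
bet_wf = bet_workflow(Robust=True, fmri=False)
bet_wf.inputs.inputspec.in_file = ['/data/sub-01/anat/sub-01_T1w.nii.gz']
bet_wf.run()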
Exemple #18
0
def onevol_workflow(SinkTag="anat_preproc", wf_name="get_example_vol"):
    '''
    This function receives the raw functional image and returns its last volume for registration purposes.
    MORE: It also returns information from the header file.
        Workflow inputs:
            :param func: Functional image.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

        Workflow outputs:
            :return: onevol_workflow - workflow

        Balint Kincses
        [email protected]
        2018
    '''

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.func_preproc.info.info_get as info_get
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=['func']),
                        name='inputspec')
    #inputspec.inputs.func = "/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz"

    # Get dimension infos
    idx = pe.MapNode(interface=info_get.tMinMax,
                     iterfield=['in_files'],
                     name='idx')

    # Get the last volume of the func image
    fslroi = pe.MapNode(fsl.ExtractROI(),
                        iterfield=['in_file', 't_min'],
                        name='fslroi')
    fslroi.inputs.t_size = 1

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['func1vol']),
                         name='outputspec')

    # Generic datasink module to store structured outputs
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func', idx, 'in_files')
    analysisflow.connect(inputspec, 'func', fslroi, 'in_file')
    analysisflow.connect(idx, 'refvolidx', fslroi, 't_min')
    analysisflow.connect(fslroi, 'roi_file', ds, 'funclastvol')
    analysisflow.connect(fslroi, 'roi_file', outputspec, 'func1vol')

    return analysisflow
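
# A minimal usage sketch for onevol_workflow above (hypothetical input path):
onevol_wf = onevol_workflow(wf_name="get_example_vol")
onevol_wf.inputs.inputspec.func = ['/data/sub-01/func/sub-01_task-rest_bold.nii.gz']
onevol_wf.run()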
Exemple #19
0
#This is a Nipype generator. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Generic datagrabber module that wraps around glob
NodeHash_17c5c70 = pe.Node(io.S3DataGrabber(outfields=['outfiles']),
                           name='NodeName_17c5c70')
NodeHash_17c5c70.inputs.bucket = 'openneuro'
NodeHash_17c5c70.inputs.sort_filelist = True
NodeHash_17c5c70.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
NodeHash_17c5c70.inputs.anon = True
NodeHash_17c5c70.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_17c5c70.inputs.local_directory = '/tmp'

#Wraps command **bet**
NodeHash_211a5f0 = pe.Node(interface=fsl.BET(), name='NodeName_211a5f0')

#Generic datasink module to store structured outputs
NodeHash_236ab50 = pe.Node(interface=io.DataSink(), name='NodeName_236ab50')
NodeHash_236ab50.inputs.base_directory = '/tmp'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_211a5f0, 'out_file', NodeHash_236ab50,
                     'BET_results')
analysisflow.connect(NodeHash_17c5c70, 'outfiles', NodeHash_211a5f0, 'in_file')
Exemple #20
0
def create_B0_workflow(name='b0_unwarping', scanner='philips'):
    """ Does B0 field unwarping

    Example
    -------
    >>> nipype_epicorrect = create_unwarping_workflow('unwarp',)
    >>> unwarp.inputs.input_node.in_file = 'subj1_run1_bold.nii.gz'
    >>> unwarp.inputs.input_node.fieldmap_mag = 'subj1_run1_mag.nii.gz'
    >>> unwarp.inputs.input_node.fieldmap_pha = 'subj1_run1_phas.nii.gz'
    >>> unwarp.inputs.input_node.wfs = 12.223
    >>> unwarp.inputs.input_node.epi_factor = 35.0
    >>> unwarp.inputs.input_node.acceleration = 3.0
    >>> unwarp.inputs.input_node.te_diff = 0.005
    >>> unwarp.inputs.input_node.phase_encoding_direction = 'y'
    >>> nipype_epicorrect.run()

    Inputs::
        input_node.in_file - Volume acquired with EPI sequence
        input_node.fieldmap_mag - Magnitude of the fieldmap
        input_node.fieldmap_pha - Phase difference of the fieldmap
        input_node.wfs - Water-fat-shift in pixels
        input_node.epi_factor - EPI factor
        input_node.acceleration - Acceleration factor used for EPI parallel imaging (SENSE)
        input_node.te_diff - Time difference between TE in seconds.
        input_node.phase_encoding_direction - Unwarp direction (default should be "y")
    Outputs::
        outputnode.epi_corrected
    """

    # Nodes:
    # ------

    # Define input and workflow:
    input_node = pe.Node(name='inputspec',
                         interface=IdentityInterface(fields=[
                             'in_files', 'fieldmap_mag', 'fieldmap_pha', 'wfs',
                             'epi_factor', 'acceleration', 'echo_spacing',
                             'te_diff', 'phase_encoding_direction'
                         ]))

    # Normalize the fieldmap phase difference to lie in [-pi, pi)
    norm_pha = pe.Node(interface=Prepare_phasediff, name='normalize_phasediff')

    # Mask the magnitude of the fieldmap
    mask_mag = pe.Node(fsl.BET(mask=True), name='mask_magnitude')
    mask_mag_dil = pe.Node(interface=Dilate_mask, name='mask_dilate')

    # Unwrap fieldmap phase using FSL PRELUDE
    prelude = pe.Node(fsl.PRELUDE(process3d=True), name='phase_unwrap')

    # Convert unwrapped fieldmap phase to radians per second:
    radials_per_second = pe.Node(interface=Radials_per_second,
                                 name='radials_ps')
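
    # (An assumption about Radials_per_second, not the verified interface: it
    # presumably divides the unwrapped phase difference by the echo-time
    # difference, i.e. fieldmap [rad/s] = delta_phase [rad] / delta_TE [s].)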

    # in case of SIEMENS scanner:
    prepare_fieldmap = pe.Node(PrepareFieldmap(), name='prepare_fieldmap')

    # Register unwrapped fieldmap (rad/s) to epi, using the magnitude of the fieldmap
    registration = pe.MapNode(fsl.FLIRT(bins=256,
                                        cost='corratio',
                                        dof=6,
                                        interp='trilinear',
                                        searchr_x=[-10, 10],
                                        searchr_y=[-10, 10],
                                        searchr_z=[-10, 10]),
                              iterfield=['reference'],
                              name='registration')

    # transform unwrapped fieldmap (rad/s)
    applyxfm = pe.MapNode(fsl.ApplyXFM(interp='trilinear'),
                          iterfield=['reference', 'in_matrix_file'],
                          name='apply_xfm')

    # compute effective echospacing:
    echo_spacing_philips = pe.Node(interface=Compute_echo_spacing_philips,
                                   name='echo_spacing_philips')
    echo_spacing_siemens = pe.Node(interface=Compute_echo_spacing_siemens,
                                   name='echo_spacing_siemens')
    te_diff_in_ms = pe.Node(interface=TE_diff_ms, name='te_diff_in_ms')
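
    # For reference, a plausible sketch of what Compute_echo_spacing_philips
    # computes (an assumption based on the commonly used Philips formula, not
    # the verified interface; 434.215 Hz is the water-fat shift at 3T):
    #
    #   def compute_echo_spacing_philips(wfs, epi_factor, acceleration):
    #       # effective echo spacing in seconds
    #       return wfs / (434.215 * (epi_factor + 1)) / acceleration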

    # Unwarp with FSL Fugue
    fugue = pe.MapNode(interface=fsl.FUGUE(median_2dfilter=True),
                       iterfield=['in_file', 'unwarped_file', 'fmap_in_file'],
                       name='fugue')

    # Generate output filenames for the unwarped functional files:
    out_file = pe.MapNode(interface=Make_output_filename,
                          iterfield=['in_file'],
                          name='out_file')

    # Define output node
    outputnode = pe.Node(
        IdentityInterface(fields=['out_files', 'field_coefs']),
        name='outputspec')

    # Workflow:
    # ---------

    unwarp_workflow = pe.Workflow(name=name)
    unwarp_workflow.connect(input_node, 'in_files', out_file, 'in_file')

    # registration:
    unwarp_workflow.connect(input_node, 'fieldmap_mag', mask_mag, 'in_file')
    unwarp_workflow.connect(mask_mag, 'mask_file', mask_mag_dil, 'in_file')
    unwarp_workflow.connect(mask_mag, 'out_file', registration, 'in_file')
    unwarp_workflow.connect(input_node, 'in_files', registration, 'reference')

    if scanner == 'philips':

        # prepare fieldmap:
        unwarp_workflow.connect(input_node, 'fieldmap_pha', norm_pha,
                                'in_file')
        unwarp_workflow.connect(input_node, 'fieldmap_mag', prelude,
                                'magnitude_file')
        unwarp_workflow.connect(norm_pha, 'out_file', prelude, 'phase_file')
        unwarp_workflow.connect(mask_mag_dil, 'out_file', prelude, 'mask_file')
        unwarp_workflow.connect(prelude, 'unwrapped_phase_file',
                                radials_per_second, 'in_file')
        unwarp_workflow.connect(input_node, 'te_diff', radials_per_second,
                                'asym')

        # transform fieldmap:
        unwarp_workflow.connect(radials_per_second, 'out_file', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'wfs', echo_spacing_philips, 'wfs')
        unwarp_workflow.connect(input_node, 'epi_factor', echo_spacing_philips,
                                'epi_factor')
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_philips, 'acceleration')
        unwarp_workflow.connect(echo_spacing_philips, 'echo_spacing', fugue,
                                'dwell_time')

    elif scanner == 'siemens':

        unwarp_workflow.connect(input_node, 'te_diff', te_diff_in_ms,
                                'te_diff')

        # prepare fieldmap:
        unwarp_workflow.connect(mask_mag, 'out_file', prepare_fieldmap,
                                'in_magnitude')
        unwarp_workflow.connect(input_node, 'fieldmap_pha', prepare_fieldmap,
                                'in_phase')
        unwarp_workflow.connect(te_diff_in_ms, 'te_diff', prepare_fieldmap,
                                'delta_TE')

        # transform fieldmap:
        unwarp_workflow.connect(prepare_fieldmap, 'out_fieldmap', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_siemens, 'acceleration')
        unwarp_workflow.connect(input_node, 'echo_spacing',
                                echo_spacing_siemens, 'echo_spacing')
        unwarp_workflow.connect(echo_spacing_siemens, 'echo_spacing', fugue,
                                'dwell_time')

    unwarp_workflow.connect(input_node, 'in_files', fugue, 'in_file')
    unwarp_workflow.connect(out_file, 'out_file', fugue, 'unwarped_file')
    unwarp_workflow.connect(applyxfm, 'out_file', fugue, 'fmap_in_file')
    unwarp_workflow.connect(input_node, 'te_diff', fugue, 'asym_se_time')
    unwarp_workflow.connect(input_node, 'phase_encoding_direction', fugue,
                            'unwarp_direction')
    unwarp_workflow.connect(fugue, 'unwarped_file', outputnode, 'out_files')
    unwarp_workflow.connect(applyxfm, 'out_file', outputnode, 'field_coefs')

    return unwarp_workflow
print("Starting RPN-signature...")
print("Memory usage limit: " + str(opts.mem_gb) + "GB")
print("Number of CPUs used: " + str(opts.nthreads))

totalWorkflow = nipype.Workflow('RPN')
if opts.debug:
    totalWorkflow.base_dir = globals._SinkDir_
else:
    totalWorkflow.base_dir = opts.tempdir  # preferably a fast temporary mount (working dir by default)

########################
# parse command line args
bids_dir = opts.bids_dir

# create BIDS data grabber
datagrab = pe.Node(io.BIDSDataGrabber(), name='data_grabber')
datagrab.inputs.base_dir = bids_dir

# BIDS filtering
if opts.task_id and opts.echo_idx:
    datagrab.inputs.output_query['bold'] = dict(datatype='func', task=opts.task_id, echo=opts.echo_idx)
elif opts.task_id:
    datagrab.inputs.output_query['bold'] = dict(datatype='func', task=opts.task_id)
elif opts.echo_idx:
    datagrab.inputs.output_query['bold'] = dict(datatype='func', echo=opts.echo_idx)

print("Participants selected:")
if (opts.participant_label):
    datagrab.inputs.subject = opts.participant_label
    print(opts.participant_label)
else:
def create_preprocessing_workflow(analysis_params, name='yesno_3T'):
    import os.path as op
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from IPython import embed as shell

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.utils import get_scaninfo, pickfirst, average_over_runs, set_nifti_intercept_slope
    from spynoza.uniformization.workflows import create_non_uniformity_correct_4D_file
    from spynoza.unwarping.b0.workflows import create_B0_workflow
    from spynoza.motion_correction.workflows import create_motion_correction_workflow
    from spynoza.registration.workflows import create_registration_workflow
    from spynoza.filtering.nodes import sgfilter
    from spynoza.conversion.nodes import psc
    from spynoza.denoising.retroicor.workflows import create_retroicor_workflow
    from spynoza.masking.workflows import create_masks_from_surface_workflow
    from spynoza.glm.nodes import fit_nuisances

    ########################################################################################
    # nodes
    ########################################################################################

    input_node = pe.Node(
        IdentityInterface(fields=[
            'task',  # main
            'sub_id',  # main
            'ses_id',  # main
            'raw_data_dir',  # main
            'output_directory',  # main
            'sub_FS_id',  # main
            'FS_subject_dir',  # motion correction
            'RepetitionTime',  # motion correction
            'which_file_is_EPI_space',  # motion correction
            'standard_file',  # registration
            'topup_conf_file',  # unwarping
            'EchoTimeDiff',  # unwarping
            'EpiFactor',  # unwarping
            'SenseFactor',  # unwarping
            'WaterFatShift',  # unwarping
            'PhaseEncodingDirection',  # unwarping
            'EchoSpacing',  # unwarping
            'psc_func',  # percent signal change
            'sg_filter_window_length',  # temporal filtering
            'sg_filter_order',  # temporal filtering
            'SliceEncodingDirection',  # retroicor
            'PhysiologySampleRate',  # retroicor
            'SliceTiming',  # retroicor
            'SliceOrder',  # retroicor
            'NumberDummyScans',  # retroicor
            'MultiBandFactor',  # retroicor
            'hr_rvt',  # retroicor
            'av_func',  # extra
            'EchoTime',  # extra
            'bd_design_matrix_file',  # extra
        ]),
        name='inputspec')

    for param in analysis_params:
        setattr(input_node.inputs, param, analysis_params[param])

    # i/o node
    datasource_templates = dict(
        func='{sub_id}/{ses_id}/func/{sub_id}_{ses_id}_task-{task}*_bold.nii.gz',
        magnitude='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*magnitude.nii.gz',
        phasediff='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*phasediff.nii.gz',
        #physio='{sub_id}/{ses_id}/func/*{task}*physio.*',
        #events='{sub_id}/{ses_id}/func/*{task}*_events.pickle',
        #eye='{sub_id}/{ses_id}/func/*{task}*_eyedata.edf'
    )
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    output_node = pe.Node(IdentityInterface(
        fields=(['temporal_filtered_files', 'percent_signal_change_files'])),
                          name='outputspec')

    # nodes for setting the slope/intercept of incoming niftis to (1, 0)
    # this is apparently necessary for the B0 map files
    int_slope_B0_magnitude = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_magnitude')
    int_slope_B0_phasediff = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_phasediff')

    # reorient nodes
    reorient_epi = pe.MapNode(interface=fsl.Reorient2Std(),
                              name='reorient_epi',
                              iterfield=['in_file'])
    reorient_B0_magnitude = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_magnitude')
    reorient_B0_phasediff = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_phasediff')

    # bet_epi = pe.MapNode(interface=
    #     fsl.BET(frac=analysis_parameters['bet_f_value'], vertical_gradient = analysis_parameters['bet_g_value'],
    #             functional=True, mask = True), name='bet_epi', iterfield=['in_file'])

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    preprocessing_workflow = pe.Workflow(name=name)
    preprocessing_workflow.base_dir = op.join(analysis_params['base_dir'],
                                              'temp/')

    # data source
    preprocessing_workflow.connect(input_node, 'raw_data_dir', datasource,
                                   'base_directory')
    preprocessing_workflow.connect(input_node, 'sub_id', datasource, 'sub_id')
    preprocessing_workflow.connect(input_node, 'ses_id', datasource, 'ses_id')
    preprocessing_workflow.connect(input_node, 'task', datasource, 'task')

    # and data sink
    preprocessing_workflow.connect(input_node, 'output_directory', datasink,
                                   'base_directory')

    # BET (we don't do this, because we expect the raw data in the bids folder to be betted
    # already for anonymization purposes)
    # preprocessing_workflow.connect(datasource, 'func', bet_epi, 'in_file')

    # non-uniformity correction
    # preprocessing_workflow.connect(bet_epi, 'out_file', nuc, 'in_file')
    # preprocessing_workflow.connect(datasource, 'func', nuc, 'in_file')

    # reorient images
    preprocessing_workflow.connect(datasource, 'func', reorient_epi, 'in_file')
    preprocessing_workflow.connect(datasource, 'magnitude',
                                   reorient_B0_magnitude, 'in_file')
    preprocessing_workflow.connect(datasource, 'phasediff',
                                   reorient_B0_phasediff, 'in_file')
    preprocessing_workflow.connect(reorient_epi, 'out_file', datasink,
                                   'reorient')

    #B0 field correction:
    if analysis_params['B0_or_topup'] == 'B0':
        # set slope/intercept to unity for B0 map
        preprocessing_workflow.connect(reorient_B0_magnitude, 'out_file',
                                       int_slope_B0_magnitude, 'in_file')
        preprocessing_workflow.connect(reorient_B0_phasediff, 'out_file',
                                       int_slope_B0_phasediff, 'in_file')
        # choose scanner-specific echo-spacing handling:
        if 'EchoSpacing' in analysis_params:
            B0_wf = create_B0_workflow(name='B0', scanner='siemens')
            preprocessing_workflow.connect(input_node, 'EchoSpacing', B0_wf,
                                           'inputspec.echo_spacing')
        else:
            B0_wf = create_B0_workflow(name='B0', scanner='philips')
            preprocessing_workflow.connect(input_node, 'WaterFatShift', B0_wf,
                                           'inputspec.wfs')
            preprocessing_workflow.connect(input_node, 'EpiFactor', B0_wf,
                                           'inputspec.epi_factor')
        preprocessing_workflow.connect(input_node, 'SenseFactor', B0_wf,
                                       'inputspec.acceleration')
        preprocessing_workflow.connect(reorient_epi, 'out_file', B0_wf,
                                       'inputspec.in_files')
        preprocessing_workflow.connect(int_slope_B0_magnitude, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_mag')
        preprocessing_workflow.connect(int_slope_B0_phasediff, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_pha')
        preprocessing_workflow.connect(input_node, 'EchoTimeDiff', B0_wf,
                                       'inputspec.te_diff')
        preprocessing_workflow.connect(input_node, 'PhaseEncodingDirection',
                                       B0_wf,
                                       'inputspec.phase_encoding_direction')
        preprocessing_workflow.connect(B0_wf, 'outputspec.field_coefs',
                                       datasink, 'B0.fieldcoef')
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files', datasink,
                                       'B0')

    # motion correction
    motion_proc = create_motion_correction_workflow(
        'moco', method=analysis_params['moco_method'])
    if analysis_params['B0_or_topup'] == 'B0':
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files',
                                       motion_proc, 'inputspec.in_files')
    elif analysis_params['B0_or_topup'] == 'neither':
        # bet_epi is commented out above, so the reoriented files go
        # straight into motion correction
        preprocessing_workflow.connect(reorient_epi, 'out_file', motion_proc,
                                       'inputspec.in_files')
    preprocessing_workflow.connect(input_node, 'RepetitionTime', motion_proc,
                                   'inputspec.tr')
    preprocessing_workflow.connect(input_node, 'output_directory', motion_proc,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(input_node, 'which_file_is_EPI_space',
                                   motion_proc,
                                   'inputspec.which_file_is_EPI_space')

    # registration
    reg = create_registration_workflow(analysis_params, name='reg')
    preprocessing_workflow.connect(input_node, 'output_directory', reg,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file',
                                   reg, 'inputspec.EPI_space_file')
    preprocessing_workflow.connect(input_node, 'sub_FS_id', reg,
                                   'inputspec.freesurfer_subject_ID')
    preprocessing_workflow.connect(input_node, 'FS_subject_dir', reg,
                                   'inputspec.freesurfer_subject_dir')
    preprocessing_workflow.connect(input_node, 'standard_file', reg,
                                   'inputspec.standard_file')

    # temporal filtering
    preprocessing_workflow.connect(input_node, 'sg_filter_window_length',
                                   sgfilter, 'window_length')
    preprocessing_workflow.connect(input_node, 'sg_filter_order', sgfilter,
                                   'polyorder')
    preprocessing_workflow.connect(motion_proc,
                                   'outputspec.motion_corrected_files',
                                   sgfilter, 'in_file')
    preprocessing_workflow.connect(sgfilter, 'out_file', datasink, 'tf')

    # node for percent signal change
    preprocessing_workflow.connect(input_node, 'psc_func', psc, 'func')
    preprocessing_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    preprocessing_workflow.connect(psc, 'out_file', datasink, 'psc')

    # # retroicor functionality
    # if analysis_params['perform_physio'] == 1:
    #     retr = create_retroicor_workflow(name = 'retroicor', order_or_timing = analysis_params['retroicor_order_or_timing'])
    #
    #     # # retroicor can take the crudest form of epi file, so that it proceeds quickly
    #     preprocessing_workflow.connect(datasource, 'func', retr, 'inputspec.in_files')
    #     preprocessing_workflow.connect(datasource, 'physio', retr, 'inputspec.phys_files')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.nr_dummies', retr, 'inputspec.nr_dummies')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.MultiBandFactor', retr, 'inputspec.MB_factor')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.tr', retr, 'inputspec.tr')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceEncodingDirection', retr, 'inputspec.slice_direction')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceTiming', retr, 'inputspec.slice_timing')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceOrder', retr, 'inputspec.slice_order')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.PhysiologySampleRate', retr, 'inputspec.phys_sample_rate')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.hr_rvt', retr, 'inputspec.hr_rvt')
    #
    #     # fit nuisances from retroicor
    #     # preprocessing_workflow.connect(retr, 'outputspec.evs', fit_nuis, 'slice_regressor_list')
    #     # preprocessing_workflow.connect(motion_proc, 'outputspec.extended_motion_correction_parameters', fit_nuis, 'vol_regressors')
    #     # preprocessing_workflow.connect(psc, 'out_file', fit_nuis, 'in_file')
    #
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', av_r, 'in_files')
    #
    #     preprocessing_workflow.connect(retr, 'outputspec.new_phys', datasink, 'phys.log')
    #     preprocessing_workflow.connect(retr, 'outputspec.fig_file', datasink, 'phys.figs')
    #     preprocessing_workflow.connect(retr, 'outputspec.evs', datasink, 'phys.evs')
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', datasink, 'phys.res')
    #     # preprocessing_workflow.connect(fit_nuis, 'rsq_file', datasink, 'phys.rsq')
    #     # preprocessing_workflow.connect(fit_nuis, 'beta_file', datasink, 'phys.betas')
    #
    #     # preprocessing_workflow.connect(av_r, 'out_file', datasink, 'av_r')

    #
    # ########################################################################################
    # # masking stuff if doing mri analysis
    # ########################################################################################
    #
    #     all_mask_opds = ['dc'] + analysis_parameters[u'avg_subject_RS_label_folders']
    #     all_mask_lds = [''] + analysis_parameters[u'avg_subject_RS_label_folders']
    #
    #     # loop across different folders to mask
    #     # untested as yet.
    #     masking_list = []
    #     dilate_list = []
    #     for opd, label_directory in zip(all_mask_opds,all_mask_lds):
    #         dilate_list.append(
    #             pe.MapNode(interface=fsl.maths.DilateImage(
    #                 operation = 'mean', kernel_shape = 'sphere', kernel_size = analysis_parameters['dilate_kernel_size']),
    #                 name='dilate_'+label_directory, iterfield=['in_file']))
    #
    #         masking_list.append(create_masks_from_surface_workflow(name = 'masks_from_surface_'+label_directory))
    #
    #         masking_list[-1].inputs.inputspec.label_directory = label_directory
    #         masking_list[-1].inputs.inputspec.fill_thresh = 0.005
    #         masking_list[-1].inputs.inputspec.re = '*.label'
    #
    #         preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', masking_list[-1], 'inputspec.EPI_space_file')
    #         preprocessing_workflow.connect(input_node, 'output_directory', masking_list[-1], 'inputspec.output_directory')
    #         preprocessing_workflow.connect(input_node, 'FS_subject_dir', masking_list[-1], 'inputspec.freesurfer_subject_dir')
    #         preprocessing_workflow.connect(input_node, 'FS_ID', masking_list[-1], 'inputspec.freesurfer_subject_ID')
    #         preprocessing_workflow.connect(reg, 'rename_register.out_file', masking_list[-1], 'inputspec.reg_file')
    #
    #         preprocessing_workflow.connect(masking_list[-1], 'outputspec.masks', dilate_list[-1], 'in_file')
    #         preprocessing_workflow.connect(dilate_list[-1], 'out_file', datasink, 'masks.'+opd)
    #
    #     # # surface-based label import in to EPI space, but now for RS labels
    #     # these should have been imported to the subject's FS folder,
    #     # see scripts/annot_conversion.sh
    #     RS_masks_from_surface = create_masks_from_surface_workflow(name = 'RS_masks_from_surface')
    #     RS_masks_from_surface.inputs.inputspec.label_directory = analysis_parameters['avg_subject_label_folder']
    #     RS_masks_from_surface.inputs.inputspec.fill_thresh = 0.005
    #     RS_masks_from_surface.inputs.inputspec.re = '*.label'
    #
    #     preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', RS_masks_from_surface, 'inputspec.EPI_space_file')
    #     preprocessing_workflow.connect(input_node, 'output_directory', RS_masks_from_surface, 'inputspec.output_directory')
    #     preprocessing_workflow.connect(input_node, 'FS_subject_dir', RS_masks_from_surface, 'inputspec.freesurfer_subject_dir')
    #     preprocessing_workflow.connect(input_node, 'FS_ID', RS_masks_from_surface, 'inputspec.freesurfer_subject_ID')
    #     preprocessing_workflow.connect(reg, 'rename_register.out_file', RS_masks_from_surface, 'inputspec.reg_file')
    #
    #     preprocessing_workflow.connect(RS_masks_from_surface, 'outputspec.masks', RS_dilate_cortex, 'in_file')
    #     preprocessing_workflow.connect(RS_dilate_cortex, 'out_file', datasink, 'masks.'+analysis_parameters['avg_subject_label_folder'])

    ########################################################################################
    # wrapping up, sending data to datasink
    ########################################################################################

    # preprocessing_workflow.connect(bet_epi, 'out_file', datasink, 'bet.epi')
    # preprocessing_workflow.connect(bet_epi, 'mask_file', datasink, 'bet.epimask')
    # preprocessing_workflow.connect(bet_topup, 'out_file', datasink, 'bet.topup')
    # preprocessing_workflow.connect(bet_topup, 'mask_file', datasink, 'bet.topupmask')

    # preprocessing_workflow.connect(nuc, 'out_file', datasink, 'nuc')
    # preprocessing_workflow.connect(sgfilter, 'out_file', datasink, 'tf')
    # preprocessing_workflow.connect(psc, 'out_file', datasink, 'psc')
    # preprocessing_workflow.connect(datasource, 'physio', datasink, 'phys')

    return preprocessing_workflow
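
# A minimal usage sketch (hypothetical values; in practice analysis_params
# carries every key referenced above, e.g. 'base_dir', 'B0_or_topup',
# 'moco_method' and the inputspec fields):
#
#   preproc_wf = create_preprocessing_workflow(analysis_params, name='yesno_3T')
#   preproc_wf.run('MultiProc', plugin_args={'n_procs': 8})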
Exemple #23
0
def create_retroicor_workflow(name='retroicor', order_or_timing='order'):

    """
    Creates RETROICOR regressors from physiological recordings.

    Inputs::
        inputspec.in_files - The EPI time series
        inputspec.phys_files - The physiology .log files acquired together with the EPI sequence
    Outputs::
        outputspec.new_phys - Physiology data aligned to the EPI acquisition
        outputspec.fig_file - Diagnostic figures
        outputspec.evs - The RETROICOR regressor (EV) files
    """
    
    # Define nodes:
    input_node = pe.Node(niu.IdentityInterface(fields=['in_files',
                                                       'phys_files',
                                                       'nr_dummies',
                                                       'MB_factor',
                                                       'tr',
                                                       'slice_direction',
                                                       'phys_sample_rate',
                                                       'slice_timing',
                                                       'slice_order',
                                                       'hr_rvt',
                                                       ]), name='inputspec')

    # the slice time preprocessing node before we go into popp (PreparePNM)
    slice_times_from_gradients = pe.MapNode(
        niu.Function(input_names=['in_file', 'phys_file', 'nr_dummies',
                                  'MB_factor', 'sample_rate'],
                     output_names=['out_file', 'fig_file'],
                     function=_distill_slice_times_from_gradients),
        iterfield=['in_file', 'phys_file'],
        name='slice_times_from_gradients')

    slice_times_to_txt_file = pe.Node(
        niu.Function(input_names=['slice_times'],
                     output_names=['out_file'],
                     function=_slice_times_to_txt_file),
        name='slice_times_to_txt_file')

    pnm_prefixer = pe.MapNode(
        niu.Function(input_names=['filename'],
                     output_names=['out_string'],
                     function=_preprocess_nii_files_to_pnm_evs_prefix),
        iterfield=['filename'],
        name='pnm_prefixer')

    prepare_pnm = pe.MapNode(PreparePNM(), iterfield=['in_file'],
                             name='prepare_pnm')

    pnm_evs = pe.MapNode(PNMtoEVs(),
                         iterfield=['functional_epi', 'cardiac', 'resp',
                                    'hr', 'rvt', 'prefix'],
                         name='pnm_evs')

    # Define output node
    output_node = pe.Node(
        niu.IdentityInterface(fields=['new_phys', 'fig_file', 'evs']),
        name='outputspec')

    ########################################################################################
    # workflow
    ########################################################################################

    retroicor_workflow = pe.Workflow(name=name)
    
    # align phys-log data to nifti 
    retroicor_workflow.connect(input_node, 'in_files', slice_times_from_gradients, 'in_file')
    retroicor_workflow.connect(input_node, 'phys_files', slice_times_from_gradients, 'phys_file')
    retroicor_workflow.connect(input_node, 'nr_dummies', slice_times_from_gradients, 'nr_dummies')
    retroicor_workflow.connect(input_node, 'MB_factor', slice_times_from_gradients, 'MB_factor')
    retroicor_workflow.connect(input_node, 'phys_sample_rate', slice_times_from_gradients, 'sample_rate')

    # conditional here, for the creation of a separate slice timing file if order_or_timing is 'timing'
    # order_or_timing can also be 'order'
    if order_or_timing == 'timing':
        retroicor_workflow.connect(input_node, 'slice_timing', slice_times_to_txt_file, 'slice_times')
    
    # prepare pnm:
    retroicor_workflow.connect(input_node, 'phys_sample_rate', prepare_pnm, 'sampling_rate')
    retroicor_workflow.connect(input_node, 'tr', prepare_pnm, 'tr')
    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', prepare_pnm, 'in_file')
    retroicor_workflow.connect(input_node, 'hr_rvt', prepare_pnm, 'hr_rvt')
    
    # pnm evs:
    retroicor_workflow.connect(input_node, 'in_files', pnm_prefixer, 'filename')
    retroicor_workflow.connect(pnm_prefixer, 'out_string', pnm_evs, 'prefix')
    retroicor_workflow.connect(input_node, 'in_files', pnm_evs, 'functional_epi')
    retroicor_workflow.connect(input_node, 'slice_direction', pnm_evs, 'slice_dir')
    retroicor_workflow.connect(input_node, 'tr', pnm_evs, 'tr')
    if order_or_timing == 'timing':
        retroicor_workflow.connect(slice_times_to_txt_file, 'out_file', pnm_evs, 'slice_timing')
    elif order_or_timing == 'order':
        retroicor_workflow.connect(input_node, 'slice_order', pnm_evs, 'slice_order')
    retroicor_workflow.connect(prepare_pnm, 'card', pnm_evs, 'cardiac')
    retroicor_workflow.connect(prepare_pnm, 'resp', pnm_evs, 'resp')
    retroicor_workflow.connect(prepare_pnm, 'hr', pnm_evs, 'hr')
    retroicor_workflow.connect(prepare_pnm, 'rvt', pnm_evs, 'rvt')

    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', output_node, 'new_phys')
    retroicor_workflow.connect(slice_times_from_gradients, 'fig_file', output_node, 'fig_file')
    retroicor_workflow.connect(pnm_evs, 'evs', output_node, 'evs')

    return retroicor_workflow
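
# A minimal usage sketch (hypothetical inputs; the helper functions such as
# _distill_slice_times_from_gradients must be importable):
#
#   retr = create_retroicor_workflow(name='retroicor', order_or_timing='order')
#   retr.inputs.inputspec.in_files = ['sub-01_task-rest_bold.nii.gz']
#   retr.inputs.inputspec.phys_files = ['sub-01_task-rest_physio.log']
#   retr.run()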
Exemple #24
0
import nipype.pipeline as pe
from nipype.interfaces.utility import Function

# ToDo: not use a mutable argument for sg_args


def _check_if_iterable(to_iter, arg):
    # Broadcast a scalar argument to a list matching the length of the
    # iterable it will be paired with; lists are passed through unchanged.
    if not isinstance(arg, list):
        arg = [arg] * len(to_iter)

    return arg


fix_iterable = pe.Node(Function(input_names=['to_iter', 'arg'],
                                output_names='arg_fixed',
                                function=_check_if_iterable),
                       name='fix_iterable')
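
# Quick illustration of the broadcasting behaviour wrapped by fix_iterable
# (illustrative values):
#
#   _check_if_iterable(['a.nii', 'b.nii'], 5)       # -> [5, 5]
#   _check_if_iterable(['a.nii', 'b.nii'], [5, 6])  # -> [5, 6]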
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni

#Generic datagrabber module that wraps around glob
io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                           name='io_S3DataGrabber')
io_S3DataGrabber.inputs.bucket = 'openneuro'
io_S3DataGrabber.inputs.sort_filelist = True
io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
io_S3DataGrabber.inputs.anon = True
io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
fsl_BET = pe.Node(interface=fsl.BET(), name='fsl_BET')

#Generic datasink module to store structured outputs
io_DataSink = pe.Node(interface=io.DataSink(), name='io_DataSink')
io_DataSink.inputs.base_directory = '/tmp'

#Wraps command **3dAllineate**
Exemple #26
0
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import argparse
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

#Flexibly collect data from disk to feed into workflows.
io_select_files = pe.Node(io.SelectFiles(templates={}), name='io_select_files')

#Wraps the executable command ``bet``.
fsl_bet = pe.Node(interface=fsl.BET(), name='fsl_bet')

#Generic datasink module to store structured outputs
io_data_sink = pe.Node(interface=io.DataSink(), name='io_data_sink')

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(io_select_files, "anat", fsl_bet, "in_file")
analysisflow.connect(fsl_bet, "out_file", io_data_sink, "BET_results")

#Run the workflow
plugin = 'MultiProc'  #adjust your desired plugin here
plugin_args = {'n_procs': 1}  #adjust to your number of cores
analysisflow.write_graph(graph2use='flat', format='png', simple_form=False)
analysisflow.run(plugin=plugin, plugin_args=plugin_args)
Exemple #27
0
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io

#Flexibly collect data from disk to feed into workflows.
io_SelectFiles = pe.Node(io.SelectFiles(templates={}), name='io_SelectFiles')

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')

#Run the workflow
plugin = 'MultiProc'  #adjust your desired plugin here
plugin_args = {'n_procs': 1}  #adjust to your number of cores
analysisflow.write_graph(graph2use='flat', format='png', simple_form=False)
analysisflow.run(plugin=plugin, plugin_args=plugin_args)
Exemple #28
0
def func2mni(stdreg,
             carpet_plot="",
             wf_name='func2mni',
             SinkTag="func_preproc"):
    """
    stdreg: either globals._RegType_.ANTS or globals._RegType_.FSL (do default value to make sure the user has to decide explicitly)

    Transaform 4D functional image to MNI space.

    carpet_plot: string specifying the tag parameter for carpet plot of the standardized MRI measurement
            (default is "": no carpet plot)
            if not "", inputs atlaslabels and confounds should be defined (it might work with defaults, though)

    Workflow inputs:
    :param func
    :param linear_reg_mtrx
    :param nonlinear_reg_mtrx
    :param reference_brain
    :param atlas (optional)
    :param confounds (optional)
    :param confound_names (optional)


    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Balint Kincses
    [email protected]
    2018


    """
    import os
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.ants as ants
    from nipype.interfaces.c3 import C3dAffineTool
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'func',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'linear_reg_mtrx',
            'nonlinear_reg_mtrx',
            'reference_brain',
            'atlas',
            'confounds',
            'confound_names'
        ]),
        name='inputspec')

    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz'

    inputspec.inputs.reference_brain = globals._FSLDIR_ + "/data/standard/MNI152_T1_3mm_brain.nii.gz"  # 3mm by default
    # TODO: this does not work with the iterfield definition for ref_file below:
    # TODO: it should be specified in a function argument whether it should be iterated
    # TODO_ready: ANTS
    # TODO: make the resampling voxel size for func parametrizable

    # apply transformation matrices
    if stdreg == globals._RegType_.FSL:
        applywarp = pe.MapNode(interface=fsl.ApplyWarp(interp="spline", ),
                               iterfield=['in_file', 'field_file', 'premat'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni", wf_name + "_FSL", overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # 500 # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 2mm template
    else:  #ANTs
        # the source file for C3dAffineTool must not be 4D, so we extract one example volume
        myonevol = onevol.onevol_workflow()
        # concat premat and ants transform
        bbr2ants = pe.MapNode(
            interface=C3dAffineTool(fsl2ras=True, itk_transform=True),
            iterfield=['source_file', 'transform_file',
                       'reference_file'],  # output: 'itk_transform'
            name="bbr2ants")
        # concatenate the transforms into a list
        trflist = pe.MapNode(interface=Function(
            input_names=['trf_first', 'trf_second'],
            output_names=['trflist'],
            function=transformlist),
                             iterfield=['trf_first', 'trf_second'],
                             name="collect_trf")

        applywarp = pe.MapNode(interface=ants.ApplyTransforms(
            interpolation="BSpline", input_image_type=3),
                               iterfield=['input_image', 'transforms'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni",
                          wf_name + "_ANTS3",
                          overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # 500 # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 2mm template

    if carpet_plot:
        fmri_qc = qc.fMRI2QC("carpet_plots", tag=carpet_plot)

    outputspec = pe.Node(utility.IdentityInterface(fields=['func_std']),
                         name='outputspec')

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".nii.gz")]

    analysisflow = pe.Workflow(wf_name)
    analysisflow.base_dir = '.'
    if stdreg == globals._RegType_.FSL:
        analysisflow.connect(inputspec, 'func', applywarp, 'in_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', applywarp, 'premat')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', applywarp,
                             'field_file')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'ref_file')
        analysisflow.connect(applywarp, 'out_file', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'out_file', myqc, 'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'out_file', ds_nii, 'func2mni')
    else:  # ANTs
        analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', bbr2ants,
                             'source_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', bbr2ants,
                             'transform_file')
        analysisflow.connect(inputspec, 'anat', bbr2ants, 'reference_file')
        analysisflow.connect(bbr2ants, 'itk_transform', trflist, 'trf_first')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', trflist,
                             'trf_second')
        analysisflow.connect(trflist, 'trflist', applywarp, 'transforms')
        analysisflow.connect(inputspec, 'func', applywarp, 'input_image')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'reference_image')

        analysisflow.connect(applywarp, 'output_image', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'output_image', myqc,
                             'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'output_image', ds_nii, 'func2mni')

    if carpet_plot:
        if stdreg == globals._RegType_.FSL:
            analysisflow.connect(applywarp, 'out_file', fmri_qc,
                                 'inputspec.func')
        else:  # ANTs
            analysisflow.connect(applywarp, 'output_image', fmri_qc,
                                 'inputspec.func')

        analysisflow.connect(inputspec, 'atlas', fmri_qc, 'inputspec.atlas')
        analysisflow.connect(inputspec, 'confounds', fmri_qc,
                             'inputspec.confounds')

    return analysisflow
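
# A minimal usage sketch (hypothetical; assumes PUMI's globals are configured
# and the registration matrices are available):
#
#   wf = func2mni(stdreg=globals._RegType_.FSL, wf_name='func2mni')
#   wf.inputs.inputspec.func = [...]
#   wf.inputs.inputspec.linear_reg_mtrx = [...]
#   wf.inputs.inputspec.nonlinear_reg_mtrx = [...]
#   wf.run()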
Exemple #29
0
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.fsl as fsl
import PUMI.utils.utils_math as utils_math
import nipype.interfaces.io as io

OutJSON = SinkDir + "/outputs.JSON"

#Basic interface class generates identity mappings
NodeHash_604000eb5d20 = pe.Node(
    utility.IdentityInterface(fields=['func', 'magnitude', 'phase', 'TE1',
                                      'TE2', 'dwell_time',
                                      'unwarp_direction']),
    name='NodeName_604000eb5d20')
NodeHash_604000eb5d20.inputs.func = func
NodeHash_604000eb5d20.inputs.magnitude = magnitude
NodeHash_604000eb5d20.inputs.phase = phase
NodeHash_604000eb5d20.inputs.TE1 = TE1
NodeHash_604000eb5d20.inputs.TE2 = TE2
NodeHash_604000eb5d20.inputs.dwell_time = dwell_time
NodeHash_604000eb5d20.inputs.unwarp_direction = unwarp_direction

#Wraps command **bet**
NodeHash_604000cba700 = pe.MapNode(interface=fsl.BET(),
                                   name='NodeName_604000cba700',
                                   iterfield=['in_file'])
NodeHash_604000cba700.inputs.mask = True

#Wraps command **fslmaths**
NodeHash_600001ab26c0 = pe.MapNode(interface=fsl.ErodeImage(),
                                   name='NodeName_600001ab26c0',
                                   iterfield=['in_file'])

#Wraps command **fslmaths**
NodeHash_60c0018a6e40 = pe.MapNode(interface=fsl.ErodeImage(),
                                   name='NodeName_60c0018a6e40',
                                   iterfield=['in_file'])
Exemple #30
0
    )
    print("Example:")
    print(sys.argv[0] +
          " \"highres_data/subject_*.nii.gz\" \"func_data/subject_*.nii.gz\"")
    quit()

if (len(sys.argv) > 3):
    globals._SinkDir_ = sys.argv[3]

##############################
_regtype_ = globals._RegType_.FSL
#_regtype_ = globals._RegType_.ANTS
##############################

# create data grabber
datagrab = pe.Node(nio.DataGrabber(outfields=['func', 'struct']),
                   name='data_grabber')

datagrab.inputs.base_directory = os.getcwd()  # do we need this?
datagrab.inputs.template = "*"  # do we need this?
datagrab.inputs.field_template = dict(
    func=sys.argv[2],
    struct=sys.argv[1])  # specified by command line arguments
datagrab.inputs.sort_filelist = True

# sink: file - idx relationship!!
pop_id = pe.Node(interface=utils_convert.List2TxtFile, name='pop_id')
pop_id.inputs.rownum = 0
pop_id.inputs.out_file = "subject_IDs.txt"
ds_id = pe.Node(interface=nio.DataSink(), name='ds_pop_id')
ds_id.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "IDs.txt")]
ds_id.inputs.base_directory = globals._SinkDir_