Beispiel #1
0
def mac_workflow(target_angle=90,
                 SinkTag="func_preproc",
                 wf_name="median_angle_correction"):
    """
    Median Angle Correction of 4D functional data.

    Modified version of CPAC.median_angle.median_angle:
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/median_angle/median_angle.html`

    Median Angle Correction Procedure:

    1. Compute the median angle with respect to the first principal component
       of the subject.
    2. Shift the angle of every voxel so that the new median angle equals the
       target angle.

    Workflow inputs:
        :param realigned_file: The realigned (and masked) functional file(s).
        :param target_angle: Target angle in degrees to correct the median
            angle to (default: 90).
        :param mask: Brain mask(s) matching the functional file(s).
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.
        :param wf_name: Name of the workflow.

    Workflow outputs:
        outputspec.final_func : string (nifti file)
            Median angle corrected nifti file of the given subject.
        outputspec.pc_angles : string (.npy file)
            Numpy file containing the angles (in radians) of all voxels with
            the 5 largest principal components.

        :return: analysisflow - the configured workflow

    Balint Kincses
    [email protected]
    2018
    """
    import os
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as utility
    import PUMI.utils.utils_convert as utils_convert
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    #TODO set target angle...
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['realigned_file', 'target_angle', 'mask']),
                        name='inputspec')
    inputspec.inputs.target_angle = target_angle
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['final_func', 'pc_angles']),
        name='outputspec')

    # Caution: input fmri must be masked (background=0)
    mac = pe.MapNode(utility.Function(
        input_names=['target_angle_deg', 'realigned_file', 'mask'],
        output_names=['corrected_file', 'angles_file'],
        function=median_angle_correct),
                     iterfield=['realigned_file', 'mask'],
                     name='median_angle_correct')

    myqc = qc.timecourse2png("timeseries", tag="050_medang")

    # collect and save median angle values
    pop_medang = pe.Node(
        interface=utils_convert.
        List2TxtFile,  #TODO: save subject level median angle
        name='pop_medang')

    # save mac file
    # NOTE: raw strings — "\/" is an invalid escape in a normal string literal
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # save data out with Datasink
    ds_medang = pe.Node(interface=io.DataSink(), name='ds_pop_medang')
    ds_medang.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "medang.txt")]
    ds_medang.inputs.base_directory = SinkDir

    #TODO set which files should be put into the datasink node...
    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    analysisflow.connect(inputspec, 'realigned_file', mac, 'realigned_file')
    analysisflow.connect(inputspec, 'target_angle', mac, 'target_angle_deg')
    analysisflow.connect(inputspec, 'mask', mac, 'mask')
    analysisflow.connect(mac, 'corrected_file', outputspec, 'final_func')
    analysisflow.connect(mac, 'angles_file', outputspec, 'pc_angles')
    analysisflow.connect(mac, 'corrected_file', myqc, 'inputspec.func')
    # pop-level medang values
    analysisflow.connect(mac, 'angles_file', pop_medang, 'in_list')
    analysisflow.connect(pop_medang, 'txt_file', ds_medang, 'pop')
    analysisflow.connect(mac, 'corrected_file', ds, 'med_ang')

    return analysisflow
Beispiel #2
0
def nuissremov_workflow(SinkTag="func_preproc", wf_name="nuisance_correction"):
    """
    Regress nuisance signals out of the functional data.

    Uses FSL's fsl_regfilt (FilterRegressor) to remove every column of the
    supplied design matrix from the time series.

    Workflow inputs:
        :param in_file: The reoriented and motion-corrected functional data.
        :param design_file: A matrix which contains all the nuisance
            regressors (motion + CompCor noise + ...).
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.

    Workflow outputs:

        :return: nuissremov_workflow

    Balint Kincses
    [email protected]
    2018
    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    sink_dir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(sink_dir):
        os.makedirs(sink_dir)

    # identity node exposing the workflow inputs
    input_node = pe.Node(
        utility.IdentityInterface(fields=['in_file', 'design_file']),
        name='inputspec')

    # fsl_regfilt: regress out all columns of the design matrix
    regfilt = pe.MapNode(
        interface=fsl.FilterRegressor(filter_all=True),
        iterfield=['design_file', 'in_file'],
        name='nuisregression')

    # QC: timeseries plot of the corrected data
    qc_timeseries = qc.timecourse2png("timeseries", tag="020_nuiscorr")

    # identity node exposing the workflow outputs
    output_node = pe.Node(
        utility.IdentityInterface(fields=['out_file']),
        name='outputspec')

    # persist the corrected data
    sink = pe.Node(interface=io.DataSink(), name='ds')
    sink.inputs.base_directory = sink_dir

    # assemble the workflow
    wf = nipype.Workflow(wf_name)
    wf.connect([
        (input_node, regfilt, [('in_file', 'in_file'),
                               ('design_file', 'design_file')]),
        (regfilt, output_node, [('out_file', 'out_file')]),
        (regfilt, sink, [('out_file', 'func_nuiss_corrected')]),
        (regfilt, qc_timeseries, [('out_file', 'inputspec.func')]),
    ])

    return wf
Beispiel #3
0
def datacens_workflow_threshold(SinkTag="func_preproc",
                                wf_name="data_censoring",
                                ex_before=1,
                                ex_after=2):
    """
    Data censoring ("scrubbing") of 4D functional data.

        Modified version of CPAC.scrubbing.scrubbing +
                            CPAC.generate_motion_statistics.generate_motion_statistics +
                            CPAC.func_preproc.func_preproc

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/scrubbing/scrubbing.html`
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/generate_motion_statistics/generate_motion_statistics.html`
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/func_preproc/func_preproc.html`

    Description:
        First, it calculates the framewise displacement according to Power's
        method. Second, it indexes the volumes whose FD exceeds the threshold.
        Thirdly, it excludes those volumes plus `ex_before` volumes before and
        `ex_after` volumes after each indexed volume. The workflow returns a
        4D scrubbed functional dataset.

    Workflow inputs:
        :param func: The reoriented, motion corrected, nuisance removed and
            bandpass filtered functional file.
        :param FD: The framewise displacement calculated by the
            MotionCorrecter.py script.
        :param threshold: FD threshold above which volumes are excluded
            (default: 0.2 mm).
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.

    Workflow outputs:

        :return: datacens_workflow - workflow

    Balint Kincses
    [email protected]
    2018


    References
    ----------

    .. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
           but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
           2142-2154. doi:10.1016/j.neuroimage.2011.10.018

    .. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
           toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
           NeuroImage. doi:10.1016/j.neuroimage.2012.03.017

    .. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
           and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.

    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    import PUMI.utils.utils_convert as utils_convert
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'FD', 'threshold']),
        name='inputspec')
    inputspec.inputs.threshold = 0.2  #mm

    #TODO_ready check CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threhold limit, no matter what we set.
    # - we use the power method to calculate FD
    above_thr = pe.MapNode(utility.Function(
        input_names=['in_file', 'threshold', 'frames_before', 'frames_after'],
        output_names=[
            'frames_in_idx', 'frames_out_idx', 'percentFD',
            'percent_scrubbed_file', 'fd_scrubbed_file', 'nvol'
        ],
        function=above_threshold),
                           iterfield=['in_file'],
                           name='above_threshold')
    above_thr.inputs.frames_before = ex_before
    above_thr.inputs.frames_after = ex_after

    # Save outputs which are important
    # NOTE: raw strings — "\/" is an invalid escape in a normal string literal
    ds_fd_scrub = pe.Node(interface=io.DataSink(), name='ds_fd_scrub')
    ds_fd_scrub.inputs.base_directory = SinkDir
    ds_fd_scrub.inputs.regexp_substitutions = [(r"(\/)[^\/]*$",
                                                "FD_scrubbed.csv")]
    pop_perc_scrub = pe.Node(interface=utils_convert.List2TxtFileOpen,
                             name='pop_perc_scrub')

    # save data out with Datasink
    ds_pop_perc_scrub = pe.Node(interface=io.DataSink(),
                                name='ds_pop_perc_scrub')
    ds_pop_perc_scrub.inputs.regexp_substitutions = [
        (r"(\/)[^\/]*$", "pop_percent_scrubbed.txt")
    ]
    ds_pop_perc_scrub.inputs.base_directory = SinkDir

    # Generate the weird input for the scrubbing procedure which is done in afni
    craft_scrub_input = pe.MapNode(
        utility.Function(input_names=['scrub_input', 'frames_in_1D_file'],
                         output_names=['scrub_input_string'],
                         function=get_indx),
        iterfield=['scrub_input', 'frames_in_1D_file'],
        name='scrubbing_craft_input_string')
    # Scrub the image
    scrubbed_preprocessed = pe.MapNode(utility.Function(
        input_names=['scrub_input'],
        output_names=['scrubbed_image'],
        function=scrub_image),
                                       iterfield=['scrub_input'],
                                       name='scrubbed_preprocessed')

    myqc = qc.timecourse2png("timeseries", tag="040_censored")

    outputspec = pe.Node(
        utility.IdentityInterface(fields=['scrubbed_image', 'FD_scrubbed']),
        name='outputspec')

    # save data out with Datasink
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir

    #TODO_ready: some plot for qualitiy checking

    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    ###Calculating mean Framewise Displacement (FD) as Power et al., 2012
    # Calculating frames to exclude and include after scrubbing
    analysisflow.connect(inputspec, 'FD', above_thr, 'in_file')
    analysisflow.connect(inputspec, 'threshold', above_thr, 'threshold')
    # Create the proper format for the scrubbing procedure
    analysisflow.connect(above_thr, 'frames_in_idx', craft_scrub_input,
                         'frames_in_1D_file')
    analysisflow.connect(
        above_thr, 'percent_scrubbed_file', ds,
        'percentFD')  # TODO save this in separate folder for QC
    analysisflow.connect(inputspec, 'func', craft_scrub_input, 'scrub_input')
    # Do the scubbing
    analysisflow.connect(craft_scrub_input, 'scrub_input_string',
                         scrubbed_preprocessed, 'scrub_input')
    # Output
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', outputspec,
                         'scrubbed_image')
    analysisflow.connect(above_thr, 'fd_scrubbed_file', outputspec,
                         'FD_scrubbed')  #TODO_ready: scrub FD file, as well
    analysisflow.connect(above_thr, 'fd_scrubbed_file', ds_fd_scrub,
                         'FD_scrubbed')

    analysisflow.connect(above_thr, 'percent_scrubbed_file', pop_perc_scrub,
                         'in_list')
    analysisflow.connect(pop_perc_scrub, 'txt_file', ds_pop_perc_scrub, 'pop')

    # Save a few files
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', ds,
                         'scrubbed_image')
    #analysisflow.connect(above_thr, 'percentFD', ds, 'scrubbed_image.@numberofvols')
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', myqc,
                         'inputspec.func')

    return analysisflow
Beispiel #4
0
def aroma_workflow(fwhm=0, # in mm
                SinkTag = "func_preproc", wf_name="ICA_AROMA"):

    """
    ICA AROMA method embedded into PUMI.
    https://github.com/rhr-pruim/ICA-AROMA

    function input: fwhm: smoothing FWHM in mm. fwhm=0 means no smoothing

    Workflow inputs:
        :param mc_func: The reoriented and motion-corrected functional file.
        :param mc_par: motion parameters file from mcflirt
        :param fnirt_warp_file: FNIRT warp file for standard-space mapping.
        :param mat_file: FLIRT affine matrix file.
        :param mask: Brain mask for the AROMA decomposition.
        :param qc_mask: Mask used for the QC plots.
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.

    Workflow outputs:

        :return: aroma_workflow - workflow

    Tamas Spisak
    [email protected]
    2018


    """
    from nipype.interfaces.fsl import ICA_AROMA
    import nipype.pipeline as pe
    from nipype.interfaces import utility
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    from nipype.interfaces.fsl import Smooth
    import os
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=['mc_func',
                                                          'mc_par',
                                                          'fnirt_warp_file',
                                                          'mat_file',
                                                          'mask',
                                                          'qc_mask'
                                                          ]),
                            name='inputspec')

    # build the actual pipeline: optional smoothing before AROMA
    if fwhm != 0:
        smoother = pe.MapNode(interface=Smooth(fwhm=fwhm),
                              iterfield=['in_file'],
                              name="smoother")
    myqc_before = qc.timecourse2png("timeseries", tag="1_original")

    # denoise_type='both' produces aggressive and non-aggressive outputs
    aroma = pe.MapNode(interface=ICA_AROMA(denoise_type='both'),
                       iterfield=['in_file',
                                  'motion_parameters',
                                  'mat_file',
                                  'fnirt_warp_file',
                                  'mask'],
                       name="ICA_AROMA")
    aroma.inputs.out_dir = 'AROMA_out'

    myqc_after_nonaggr = qc.timecourse2png("timeseries", tag="2_nonaggressive")
    myqc_after_aggr = qc.timecourse2png("timeseries", tag="3_aggressive")  # put these in the same QC dir

    # BUGFIX: was a bare `Function` (NameError) — must be utility.Function
    getMotICs = pe.MapNode(interface=utility.Function(
                                            input_names=['aroma_dir'],
                                            output_names=['motion_ICs'],
                                            function=extract_motionICs),
                           iterfield=['aroma_dir'],
                           name="get_motion_ICs")

    # Save outputs which are important
    # NOTE: raw strings — "\/" is an invalid escape in a normal string literal
    ds_nii = pe.Node(interface=io.DataSink(),
                 name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    ds_txt = pe.Node(interface=io.DataSink(),
                     name='ds_txt')
    ds_txt.inputs.base_directory = SinkDir
    ds_txt.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".txt")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=['aggr_denoised_file',
                                                           'nonaggr_denoised_file',
                                                           'motion_ICs',
                                                           'out_dir',
                                                           'fwhm']),
                         name='outputspec')
    outputspec.inputs.fwhm = fwhm

    analysisflow = pe.Workflow(name=wf_name)
    if fwhm != 0:
        analysisflow.connect(inputspec, 'mc_func', smoother, 'in_file')
        analysisflow.connect(smoother, 'smoothed_file', aroma, 'in_file')
        analysisflow.connect(smoother, 'smoothed_file', myqc_before, 'inputspec.func')
    else:
        analysisflow.connect(inputspec, 'mc_func', aroma, 'in_file')
        analysisflow.connect(inputspec, 'mc_func', myqc_before, 'inputspec.func')
    analysisflow.connect(inputspec, 'mc_par', aroma, 'motion_parameters')
    analysisflow.connect(inputspec, 'mat_file', aroma, 'mat_file')
    analysisflow.connect(inputspec, 'fnirt_warp_file', aroma, 'fnirt_warp_file')
    analysisflow.connect(inputspec, 'mask', aroma, 'mask')
    analysisflow.connect(aroma, 'out_dir', getMotICs, 'aroma_dir')
    analysisflow.connect(getMotICs, 'motion_ICs', ds_txt, 'motion_ICs')
    analysisflow.connect(aroma, 'aggr_denoised_file', ds_nii, 'AROMA_aggr_denoised')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', ds_nii, 'AROMA_nonaggr_denoised')

    analysisflow.connect(inputspec, 'qc_mask', myqc_before, 'inputspec.mask')
    analysisflow.connect(aroma, 'aggr_denoised_file', myqc_after_aggr, 'inputspec.func')
    #analysisflow.connect(inputspec, 'qc_mask', myqc_after_aggr, 'inputspec.mask')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', myqc_after_nonaggr, 'inputspec.func')
    #analysisflow.connect(inputspec, 'qc_mask', myqc_after_nonaggr, 'inputspec.mask')

    analysisflow.connect(aroma, 'aggr_denoised_file', outputspec, 'aggr_denoised_file')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', outputspec, 'nonaggr_denoised_file')
    analysisflow.connect(aroma, 'out_dir', outputspec, 'out_dir')
    analysisflow.connect(getMotICs, 'motion_ICs', outputspec, 'motion_ICs')

    return analysisflow
Beispiel #5
0
def tmpfilt_workflow(highpass_Hz,
                     lowpass_Hz,
                     SinkTag="func_preproc",
                     wf_name="temporal_filtering"):
    """
    Temporal (bandpass) filtering of functional data with AFNI 3dBandpass.

    Modified version of porcupine generated temporal filtering code:

    `source: -`

    Workflow inputs:
        :param func: The functional file to filter.
        :param highpass_Hz: Highpass cutoff frequency in Hz.
        :param lowpass_Hz: Lowpass cutoff frequency in Hz.
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.

    Workflow outputs:

        :return: tmpfilt_workflow - workflow

    Balint Kincses
    [email protected]
    2018
    """
    # TODO: expose the highpass/lowpass (in-seconds) settings one (or maybe
    # two) levels up.  [translated from Hungarian]

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import PUMI.func_preproc.info.info_get as info_get
    import PUMI.utils.utils_convert as utils_convert
    from nipype.interfaces import afni
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'func'
            #'highpass_Hz', # TODO: make these available as input
            #'lowpass_Hz'
        ]),
        name='inputspec')

    # Convert the TR value read from the header (string) to float
    func_str2float = pe.MapNode(interface=utils_convert.Str2Float,
                                iterfield=['str'],
                                name='func_str2float')

    # TODO_done: highpass filter changed from fslmaths to the AFNI
    # implementation, see:
    # https://neurostars.org/t/bandpass-filtering-different-outputs-from-fsl-and-nipype-custom-function/824
    tmpfilt = pe.MapNode(interface=afni.Bandpass(highpass=highpass_Hz,
                                                 lowpass=lowpass_Hz),
                         iterfield=['in_file', 'tr'],
                         name='tmpfilt')
    tmpfilt.inputs.despike = False
    tmpfilt.inputs.no_detrend = False  #True
    tmpfilt.inputs.notrans = True  # hopefully there are no initial transients in our data
    tmpfilt.inputs.outputtype = 'NIFTI_GZ'

    # Get TR value from header
    TRvalue = pe.MapNode(interface=info_get.TR,
                         iterfield=['in_file'],
                         name='TRvalue')

    myqc = qc.timecourse2png("timeseries",
                             tag="030_filtered_" +
                             str(highpass_Hz).replace('0.', '') + "_" +
                             str(lowpass_Hz).replace('0.', '') + "_Hz")

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['func_tmplfilt']),
                         name='outputspec')

    # Generic datasink module to store structured outputs
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func', tmpfilt, 'in_file')
    analysisflow.connect(inputspec, 'func', TRvalue, 'in_file')
    analysisflow.connect(TRvalue, 'TR', func_str2float, 'str')
    analysisflow.connect(func_str2float, 'float', tmpfilt, 'tr')
    analysisflow.connect(tmpfilt, 'out_file', ds, 'tmpfilt')
    analysisflow.connect(tmpfilt, 'out_file', outputspec, 'func_tmplfilt')
    analysisflow.connect(tmpfilt, 'out_file', myqc, 'inputspec.func')

    return analysisflow
Beispiel #6
0
def mc_workflow_afni(reference_vol="mid",
                     FD_mode="Power",
                     SinkTag="func_preproc",
                     wf_name="motion_correction_afni"):
    """
    Motion correction of 4D functional data with AFNI 3dvolreg, including
    Friston-24 regressor computation and framewise displacement (FD)
    statistics.

    Workflow inputs:
        :param func: The functional file to motion-correct.
        :param reference_vol: Either "first", "mid", "last", "mean", or the
            index of the volume which the rigid body registration (motion
            correction) will use as reference (default: "mid"). With "mean",
            a two-pass registration is done (first to the mean of the raw
            data, then to the mean of the once-corrected data).
        :param FD_mode: Either "Power" or "Jenkinson".
        :param SinkTag: The output directory in which the returned images (see
            workflow outputs) could be found in a subdirectory specific for
            this workflow.

    Workflow outputs:
        outputspec.func_out_file : motion corrected functional file
        outputspec.first24_file : Friston-24 motion regressors
        outputspec.mat_file : transformation matrices
        outputspec.mc_par_file : motion parameters
        outputspec.FD_file : framewise displacement time series

        :return: analysisflow - workflow
    """
    from nipype.interfaces.afni import preprocess
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import PUMI.func_preproc.info.info_get as info_get
    import nipype.interfaces.io as io
    import nipype.algorithms.confounds as conf
    import PUMI.utils.utils_math as utils_math
    import PUMI.utils.utils_convert as utils_convert
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['func', 'ref_vol', 'save_plots', 'stats_imgs']),
                        name='inputspec')
    inputspec.inputs.save_plots = True
    inputspec.inputs.stats_imgs = True
    inputspec.inputs.ref_vol = reference_vol

    # extract reference volume
    refvol = pe.MapNode(utility.Function(input_names=['refvol', 'func'],
                                         output_names=['refvol'],
                                         function=getRefVol),
                        iterfield=['func'],
                        name='getRefVol')

    if (reference_vol == "mean"):
        # first pass: register to the mean of the raw data
        func_motion_correct1 = pe.MapNode(interface=preprocess.Volreg(),
                                          iterfield=["in_file", "basefile"],
                                          name='mc_afni_init')
        func_motion_correct1.inputs.args = '-Fourier -twopass'
        func_motion_correct1.inputs.zpad = 4
        func_motion_correct1.inputs.outputtype = 'NIFTI_GZ'

        # extract reference volume (mean of the once-corrected data)
        refvol2 = pe.MapNode(utility.Function(input_names=['refvol', 'func'],
                                              output_names=['refvol'],
                                              function=getRefVol),
                             iterfield=['func'],
                             name='getRefVol2')

    func_motion_correct = pe.MapNode(interface=preprocess.Volreg(),
                                     iterfield=["in_file", "basefile"],
                                     name='mc_afni')
    func_motion_correct.inputs.args = '-Fourier -twopass'
    func_motion_correct.inputs.zpad = 4
    func_motion_correct.inputs.outputtype = 'NIFTI_GZ'

    myqc = qc.timecourse2png("timeseries", tag="010_motioncorr")

    # Calculate Friston24 parameters
    calc_friston = pe.MapNode(utility.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=calc_friston_twenty_four),
                              iterfield=['in_file'],
                              name='calc_friston')

    if FD_mode == "Power":
        calculate_FD = pe.MapNode(conf.FramewiseDisplacement(
            parameter_source='AFNI', save_plot=True),
                                  iterfield=['in_file'],
                                  name='calculate_FD_Power')
    elif FD_mode == "Jenkinson":
        calculate_FD = pe.MapNode(utility.Function(input_names=['in_file'],
                                                   output_names=['out_file'],
                                                   function=calculate_FD_J),
                                  iterfield=['in_file'],
                                  name='calculate_FD_Jenkinson')

    # BUGFIX: the following summary nodes were previously created only inside
    # the "Jenkinson" branch, although they are connected unconditionally
    # below — the default FD_mode="Power" raised a NameError.
    # compute mean and max FD
    meanFD = pe.MapNode(interface=utils_math.Txt2meanTxt,
                        iterfield=['in_file'],
                        name='meanFD')
    meanFD.inputs.axis = 0  # global mean
    meanFD.inputs.header = True  # global mean

    maxFD = pe.MapNode(interface=utils_math.Txt2maxTxt,
                       iterfield=['in_file'],
                       name='maxFD')
    maxFD.inputs.axis = 0  # global mean
    maxFD.inputs.header = True  # global mean

    pop_FD = pe.Node(interface=utils_convert.List2TxtFileOpen,
                     name='pop_FD')
    pop_FDmax = pe.Node(interface=utils_convert.List2TxtFileOpen,
                        name='pop_FDmax')

    # save data out with Datasink
    # NOTE: raw strings — "\/" is an invalid escape in a normal string literal
    ds_fd = pe.Node(interface=io.DataSink(), name='ds_pop_fd')
    ds_fd.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "FD.txt")]
    ds_fd.inputs.base_directory = SinkDir

    # save data out with Datasink
    ds_fd_max = pe.Node(interface=io.DataSink(), name='ds_pop_fd_max')
    ds_fd_max.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "FD_max.txt")]
    ds_fd_max.inputs.base_directory = SinkDir

    # Save outputs which are important
    ds_qc_fd = pe.Node(interface=io.DataSink(), name='ds_qc_fd')
    ds_qc_fd.inputs.base_directory = QCDir
    ds_qc_fd.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "_FD.pdf")]

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_out_file', 'first24_file', 'mat_file', 'mc_par_file', 'FD_file'
    ]),
                         name='outputspec')

    # save data out with Datasink
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]
    ds_nii.inputs.base_directory = SinkDir

    # save data out with Datasink
    ds_text = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_text.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".txt")]
    ds_text.inputs.base_directory = SinkDir

    # TODO_ready set the proper images which has to be saved in a the datasink specified directory
    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)

    analysisflow.connect(inputspec, 'func', refvol, 'func')
    analysisflow.connect(inputspec, 'ref_vol', refvol, 'refvol')
    if (reference_vol == "mean"):
        analysisflow.connect(inputspec, 'func', func_motion_correct1,
                             'in_file')
        analysisflow.connect(refvol, 'refvol', func_motion_correct1,
                             'basefile')
        analysisflow.connect(func_motion_correct1, 'out_file', refvol2, 'func')
        analysisflow.connect(inputspec, 'ref_vol', refvol2, 'refvol')
        analysisflow.connect(inputspec, 'func', func_motion_correct, 'in_file')
        analysisflow.connect(refvol2, 'refvol', func_motion_correct,
                             'basefile')
    else:
        analysisflow.connect(inputspec, 'func', func_motion_correct, 'in_file')
        analysisflow.connect(refvol, 'refvol', func_motion_correct, 'basefile')

    analysisflow.connect(func_motion_correct, 'oned_file', calc_friston,
                         'in_file')
    analysisflow.connect(func_motion_correct, 'oned_file', calculate_FD,
                         'in_file')

    analysisflow.connect(func_motion_correct, 'out_file', outputspec,
                         'func_out_file')
    analysisflow.connect(func_motion_correct, 'oned_matrix_save', outputspec,
                         'mat_file')
    analysisflow.connect(func_motion_correct, 'oned_file', outputspec,
                         'mc_par_file')
    analysisflow.connect(func_motion_correct, 'out_file', ds_nii, 'mc_func')
    analysisflow.connect(func_motion_correct, 'oned_file', ds_text, 'mc_par')
    # analysisflow.connect(func_motion_correct, 'variance_img', ds, 'mc.@variance_img')
    analysisflow.connect(calc_friston, 'out_file', outputspec, 'first24_file')
    analysisflow.connect(calc_friston, 'out_file', ds_text, 'mc_first24')
    analysisflow.connect(calculate_FD, 'out_file', outputspec, 'FD_file')
    analysisflow.connect(func_motion_correct, 'out_file', myqc,
                         'inputspec.func')
    # pop-level mean FD
    analysisflow.connect(calculate_FD, 'out_file', meanFD, 'in_file')
    analysisflow.connect(calculate_FD, 'out_file', ds_text, 'mc_fd')
    analysisflow.connect(meanFD, 'mean_file', pop_FD, 'in_list')
    analysisflow.connect(pop_FD, 'txt_file', ds_fd, 'pop')
    if FD_mode == "Power":
        # BUGFIX: only the Power (FramewiseDisplacement) interface produces
        # 'out_figure'; connecting it in Jenkinson mode raised at build time.
        analysisflow.connect(calculate_FD, 'out_figure', ds_qc_fd, 'FD')

    analysisflow.connect(calculate_FD, 'out_file', maxFD, 'in_file')
    analysisflow.connect(maxFD, 'max_file', pop_FDmax, 'in_list')
    analysisflow.connect(pop_FDmax, 'txt_file', ds_fd_max, 'pop')

    return analysisflow
Beispiel #7
0
def mc_workflow_fsl(reference_vol="mid",
                    FD_mode="Power",
                    SinkTag="func_preproc",
                    wf_name="motion_correction_fsl"):
    """
    Motion-correct 4D functional data with FSL MCFLIRT and derive
    motion-based nuisance regressors and QC outputs.

    Modified version of CPAC.func_preproc.func_preproc and
    CPAC.generate_motion_statistics.generate_motion_statistics:

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/func_preproc/func_preproc.html`
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/generate_motion_statistics/generate_motion_statistics.html`

    Use FSL MCFLIRT to do the motion correction of the 4D functional data
    and use the 6-dof rigid body motion parameters to calculate Friston-24
    parameters for a later nuisance regression step. Framewise displacement
    (FD) is computed per subject, summarized (mean/max) and pooled into
    population-level text files; rotation/translation plots and an FD plot
    are saved for QC.

    Workflow inputs:
        :param func: The reoriented functional file.
        :param reference_vol: Either "first", "mid", "last", "mean", or the
            index of the volume which the rigid body registration (motion
            correction) will use as reference. Default: "mid".
        :param FD_mode: Either "Power" or "Jenkinson".
        :param SinkTag: The output directory in which the returned images
            (see workflow outputs) can be found, in a subdirectory specific
            for this workflow.
        :param wf_name: Name of the returned nipype workflow.

    Workflow outputs:

        :return: analysisflow - the motion correction workflow

    :raises ValueError: if FD_mode is neither "Power" nor "Jenkinson".

    Balint Kincses
    [email protected]
    2018
    """
    # TODO_ready nipype has the ability to calculate FD: the function from
    # CPAC calculates it correctly
    # import relevant packages (function-local, following the file's style)
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.algorithms.confounds as conf
    import nipype.interfaces.io as io
    import PUMI.utils.utils_math as utils_math
    import PUMI.utils.utils_convert as utils_convert
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    # Validate up front: an unsupported FD_mode would otherwise surface only
    # as a confusing NameError when 'calculate_FD' is first connected.
    if FD_mode not in ("Power", "Jenkinson"):
        raise ValueError(
            "FD_mode must be 'Power' or 'Jenkinson', got %r" % (FD_mode,))

    # Sink directories for regular outputs and for QC images.
    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['func', 'ref_vol', 'save_plots', 'stats_imgs']),
                        name='inputspec')
    inputspec.inputs.save_plots = True
    inputspec.inputs.stats_imgs = True
    inputspec.inputs.ref_vol = reference_vol

    # extract reference volume (parametrized by 'reference_vol')
    refvol = pe.MapNode(utility.Function(input_names=['refvol', 'func'],
                                         output_names=['refvol'],
                                         function=getRefVol),
                        iterfield=['func'],
                        name='getRefVol')

    # Wraps command **mcflirt**.
    # With "mean" MCFLIRT computes its own mean reference (mean_vol=True);
    # otherwise an explicit reference volume is fed in via 'ref_file'.
    if reference_vol == "mean":
        mcflirt = pe.MapNode(
            interface=fsl.MCFLIRT(interpolation="spline", stats_imgs=False),
            # stages=4), #stages 4: more accurate but slow
            iterfield=['in_file'],
            name='mcflirt')
        mcflirt.inputs.mean_vol = True
    else:
        mcflirt = pe.MapNode(
            interface=fsl.MCFLIRT(interpolation="spline", stats_imgs=False),
            # stages=4), #stages 4: more accurate but slow
            iterfield=['in_file', 'ref_file'],
            name='mcflirt')

    mcflirt.inputs.dof = 6  # rigid body registration
    mcflirt.inputs.save_mats = True
    mcflirt.inputs.save_plots = True
    mcflirt.inputs.save_rms = True
    mcflirt.inputs.stats_imgs = False

    myqc = qc.timecourse2png("timeseries", tag="010_motioncorr")

    # Calculate Friston-24 motion parameters for nuisance regression
    calc_friston = pe.MapNode(utility.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=calc_friston_twenty_four),
                              iterfield=['in_file'],
                              name='calc_friston')

    # Framewise displacement: Power's or Jenkinson's method
    if FD_mode == "Power":
        calculate_FD = pe.MapNode(conf.FramewiseDisplacement(
            parameter_source='FSL', save_plot=True),
                                  iterfield=['in_file'],
                                  name='calculate_FD_Power')
    else:  # "Jenkinson" (validated above)
        calculate_FD = pe.MapNode(utility.Function(input_names=['in_file'],
                                                   output_names=['out_file'],
                                                   function=calculate_FD_J),
                                  iterfield=['in_file'],
                                  name='calculate_FD_Jenkinson')

    # compute mean and max FD per subject
    meanFD = pe.MapNode(interface=utils_math.Txt2meanTxt,
                        iterfield=['in_file'],
                        name='meanFD')
    meanFD.inputs.axis = 0  # global mean
    meanFD.inputs.header = True

    maxFD = pe.MapNode(interface=utils_math.Txt2maxTxt,
                       iterfield=['in_file'],
                       name='maxFD')
    maxFD.inputs.axis = 0  # global max
    maxFD.inputs.header = True

    # pool per-subject summaries into population-level text files
    pop_FD = pe.Node(interface=utils_convert.List2TxtFileOpen, name='pop_FD')
    pop_FDmax = pe.Node(interface=utils_convert.List2TxtFileOpen,
                        name='pop_FDmax')

    # save data out with Datasink
    ds_fd = pe.Node(interface=io.DataSink(), name='ds_pop_fd')
    ds_fd.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "FD.txt")]
    ds_fd.inputs.base_directory = SinkDir

    # save data out with Datasink
    ds_fd_max = pe.Node(interface=io.DataSink(), name='ds_pop_fd_max')
    ds_fd_max.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "FD_max.txt")]
    ds_fd_max.inputs.base_directory = SinkDir

    # QC plots of the 6 motion parameters
    plot_motion_rot = pe.MapNode(
        interface=fsl.PlotMotionParams(in_source='fsl'),
        name='plot_motion_rot',
        iterfield=['in_file'])
    plot_motion_rot.inputs.plot_type = 'rotations'

    plot_motion_tra = pe.MapNode(
        interface=fsl.PlotMotionParams(in_source='fsl'),
        name='plot_motion_trans',
        iterfield=['in_file'])
    plot_motion_tra.inputs.plot_type = 'translations'

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_out_file', 'first24_file', 'mat_file', 'mc_par_file', 'FD_file'
    ]),
                         name='outputspec')

    # save data out with Datasink
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]
    ds_nii.inputs.base_directory = SinkDir

    # save data out with Datasink
    ds_text = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_text.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".txt")]
    ds_text.inputs.base_directory = SinkDir

    # Save outputs which are important
    ds_qc_fd = pe.Node(interface=io.DataSink(), name='ds_qc_fd')
    ds_qc_fd.inputs.base_directory = QCDir
    ds_qc_fd.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "_FD.pdf")]

    # Save outputs which are important
    ds_qc_rot = pe.Node(interface=io.DataSink(), name='ds_qc_rot')
    ds_qc_rot.inputs.base_directory = QCDir
    ds_qc_rot.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "_rot.png")]

    # Save outputs which are important
    ds_qc_tra = pe.Node(interface=io.DataSink(), name='ds_qc_tra')
    ds_qc_tra.inputs.base_directory = QCDir
    ds_qc_tra.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", "_trans.png")]

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func', mcflirt, 'in_file')
    analysisflow.connect(inputspec, 'func', refvol, 'func')
    analysisflow.connect(inputspec, 'ref_vol', refvol, 'refvol')
    if reference_vol != "mean":
        analysisflow.connect(refvol, 'refvol', mcflirt, 'ref_file')
    analysisflow.connect(mcflirt, 'par_file', calc_friston, 'in_file')
    analysisflow.connect(mcflirt, 'par_file', calculate_FD, 'in_file')

    analysisflow.connect(mcflirt, 'out_file', outputspec, 'func_out_file')
    analysisflow.connect(mcflirt, 'mat_file', outputspec, 'mat_file')
    analysisflow.connect(mcflirt, 'par_file', outputspec, 'mc_par_file')
    analysisflow.connect(mcflirt, 'out_file', ds_nii, 'mc_func')
    analysisflow.connect(mcflirt, 'par_file', ds_text, 'mc_par')
    analysisflow.connect(mcflirt, 'rms_files', ds_text, 'mc_rms')
    analysisflow.connect(calc_friston, 'out_file', outputspec, 'first24_file')
    analysisflow.connect(calc_friston, 'out_file', ds_text, 'mc_first24')
    analysisflow.connect(calculate_FD, 'out_file', outputspec, 'FD_file')
    analysisflow.connect(mcflirt, 'par_file', plot_motion_rot, 'in_file')
    analysisflow.connect(mcflirt, 'par_file', plot_motion_tra, 'in_file')
    analysisflow.connect(plot_motion_rot, 'out_file', ds_qc_rot,
                         'motion_correction')
    analysisflow.connect(plot_motion_tra, 'out_file', ds_qc_tra,
                         'motion_correction')
    analysisflow.connect(mcflirt, 'out_file', myqc, 'inputspec.func')
    # pop-level mean FD
    analysisflow.connect(calculate_FD, 'out_file', meanFD, 'in_file')
    analysisflow.connect(calculate_FD, 'out_file', ds_text, 'mc_fd')
    if FD_mode == "Power":
        # Only the Power node (FramewiseDisplacement with save_plot=True)
        # exposes 'out_figure'; the Jenkinson Function node only has
        # 'out_file', so this connection must be conditional.
        analysisflow.connect(calculate_FD, 'out_figure', ds_qc_fd, 'FD')
    analysisflow.connect(meanFD, 'mean_file', pop_FD, 'in_list')
    analysisflow.connect(pop_FD, 'txt_file', ds_fd, 'pop')

    analysisflow.connect(calculate_FD, 'out_file', maxFD, 'in_file')
    analysisflow.connect(maxFD, 'max_file', pop_FDmax, 'in_list')
    analysisflow.connect(pop_FDmax, 'txt_file', ds_fd_max, 'pop')

    return analysisflow