Example #1
 def get_roi_perc(self, img, msk, mask_vox):
     roi_stat = fsl.ImageStats(in_file=img, op_string='-k ' + msk + ' -V')
     print(roi_stat.cmdline)
     stat_run = roi_stat.run()
     stat = float(list(stat_run.outputs.out_stat)[0])
     perc = (stat / mask_vox) * 100
     return perc
Example #2
 def get_roi_activated_vox(self, msk):
     roi_stat = fsl.ImageStats(in_file=self.img,
                               op_string='-k ' + msk + ' -l 0 -V')
     print(roi_stat.cmdline)
     stat_run = roi_stat.run()
     stat = float(list(stat_run.outputs.out_stat)[0])
     return stat
Example #3
 def get_roi_mean_stat(self, msk):
     mean_zstat = fsl.ImageStats(in_file=self.img,
                                 op_string='-k ' + msk + ' -m')
     print(mean_zstat.cmdline)
     stat_run = mean_zstat.run()
     zscore = float(stat_run.outputs.out_stat)
     return zscore
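The three methods above share one pattern: configure an fslstats call through fsl.ImageStats, run it, and read outputs.out_stat. A minimal standalone sketch (file names are hypothetical) of why Examples 1 and 2 index into the result while Example 3 casts it directly: '-V' yields a pair of values, whereas a single statistic such as '-m' comes back as a plain float.

from nipype.interfaces import fsl

# '-k mask.nii.gz -V' -> nonzero-voxel count and volume within the mask
stats = fsl.ImageStats(in_file='zstat1.nii.gz', op_string='-k mask.nii.gz -V')
result = stats.run()  # runs: fslstats zstat1.nii.gz -k mask.nii.gz -V
n_vox, vol_mm3 = result.outputs.out_stat  # two values, hence the [0] indexing above

# a single statistic ('-m' = mean) arrives as one float
mean = fsl.ImageStats(in_file='zstat1.nii.gz', op_string='-m').run().outputs.out_stat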
Example #4
def Get_Volume(timepoint):
    tumor_dir = os.path.join(parent_dir, 'FDM', 'Outputs', 'Tumor', timepoint)
    infile = os.path.join(tumor_dir, os.listdir(tumor_dir)[0])
    Volume = fsl.ImageStats()
    Volume.inputs.in_file = infile
    Volume.inputs.op_string = '-V'  # nonzero-voxel count and volume (mm^3)
    Vout = Volume.run()
    return Vout.outputs.out_stat
Example #5
def Get_Mean_ADC(timepoint):
    adc_dir = os.path.join(parent_dir, 'FDM', 'Outputs', 'ADC', timepoint)
    tumor_dir = os.path.join(parent_dir, 'FDM', 'Outputs', 'Tumor', timepoint)
    infile = os.path.join(adc_dir, os.listdir(adc_dir)[0])
    maskfile = os.path.join(tumor_dir, os.listdir(tumor_dir)[0])
    ADC = fsl.ImageStats()
    ADC.inputs.in_file = infile
    ADC.inputs.op_string = '-k %s -M'  # mean of nonzero voxels within the mask
    ADC.inputs.mask_file = maskfile
    ADCout = ADC.run()
    return ADCout.outputs.out_stat
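Note the difference from Example 4: when the op_string contains a '%s' placeholder, nipype substitutes the path given in mask_file into it when building the command line. A hedged sketch with hypothetical paths:

adc = fsl.ImageStats(in_file='adc.nii.gz',
                     op_string='-k %s -M',   # '%s' is replaced by mask_file
                     mask_file='tumor.nii.gz')
print(adc.cmdline)  # fslstats adc.nii.gz -k tumor.nii.gz -M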
Example #6
def normalize_image(nifti_input_file):
    """Scale an image so that its maximum intensity becomes 1000."""
    from nipype.interfaces import fsl

    stats = fsl.ImageStats(in_file=nifti_input_file, op_string='-R')
    stats_results = stats.run()
    # the -R flag outputs the (non-robust) min and max intensity values,
    # so out_stat[1] is the image maximum
    norm = fsl.ImageMaths(in_file=nifti_input_file,
                          op_string=' -div ' +
                          str(stats_results.outputs.out_stat[1]) +
                          ' -mul 1000')
    run_image_norm = norm.run()
    return run_image_norm.outputs.out_file
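One caveat on the function above: fslstats '-R' reports the absolute minimum and maximum, so a few hot voxels can dominate the scaling; the robust-range flag '-r' (roughly the 2nd and 98th percentiles) is often the safer divisor. A hedged variant, assuming '-r' is parsed into the same [min, max] pair:

stats = fsl.ImageStats(in_file='T1.nii.gz', op_string='-r')  # hypothetical input
robust_max = stats.run().outputs.out_stat[1]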
Example #7
def get_workflow(parameters, name=0):
    wf = pe.Workflow(name="%04d" % name + "regionGrowing")
    wf.base_dir = "/scratch/henry_temp/keshavan/region_growing_test"
    n = pe.Node(niu.Function(input_names=[
        "inputv", "seeds", "multiplier", "nbhd", "iterations", "timestep",
        "smoothingiterations"
    ],
                             output_names=["outfile"],
                             function=getSRGS),
                name="srgs")
    inputspec = pe.Node(niu.IdentityInterface(fields=["seeds", "in_file"]),
                        name="inputspec")
    n.iterables = [(q, parameters[q].tolist()) for q in [
        "multiplier", "nbhd", "iterations", "timestep", "smoothingiterations"
    ]]
    n.synchronize = True
    wf.connect(inputspec, "seeds", n, "seeds")
    wf.connect(inputspec, "in_file", n, "inputv")

    dt = pe.Node(fsl.ChangeDataType(output_datatype="short"), name="changedt")
    wf.connect(n, "outfile", dt, "in_file")

    stats = pe.Node(fsl.ImageStats(op_string="-c -w"), name="stats")
    wf.connect(dt, "out_file", stats, "in_file")

    avg = pe.JoinNode(ants.AverageImages(dimension=3, normalize=False),
                      name="average",
                      joinsource="srgs",
                      joinfield=["images"])
    wf.connect(dt, "out_file", avg, "images")

    st = pe.JoinNode(niu.Function(input_names=["out_stats", "parameters"],
                                  output_names=["outfile"],
                                  function=combine_stats),
                     name="combiner",
                     joinsource="srgs",
                     joinfield=["out_stats"])
    #wf.connect(dt, "out_file", st, "out_files")
    wf.connect(stats, "out_stat", st, "out_stats")
    st.inputs.parameters = parameters

    outputspec = pe.Node(niu.IdentityInterface(fields=["avg_image", "stats"]),
                         name="outputspec")
    wf.connect(avg, "output_average_image", outputspec, "avg_image")
    wf.connect(st, "outfile", outputspec, "stats")
    return wf, inputspec, outputspec
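Because parameters[q].tolist() is called on every value and n.synchronize = True pairs the iterables element-wise (rather than taking their cross product), the expected input is a dict of equal-length arrays. A hypothetical invocation, assuming the module's getSRGS and combine_stats helpers are importable:

import numpy as np

parameters = {  # equal lengths; synchronize=True zips these instead of crossing them
    "multiplier": np.array([1.5, 2.0, 2.5]),
    "nbhd": np.array([1, 1, 2]),
    "iterations": np.array([5, 10, 10]),
    "timestep": np.array([0.05, 0.05, 0.05]),
    "smoothingiterations": np.array([5, 5, 10]),
}
wf, inputspec, outputspec = get_workflow(parameters, name=1)
inputspec.inputs.in_file = 't1.nii.gz'     # hypothetical
inputspec.inputs.seeds = [[128, 128, 60]]  # hypothetical seed voxel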
Example #8
def init_zscore_wf(name="zscore_wf", memcalc=MemoryCalculator()):
    """
    Within-volume z score
    Used for ReHo and ALFF
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        interface=niu.IdentityInterface(fields=["in_file", "mask_file"]),
        name="inputnode",
    )

    stats = pe.Node(
        interface=fsl.ImageStats(),
        name="stats",
    )
    stats.inputs.op_string = "-k %s -m -s"

    def get_zscore_op_string(stats):
        """
        Creates the op_string for fslmaths
        :param stats: [mean, std] pair as returned by fslstats '-k %s -m -s'
        :return: op_string for fslmaths
        """
        return "-sub {:f} -div {:f}".format(*stats)

    zscore = pe.Node(
        interface=fsl.ImageMaths(),
        name="zscore",
    )

    outputnode = pe.Node(interface=niu.IdentityInterface(fields=["out_file"]),
                         name="outputnode")

    workflow.connect([
        (inputnode, stats, [("in_file", "in_file"),
                            ("mask_file", "mask_file")]),
        (inputnode, zscore, [("in_file", "in_file"),
                             ("mask_file", "mask_file")]),
        (stats, zscore, [(("out_stat", get_zscore_op_string), "op_string")]),
        (zscore, outputnode, [("out_file", "out_file")]),
    ])

    return workflow
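The stats node emits a [mean, std] pair (from '-k %s -m -s'), which get_zscore_op_string turns into the matching fslmaths operation. With made-up values, plus a hypothetical invocation of the workflow:

print("-sub {:f} -div {:f}".format(3.2, 1.5))
# '-sub 3.200000 -div 1.500000' -> fslmaths subtracts the mean, divides by the std

wf = init_zscore_wf()
wf.inputs.inputnode.in_file = 'reho.nii.gz'    # hypothetical
wf.inputs.inputnode.mask_file = 'mask.nii.gz'  # hypothetical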
Example #9
def compcorr(name='compcorr'):
    from nipype.workflows.rsfmri.fsl.resting import extract_noise_components
    from nipype.algorithms.misc import TSNR

    wkfl = pe.Workflow(name=name)
    inputnode = pe.Node(utility.IdentityInterface(
        fields=['in_file', 'mask', 'num_components']),
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(fields=['corrected_file']),
                         name='outputspec')

    tsnr = pe.Node(TSNR(), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-k %s -p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    compcor = pe.Node(
        utility.Function(input_names=[
            'realigned_file', 'noise_mask_file', 'num_components'
        ],
                         output_names=['noise_components'],
                         function=extract_noise_components),
        name='compcorr',
    )
    remove_noise = pe.Node(
        fsl.FilterRegressor(filter_all=True),
        name='remove_noise',
    )

    wkfl.connect([
        (inputnode, tsnr, [('in_file', 'in_file')]),
        (inputnode, compcor, [('in_file', 'realigned_file'),
                              ('num_components', 'num_components')]),
        (tsnr, threshold_stddev, [('stddev_file', 'in_file')]),
        (tsnr, getthresh, [('stddev_file', 'in_file')]),
        (inputnode, getthresh, [('mask', 'mask_file')]),
        (inputnode, remove_noise, [('in_file', 'in_file')]),
        (getthresh, threshold_stddev, [('out_stat', 'thresh')]),
        (threshold_stddev, compcor, [('out_file', 'noise_mask_file')]),
        (compcor, remove_noise, [('noise_components', 'design_file')]),
        (inputnode, remove_noise, [('mask', 'mask')]),
        (remove_noise, outputnode, [('out_file', 'corrected_file')]),
    ])
    return wkfl
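A hypothetical invocation of the workflow above: the 98th-percentile threshold on the tSNR standard-deviation map selects the noisiest voxels, whose principal components are then regressed out of the data.

wf = compcorr()
wf.inputs.inputspec.in_file = 'rest_realigned.nii.gz'  # hypothetical
wf.inputs.inputspec.mask = 'brain_mask.nii.gz'         # hypothetical
wf.inputs.inputspec.num_components = 5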
Example #10
def t_compcor(wf_name="t_compcor"):

    cc = pe.Workflow(name=wf_name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=['func', 'num_noise_components']),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['noise_mask_file', 'noise_components', 'residual_file']),
                         name='outputspec')

    tsnr = pe.MapNode(TSNR(regress_poly=2), name='tsnr', iterfield=['in_file'])
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 98'),
                           name='getthreshold',
                           iterfield=['in_file'])
    threshold_stddev = pe.MapNode(fsl.Threshold(),
                                  name='threshold',
                                  iterfield=['in_file', 'thresh'])
    compcor = pe.MapNode(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                         name='compcorr',
                         iterfield=['realigned_file', 'noise_mask_file'])
    remove_noise = pe.MapNode(fsl.FilterRegressor(filter_all=True),
                              name='remove_noise',
                              iterfield=['in_file', 'design_file'])

    cc.connect(inputnode, 'func', tsnr, 'in_file')
    cc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    cc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    cc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    cc.connect(inputnode, 'func', compcor, 'realigned_file')
    cc.connect(threshold_stddev, 'out_file', compcor, 'noise_mask_file')
    cc.connect(inputnode, 'num_noise_components', compcor, 'num_components')
    cc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    cc.connect(compcor, 'noise_components', remove_noise, 'design_file')
    cc.connect(compcor, 'noise_components', outputnode, 'noise_components')
    cc.connect(remove_noise, 'out_file', outputnode, 'residual_file')
    cc.connect(threshold_stddev, 'out_file', outputnode, 'noise_mask_file')

    return cc
Example #11
    def calc_iqms(self):
        # tSNR
        tsnr = TSNR()
        tsnr.inputs.in_file = self.source_img
        tsnr.inputs.mean_file = os.path.join(self.outputdir, self.task,
                                             self.task + "_mean_tsnr.nii.gz")
        tsnr_res = tsnr.run()
        mean_tsnr_img = tsnr_res.outputs.mean_file
        stat = fsl.ImageStats(in_file=mean_tsnr_img, op_string=' -M')
        stat_run = stat.run()
        mean_tsnr = round(stat_run.outputs.out_stat, 2)
        # framewise displacement
        if isinstance(self.confounds, str):
            # self.confounds is a placeholder string rather than a dataframe
            mean_fd = 'n/a'
        else:
            column_means = self.confounds.mean(axis=0, skipna=True)
            mean_fd = round(column_means['framewise_displacement'], 2)

        return mean_tsnr, mean_fd
Example #12
func_2_template.inputs.float = True

# ========================================================================================================
# In[15]:
# Use AFNI (thanks Bob Cox) because, as you know, FSL does not support anisotropic smoothing
# 2D smoothing
smoothing_2d = Node(afni.Merge(), name='smoothing_2d')
smoothing_2d.inputs.out_file = 'afni_2d_smoothed.nii.gz'
smoothing_2d.inputs.doall = True
smoothing_2d.inputs.blurfwhm_bx_by_bz = [4, 4, 0]

# ========================================================================================================
# In[16]:

# Getting median intensity
Median_Intensity = Node(fsl.ImageStats(), name='Median_Intensity')
# Put -k before -p 50
Median_Intensity.inputs.op_string = '-k %s -p 50'

# Scale median intensity


def Scale_Median_Intensity(median_intensity):
    scaling = 10000 / median_intensity
    return scaling


Scale_Median_Intensity = Node(name='Scale_Median_Intensity',
                              interface=Function(
                                  input_names=['median_intensity'],
                                  output_names=['scaling'],
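The listing is cut off above (presumably it closes by passing the helper via function=, after which the name Scale_Median_Intensity is shadowed by the identically named Node). The helper itself is plain grand-mean scaling; with a made-up median from the '-k %s -p 50' node:

scaling = 10000 / 500.0  # hypothetical median intensity of 500
print(scaling)           # 20.0, applied downstream as an fslmaths '-mul' factor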
Example #13
preproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file')
"""
Mask the functional runs with the extracted mask
"""

maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                               op_string='-mas'),
                      iterfield=['in_file'],
                      name='maskfunc')
preproc.connect(motion_correct, 'out_file', maskfunc, 'in_file')
preproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2')
"""
Determine the 2nd and 98th percentile intensities of each functional run
"""

getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                       iterfield=['in_file'],
                       name='getthreshold')
preproc.connect(maskfunc, 'out_file', getthresh, 'in_file')
"""
Threshold the first run of the functional data at 10% of the 98th percentile
"""

threshold = pe.Node(interface=fsl.ImageMaths(out_data_type='char',
                                             suffix='_thresh'),
                    name='threshold')
preproc.connect(maskfunc, ('out_file', pickfirst), threshold, 'in_file')
"""
Define a function to get 10% of the intensity
"""
Example #14
def create_susan_smooth(name="susan_smooth", separate_masks=True):
    """Create a SUSAN smoothing workflow

    Parameters
    ----------

    ::

        name : name of workflow (default: susan_smooth)
        separate_masks : separate masks for each run

    Inputs::

        inputnode.in_files : functional runs (filename or list of filenames)
        inputnode.fwhm : fwhm for smoothing with SUSAN
        inputnode.mask_file : mask used for estimating SUSAN thresholds (but not for smoothing)

    Outputs::

        outputnode.smoothed_files : functional runs (filename or list of filenames)

    Example
    -------

    >>> smooth = create_susan_smooth()
    >>> smooth.inputs.inputnode.in_files = 'f3.nii'
    >>> smooth.inputs.inputnode.fwhm = 5
    >>> smooth.inputs.inputnode.mask_file = 'mask.nii'
    >>> smooth.run() # doctest: +SKIP

    """

    susan_smooth = pe.Workflow(name=name)

    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    inputnode = pe.Node(interface=util.IdentityInterface(fields=['in_files',
                                                                 'fwhm',
                                                                 'mask_file']),
                        name='inputnode')

    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = pe.MapNode(interface=fsl.SUSAN(),
                        iterfield=['in_file', 'brightness_threshold','usans'],
                        name='smooth')

    """
    Determine the median value of the functional runs using the mask
    """


    if separate_masks:
        median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                            iterfield = ['in_file', 'mask_file'],
                            name='median')
    else:
        median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                            iterfield = ['in_file'],
                            name='median')
    susan_smooth.connect(inputnode, 'in_files', median, 'in_file')
    susan_smooth.connect(inputnode, 'mask_file', median, 'mask_file')

    """
    Mask the motion corrected functional runs with the dilated mask
    """

    if separate_masks:
        mask = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                   op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='mask')
    else:
        mask = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name='mask')
    susan_smooth.connect(inputnode, 'in_files', mask, 'in_file')
    susan_smooth.connect(inputnode, 'mask_file', mask, 'in_file2')

    """
    Determine the mean image from each functional run
    """

    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                          iterfield=['in_file'],
                          name='meanfunc2')
    susan_smooth.connect(mask, 'out_file', meanfunc, 'in_file')

    """
    Merge the median values with the mean functional images into a coupled list
    """

    merge = pe.Node(interface=util.Merge(2, axis='hstack'),
                    name='merge')
    susan_smooth.connect(meanfunc, 'out_file', merge, 'in1')
    susan_smooth.connect(median, 'out_stat', merge, 'in2')

    """
    Define a function to get the brightness threshold for SUSAN
    """
    susan_smooth.connect(inputnode, 'fwhm', smooth, 'fwhm')
    susan_smooth.connect(inputnode, 'in_files', smooth, 'in_file')
    susan_smooth.connect(median, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
    susan_smooth.connect(merge, ('out', getusans), smooth, 'usans')

    outputnode = pe.Node(interface=util.IdentityInterface(fields=['smoothed_files']),
                    name='outputnode')

    susan_smooth.connect(smooth, 'smoothed_file', outputnode, 'smoothed_files')

    return susan_smooth
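getbtthresh and getusans are not shown in this listing; in the nipype module these workflows come from, they are small helpers along these lines (a close paraphrase, not guaranteed verbatim):

def getbtthresh(medianvals):
    # SUSAN brightness threshold: 75% of each run's median intensity
    return [0.75 * val for val in medianvals]

def getusans(inlist):
    # pair each mean functional image with 75% of the matching median
    return [[tuple([val[0], 0.75 * val[1]])] for val in inlist]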
Example #15
def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

    ::

        name : name of workflow (default: preproc)
        highpass : boolean (default: True)
        whichvol : which volume of the first run to register to ('first', 'middle', 'mean')

    Inputs::

        inputspec.func : functional runs (filename or list of filenames)
        inputspec.fwhm : fwhm for smoothing with SUSAN
        inputspec.highpass : HWHM in TRs (if created with highpass=True)
        inputspec.subject_id : freesurfer subject id
        inputspec.subjects_dir : freesurfer subjects dir

    Outputs::

        outputspec.reference : volume to which runs are realigned
        outputspec.motion_parameters : motion correction parameters
        outputspec.realigned_files : motion corrected files
        outputspec.motion_plots : plots of motion correction parameters
        outputspec.mask_file : mask file used to mask the brain
        outputspec.smoothed_files : smoothed functional data
        outputspec.highpassed_files : highpassed functional data (if highpass=True)
        outputspec.reg_file : bbregister registration files
        outputspec.reg_cost : bbregister registration cost files

    Example
    -------

    >>> preproc = create_fsl_fs_preproc(whichvol='first')
    >>> preproc.inputs.inputspec.highpass = 128./(2*2.5)
    >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii']
    >>> preproc.inputs.inputspec.subjects_dir = '.'
    >>> preproc.inputs.inputspec.subject_id = 's1'
    >>> preproc.inputs.inputspec.fwhm = 6
    >>> preproc.run() # doctest: +SKIP
    """

    featpreproc = pe.Workflow(name=name)

    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    if highpass:
        inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                                     'fwhm',
                                                                     'subject_id',
                                                                     'subjects_dir',
                                                                     'highpass']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference',
                                                                  'motion_parameters',
                                                                  'realigned_files',
                                                                  'motion_plots',
                                                                  'mask_file',
                                                                  'smoothed_files',
                                                                  'highpassed_files',
                                                                  'reg_file',
                                                                  'reg_cost'
                                                                  ]),
                         name='outputspec')
    else:
        inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                                     'fwhm',
                                                                     'subject_id',
                                                                     'subjects_dir'
                                                                     ]),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference',
                                                                  'motion_parameters',
                                                                  'realigned_files',
                                                                  'motion_plots',
                                                                  'mask_file',
                                                                  'smoothed_files',
                                                                  'reg_file',
                                                                  'reg_cost'
                                                                  ]),
                         name='outputspec')

    """
    Set up a node to define outputs for the preprocessing workflow

    """

    """
    Convert functional images to float representation. Since there can
    be more than one functional run we use a MapNode to convert each
    run.
    """

    img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                 op_string = '',
                                                 suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    featpreproc.connect(inputnode, 'func', img2float, 'in_file')


    """
    Extract the first volume of the first run as the reference
    """

    if whichvol != 'mean':
        extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1),
                              name='extractref')
        featpreproc.connect(img2float, ('out_file', pickfirst), extract_ref, 'in_file')
        featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol), extract_ref, 't_min')
        featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference')


    """
    Realign the functional runs to the reference (1st volume of first run)
    """

    motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True,
                                                      save_plots = True,
                                                      interpolation = 'sinc'),
                                name='realign',
                                iterfield = ['in_file'])
    featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file')
    if whichvol != 'mean':
        featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file')
    else:
        motion_correct.inputs.mean_vol = True
        featpreproc.connect(motion_correct, 'mean_img', outputnode, 'reference')

    featpreproc.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
    featpreproc.connect(motion_correct, 'out_file', outputnode, 'realigned_files')

    """
    Plot the estimated motion parameters
    """

    plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
                            name='plot_motion',
                            iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots')

    """Get the mask from subject for each run
    """

    maskflow = create_getmask_flow()
    featpreproc.connect([(inputnode, maskflow, [('subject_id','inputspec.subject_id'),
                                             ('subjects_dir', 'inputspec.subjects_dir')])])
    maskflow.inputs.inputspec.contrast_type = 't2'
    if whichvol != 'mean':
        featpreproc.connect(extract_ref, 'roi_file', maskflow, 'inputspec.source_file')
    else:
        featpreproc.connect(motion_correct, ('mean_img', pickfirst), maskflow, 'inputspec.source_file')


    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name = 'maskfunc')
    featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), maskfunc, 'in_file2')

    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth(separate_masks=False)

    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    featpreproc.connect(maskfunc, 'out_file', smooth, 'inputnode.in_files')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), smooth, 'inputnode.mask_file')

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                          iterfield=['in_file'],
                          name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), maskfunc3, 'in_file2')


    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')
    featpreproc.connect(maskfunc, ('out_file', tolist), concatnode, 'in1')
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(),name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files')


    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                          iterfield=['in_file','op_string'],
                          name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield = ['in_file'],
                           name='medianval')
    featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), medianval, 'mask_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, 'op_string')

    """
    Perform temporal highpass filtering on the data
    """

    if highpass:
        highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=['in_file'],
                              name='highpass')
        featpreproc.connect(inputnode, ('highpass', highpass_operand), highpass, 'op_string')
        featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
        featpreproc.connect(highpass, 'out_file', outputnode, 'highpassed_files')

    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), outputnode, 'mask_file')
    featpreproc.connect(maskflow, 'outputspec.reg_file', outputnode, 'reg_file')
    featpreproc.connect(maskflow, 'outputspec.reg_cost', outputnode, 'reg_cost')

    return featpreproc
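Likewise, the small helpers referenced here (pickfirst, tolist, chooseindex, getmeanscale, highpass_operand) are defined alongside this workflow in nipype; the two that build fslmaths op_strings look roughly like this (paraphrased):

def getmeanscale(medianvals):
    # rescale each run so that its median intensity becomes 10000
    return ['-mul %.10f' % (10000. / val) for val in medianvals]

def highpass_operand(x):
    # temporal highpass: sigma in volumes, no lowpass ('-1')
    return '-bptf %.10f -1' % x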
Example #16
def legacy(
    bids_base,
    template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified matching entries will be excluded in the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overriden by `n_jobs`).
	out_base : str, optional
		Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
			The `nipype.interfaces.ants.legacy` module was not found on this system.
			You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
			https://github.com/nipy/nipype/issues/3197
		''')

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
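A hypothetical top-level call; all paths are placeholders, and everything else falls back to the defaults documented in the signature above:

legacy(
    '/data/bids',                  # bids_base (hypothetical)
    '/templates/template.nii.gz',  # template (hypothetical)
    functional_match={'task': ['rest']},
    out_base='/data/derivatives',
)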
Example #17
def create_parallelfeat_preproc(name='featpreproc', highpass=True):
    """Preprocess each run with FSL independently of the others

    Parameters
    ----------

    ::

      name : name of workflow (default: featpreproc)
      highpass : boolean (default: True)

    Inputs::

        inputspec.func : functional runs (filename or list of filenames)
        inputspec.fwhm : fwhm for smoothing with SUSAN
        inputspec.highpass : HWHM in TRs (if created with highpass=True)

    Outputs::

        outputspec.reference : volume to which runs are realigned
        outputspec.motion_parameters : motion correction parameters
        outputspec.realigned_files : motion corrected files
        outputspec.motion_plots : plots of motion correction parameters
        outputspec.mask : mask file used to mask the brain
        outputspec.smoothed_files : smoothed functional data
        outputspec.highpassed_files : highpassed functional data (if highpass=True)
        outputspec.mean : mean file

    Example
    -------

    >>> preproc = create_parallelfeat_preproc()
    >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii']
    >>> preproc.inputs.inputspec.fwhm = 5
    >>> preproc.inputs.inputspec.highpass = 128./(2*2.5)
    >>> preproc.base_dir = '/tmp'
    >>> preproc.run() # doctest: +SKIP

    >>> preproc = create_parallelfeat_preproc(highpass=False)
    >>> preproc.inputs.inputspec.func = 'f3.nii'
    >>> preproc.inputs.inputspec.fwhm = 5
    >>> preproc.base_dir = '/tmp'
    >>> preproc.run() # doctest: +SKIP
    """

    featpreproc = pe.Workflow(name=name)

    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    if highpass:
        inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                                     'fwhm',
                                                                     'highpass']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference',
                                                                  'motion_parameters',
                                                                  'realigned_files',
                                                                  'motion_plots',
                                                                  'mask',
                                                                  'smoothed_files',
                                                                  'highpassed_files',
                                                                  'mean']),
                         name='outputspec')
    else:
        inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                                     'fwhm']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference',
                                                                  'motion_parameters',
                                                                  'realigned_files',
                                                                  'motion_plots',
                                                                  'mask',
                                                                  'smoothed_files',
                                                                  'mean']),
                         name='outputspec')

    """
    Set up a node to define outputs for the preprocessing workflow

    """

    """
    Convert functional images to float representation. Since there can
    be more than one functional run we use a MapNode to convert each
    run.
    """

    img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                 op_string = '',
                                                 suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    featpreproc.connect(inputnode, 'func', img2float, 'in_file')

    """
    Extract the first volume of the first run as the reference
    """

    extract_ref = pe.MapNode(interface=fsl.ExtractROI(t_size=1),
                             iterfield=['in_file', 't_min'],
                             name = 'extractref')

    featpreproc.connect(img2float, 'out_file', extract_ref, 'in_file')
    featpreproc.connect(img2float, ('out_file', pickmiddle), extract_ref, 't_min')
    featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference')

    """
    Realign the functional runs to the reference (1st volume of first run)
    """

    motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True,
                                                      save_plots = True),
                                name='realign',
                                iterfield = ['in_file', 'ref_file'])
    featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file')
    featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file')
    featpreproc.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
    featpreproc.connect(motion_correct, 'out_file', outputnode, 'realigned_files')

    """
    Plot the estimated motion parameters
    """

    plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
                            name='plot_motion',
                            iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots')

    """
    Extract the mean volume of the first functional run
    """

    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string = '-Tmean',
                                                   suffix='_mean'),
                          iterfield=['in_file'],
                          name='meanfunc')
    featpreproc.connect(motion_correct, 'out_file', meanfunc, 'in_file')

    """
    Strip the skull from the mean functional to generate a mask
    """

    meanfuncmask = pe.MapNode(interface=fsl.BET(mask = True,
                                             no_output=True,
                                             frac = 0.3),
                              iterfield=['in_file'],
                              name = 'meanfuncmask')
    featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file')

    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name = 'maskfunc')
    featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2')


    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """

    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield = ['in_file'],
                           name='getthreshold')
    featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file')


    """
    Threshold the first run of the functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                 suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """

    featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield = ['in_file', 'mask_file'],
                           name='medianval')
    featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file')
    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    """
    Dilate the mask
    """

    dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                  op_string='-dilF'),
                            iterfield=['in_file'],
                            name='dilatemask')
    featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', outputnode, 'mask')

    """
    Mask the motion corrected functional runs with the dilated mask
    """

    maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='maskfunc2')
    featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2')

    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth()

    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    featpreproc.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files')
    featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')


    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')
    featpreproc.connect(maskfunc2,('out_file', tolist), concatnode, 'in1')
    featpreproc.connect(maskfunc3,('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(),name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files')


    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                          iterfield=['in_file','op_string'],
                          name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, 'op_string')

    """
    Perform temporal highpass filtering on the data
    """

    if highpass:
        highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=['in_file'],
                              name='highpass')
        featpreproc.connect(inputnode, ('highpass', highpass_operand), highpass, 'op_string')
        featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
        featpreproc.connect(highpass, 'out_file', outputnode, 'highpassed_files')

    """
    Generate a mean functional image from the first run
    """

    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                          name='meanfunc3')
    if highpass:
        featpreproc.connect(highpass, 'out_file', meanfunc3, 'in_file')
    else:
        featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean')


    return featpreproc
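getthreshop, which produces the "10% of the 98th percentile" op_string consumed by the threshold node, is defined next to this workflow in nipype; approximately (paraphrased):

def getthreshop(thresh):
    # thresh is one [2nd percentile, 98th percentile] pair per run
    return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]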
Example #18
def easy_thresh(wf_name):
    """
    Workflow for carrying out cluster-based thresholding 
    and colour activation overlaying
    
    Parameters
    ----------
    wf_name : string 
        Workflow name
        
    Returns
    -------
    easy_thresh : object 
        Easy thresh workflow object
    
    Notes
    -----
    
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/easy_thresh/easy_thresh.py>`_
        
    Workflow Inputs::
    
        inputspec.z_stats : string (nifti file)
            z_score stats output for t or f contrast from flameo
        
        inputspec.merge_mask : string (nifti file)
            mask generated from 4D Merged derivative file
        
        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to 
            determine what level of activation would be statistically significant. 
            Increasing this will result in higher estimates of required effect.
        
        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.
        
        inputspec.parameters : string (tuple)
            tuple containing the MNI template and FSLDIR path information
            
    Workflow Outputs::
    
        outputspec.cluster_threshold : string (nifti files)
           the thresholded Z statistic image for each t contrast
        
        outputspec.cluster_index : string (nifti files)
            image of clusters for each t contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
        
        outputspec.overlay_threshold : string (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image : string (nifti files)
           2D color rendered stats overlay picture for each t contrast
        
        outputspec.cluster_localmax_txt : string (text files)
            local maxima text file, defines the coordinates of maximum value
            in the cluster
    
    
    Order of commands:
    
    - Estimate smoothness of the image::
        
        smoothest --mask=merge_mask.nii.gz --zstat=.../flameo/stats/zstat1.nii.gz
        
        arguments
        --mask  :  brain mask volume
        --zstat :  filename of zstat/zfstat image
    
    - Create mask. For details see `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm#fslutils>`_::
        
        fslmaths ../flameo/stats/zstat1.nii.gz 
                 -mas merge_mask.nii.gz 
                 zstat1_mask.nii.gz
        
        arguments
        -mas   : use (following image>0) to mask current image

    - Copy geometry (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) from one image to another::
    
        fslcpgeom MNI152_T1_2mm_brain.nii.gz zstat1_mask.nii.gz
    
    - Cluster based thresholding. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
        
        cluster --dlh=0.0023683100
                --in=zstat1_mask.nii.gz
                --oindex=zstat1_cluster_index.nii.gz
                --olmax=zstat1_cluster_localmax.txt
                --othresh=zstat1_cluster_threshold.nii.gz
                --pthresh=0.0500000000
                --thresh=2.3000000000
                --volume=197071
                
        arguments 
        --in    :    filename of input volume
        --dlh   :    smoothness estimate = sqrt(det(Lambda))
        --oindex  :  filename for output of cluster index
        --othresh :  filename for output of thresholded image
        --olmax   :  filename for output of local maxima text file
        --volume  :  number of voxels in the mask
        --pthresh :  p-threshold for clusters
        --thresh  :  threshold for input volume
        
     Z statistic image is thresholded to show which voxels or clusters of voxels are activated at a particular significance level.
     A Z statistic threshold is used to define contiguous clusters. Then each cluster's estimated significance level (from GRF-theory) 
     is compared with the cluster probability threshold. Significant clusters are then used to mask the original Z statistic image.
    
    - Get the maximum intensity value of the output thresholded image. This is used while rendering the Z statistic image::
        
        fslstats zstat1_cluster_threshold.nii.gz -R
        
        arguments
        -R  : output <min intensity> <max intensity>

    - Rendering. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
         
        overlay 1 0 MNI152_T1_2mm_brain.nii.gz 
               -a zstat1_cluster_threshold.nii.gz 
               2.30 15.67 
               zstat1_cluster_threshold_overlay.nii.gz
               
        slicer zstat1_cluster_threshold_overlay.nii.gz 
               -L  -A 750 
               zstat1_cluster_threshold_overlay.png
    
      The Z statistic range selected for rendering is automatically calculated by default, 
      to run from red (minimum Z statistic after thresholding) to yellow (maximum Z statistic, here 
      maximum intensity).
      
    High Level Workflow Graph:
    
    .. image:: ../images/easy_thresh.dot.png
       :width: 800
    
    
    Detailed Workflow Graph:
    
    .. image:: ../images/easy_thresh_detailed.dot.png
       :width: 800
               
    Examples
    --------
    
    >>> import easy_thresh
    >>> preproc = easy_thresh.easy_thresh("new_workflow")
    >>> preproc.inputs.inputspec.z_stats= 'flameo/stats/zstat1.nii.gz'
    >>> preproc.inputs.inputspec.merge_mask = 'merge_mask/alff_Z_fn2standard_merged_mask.nii.gz'
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.inputs.inputspec.parameters = ('/usr/local/fsl/', 'MNI152')
    >>> preproc.run()  # doctest: +SKIP
    
    """

    easy_thresh = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=[
        'z_stats', 'merge_mask', 'z_threshold', 'p_threshold', 'parameters'
    ]),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'cluster_threshold', 'cluster_index', 'cluster_localmax_txt',
        'overlay_threshold', 'rendered_image'
    ]),
                         name='outputspec')

    ### fsl easythresh
    # estimate image smoothness
    smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                 name='smooth_estimate',
                                 iterfield=['zstat_file'])

    # run clustering after fixing stats header for talspace
    zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                            name='zstat_mask',
                            iterfield=['in_file'])
    #operations to perform
    #-mas use (following image>0) to mask current image
    zstat_mask.inputs.op_string = '-mas %s'

    #fslcpgeom
    #copy certain parts of the header information (image dimensions,
    #voxel dimensions, voxel dimensions units string, image orientation/origin
    #or qform/sform info) from one image to another
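    # `copy_geom` comes from CPAC.easy_thresh and is not shown in this
    # listing; a minimal sketch (an assumption) that shells out to fslcpgeom
    # and returns the modified file:
    def copy_geom(infile_a, infile_b):
        import subprocess
        # fslcpgeom copies the header geometry of infile_a onto infile_b in place
        subprocess.check_call(['fslcpgeom', infile_a, infile_b])
        return infile_b
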
    copy_geometry = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=copy_geom),
                               name='copy_geometry',
                               iterfield=['infile_a', 'infile_b'])

    ##cluster-based thresholding
    #After carrying out the initial statistical test, the resulting
    #Z statistic image is then normally thresholded to show which voxels or
    #clusters of voxels are activated at a particular significance level.
    #A Z statistic threshold is used to define contiguous clusters.
    #Then each cluster's estimated significance level (from GRF-theory) is
    #compared with the cluster probability threshold. Significant clusters
    #are then used to mask the original Z statistic image for later production
    #of colour blobs.This method of thresholding is an alternative to
    #Voxel-based correction, and is normally more sensitive to activation.
    #    cluster = pe.MapNode(interface=fsl.Cluster(),
    #                            name='cluster',
    #                            iterfield=['in_file', 'volume', 'dlh'])
    #    #output of cluster index (in size order)
    #    cluster.inputs.out_index_file = True
    #    #thresholded image
    #    cluster.inputs.out_threshold_file = True
    #    #local maxima text file
    #    #defines the cluster coordinates
    #    cluster.inputs.out_localmax_txt_file = True
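
    # `call_cluster` is defined elsewhere in CPAC.easy_thresh and is not
    # shown here; a minimal sketch (an assumption) that assembles and runs
    # the FSL `cluster` command and returns the three output paths
    # (`parameters` is accepted but unused in this sketch):
    def call_cluster(in_file, volume, dlh, threshold, pthreshold, parameters):
        import os
        import subprocess
        prefix = os.path.basename(in_file).split('.')[0]
        index_file = os.path.abspath(prefix + '_cluster_index.nii.gz')
        threshold_file = os.path.abspath(prefix + '_cluster_threshold.nii.gz')
        localmax_txt_file = os.path.abspath(prefix + '_cluster_localmax.txt')
        subprocess.check_call(['cluster',
                               '--in=%s' % in_file,
                               '--dlh=%.10f' % dlh,
                               '--thresh=%.10f' % threshold,
                               '--pthresh=%.10f' % pthreshold,
                               '--volume=%d' % int(volume),
                               '--oindex=%s' % index_file,
                               '--othresh=%s' % threshold_file,
                               '--olmax=%s' % localmax_txt_file])
        return index_file, threshold_file, localmax_txt_file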

    cluster = pe.MapNode(util.Function(
        input_names=[
            'in_file', 'volume', 'dlh', 'threshold', 'pthreshold', 'parameters'
        ],
        output_names=['index_file', 'threshold_file', 'localmax_txt_file'],
        function=call_cluster),
                         name='cluster',
                         iterfield=['in_file', 'volume', 'dlh'])

    #max and minimum intensity values
    image_stats = pe.MapNode(interface=fsl.ImageStats(),
                             name='image_stats',
                             iterfield=['in_file'])
    image_stats.inputs.op_string = '-R'

    #create tuple of z_threshold and max intensity value of threshold file
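    # `get_tuple` is not shown in this listing; a minimal sketch (an
    # assumption): infile_b is the (min, max) pair from 'fslstats -R' and
    # only the max is kept:
    def get_tuple(infile_a, infile_b):
        return (infile_a, infile_b[1])
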
    create_tuple = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=get_tuple),
                              name='create_tuple',
                              iterfield=['infile_b'])

    #colour activation overlaying
    overlay = pe.MapNode(interface=fsl.Overlay(),
                         name='overlay',
                         iterfield=['stat_image', 'stat_thresh'])
    overlay.inputs.transparency = True
    overlay.inputs.auto_thresh_bg = True
    overlay.inputs.out_type = 'float'

    #colour rendering
    slicer = pe.MapNode(interface=fsl.Slicer(),
                        name='slicer',
                        iterfield=['in_file'])
    #set max picture width
    slicer.inputs.image_width = 750
    # set output all axial slices into one picture
    slicer.inputs.all_axial = True

    #function mapnode to get the standard fsl brain image
    #based on parameters such as FSLDIR, MNI and voxel size
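    # `get_standard_background_img` is not shown in this listing; a minimal
    # sketch (an assumption) that picks the MNI template matching the voxel
    # size of in_file from FSLDIR, using nibabel:
    def get_standard_background_img(in_file, file_parameters):
        import os
        import nibabel as nb
        FSLDIR, MNI = file_parameters
        voxel_size = int(nb.load(in_file).header.get_zooms()[2])
        return os.path.join(FSLDIR, 'data', 'standard',
                            '%s_T1_%dmm_brain.nii.gz' % (MNI, voxel_size))
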
    get_backgroundimage = pe.MapNode(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                     name='get_bckgrndimg1',
                                     iterfield=['in_file'])

    #function node to get the standard fsl brain image
    #outputs single file
    get_backgroundimage2 = pe.Node(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                   name='get_backgrndimg2')

    #connections
    easy_thresh.connect(inputnode, 'z_stats', smooth_estimate, 'zstat_file')
    easy_thresh.connect(inputnode, 'merge_mask', smooth_estimate, 'mask_file')

    easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
    easy_thresh.connect(inputnode, 'merge_mask', zstat_mask, 'operand_files')

    easy_thresh.connect(zstat_mask, 'out_file', get_backgroundimage, 'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage, 'out_file', copy_geometry,
                        'infile_a')
    easy_thresh.connect(zstat_mask, 'out_file', copy_geometry, 'infile_b')

    easy_thresh.connect(copy_geometry, 'out_file', cluster, 'in_file')
    easy_thresh.connect(inputnode, 'z_threshold', cluster, 'threshold')
    easy_thresh.connect(inputnode, 'p_threshold', cluster, 'pthreshold')
    easy_thresh.connect(smooth_estimate, 'volume', cluster, 'volume')
    easy_thresh.connect(smooth_estimate, 'dlh', cluster, 'dlh')
    easy_thresh.connect(inputnode, 'parameters', cluster, 'parameters')

    easy_thresh.connect(cluster, 'threshold_file', image_stats, 'in_file')

    easy_thresh.connect(image_stats, 'out_stat', create_tuple, 'infile_b')
    easy_thresh.connect(inputnode, 'z_threshold', create_tuple, 'infile_a')

    easy_thresh.connect(cluster, 'threshold_file', overlay, 'stat_image')
    easy_thresh.connect(create_tuple, 'out_file', overlay, 'stat_thresh')

    easy_thresh.connect(inputnode, 'merge_mask', get_backgroundimage2,
                        'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage2,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage2, 'out_file', overlay,
                        'background_image')

    easy_thresh.connect(overlay, 'out_file', slicer, 'in_file')

    easy_thresh.connect(cluster, 'threshold_file', outputnode,
                        'cluster_threshold')
    easy_thresh.connect(cluster, 'index_file', outputnode, 'cluster_index')
    easy_thresh.connect(cluster, 'localmax_txt_file', outputnode,
                        'cluster_localmax_txt')
    easy_thresh.connect(overlay, 'out_file', outputnode, 'overlay_threshold')
    easy_thresh.connect(slicer, 'out_file', outputnode, 'rendered_image')

    return easy_thresh
Example n. 19
0
def create_feat_preproc_wf(name='feat_preproc'):
    feat = pe.Workflow(name=name)
    feat.config['execution']['remove_unnecessary_outputs'] = False

    # define inputs
    inputspec = pe.Node(ul.IdentityInterface(fields=['in_file', 'highpass', 'TR']),
                        name='inputspec')

    # Get 2 and 98th percentiles
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')

    # Threshold each functional run at 10% of its 98th percentile
    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')

    # get median value using the mask
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')

    # dilate the mask
    dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                     op_string='-dilF'),
                            iterfield=['in_file'],
                            name='dilatemask')

    # mask the data with dilated mask
    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc')

    # scale the run to have a median of 10000
    intnorm = pe.MapNode(interface=fsl.ImageMaths(suffix='_intnorm'),
                         iterfield=['in_file', 'op_string'],
                         name='intnorm')

    # get the mean from each run
    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc')

    # calculate the cutoff
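    # `highpasssetup` is not shown in this listing; a plausible sketch (an
    # assumption): convert a cutoff in seconds plus the TR into an fslmaths
    # temporal-filter op_string whose trailing '-add' restores the mean image
    # supplied as in_file2:
    def highpasssetup(highpass, TR):
        return '-bptf %.10f -1 -add' % (highpass / (2.0 * TR))
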
    calculatehpcutoff = pe.Node(interface=ul.Function(function=highpasssetup, input_names=["highpass", "TR"],
                                  output_names=["op_string"]), name="calculatehpcutoff")

    # highpass the data
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_hpf'),
                          iterfield=['in_file', 'in_file2'],
                          name='highpass')

    # outputspec
    outputspec = pe.Node(ul.IdentityInterface(fields=['filtered_functional_data',
                                                      'mean_file','scalingfactor']),
                        name='outputspec')
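
    # the following helpers are not shown in this listing; minimal sketches
    # (assumptions, modelled on the standard nipype FEAT example):
    def getthreshop(thresh):
        # threshold at 10% of each run's 98th percentile, then binarise
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]

    def getinormscale(medianvals):
        # scale each run so that its median intensity becomes 10000
        return ['-mul %.10f' % (10000. / val) for val in medianvals]

    def pickfirst(files):
        return files[0] if isinstance(files, list) else files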

    feat.connect([(inputspec, getthresh, [('in_file', 'in_file')]),
                  (inputspec, threshold, [('in_file', 'in_file')]),
                  (inputspec, medianval, [('in_file', 'in_file')]),
                  (inputspec, calculatehpcutoff, [('highpass', 'highpass'),
                                         ('TR', 'TR')]),
                  (getthresh, threshold, [(('out_stat', getthreshop), 'op_string')]),
                  (threshold, medianval, [('out_file', 'mask_file')]),
                  (threshold, dilatemask, [('out_file', 'in_file')]),
                  (inputspec, maskfunc, [('in_file', 'in_file')]),
                  (dilatemask, maskfunc, [('out_file', 'in_file2')]),
                  (maskfunc, intnorm, [('out_file', 'in_file')]),
                  (medianval, intnorm, [(('out_stat', getinormscale), 'op_string')]),
                  (intnorm, meanfunc, [('out_file', 'in_file')]),
                  (calculatehpcutoff, highpass, [('op_string', 'op_string')]),
                  (intnorm, highpass, [('out_file', 'in_file')]),
                  (meanfunc, highpass, [('out_file', 'in_file2')]),
                  (highpass, outputspec, [('out_file','filtered_functional_data')]),
                  (meanfunc, outputspec, [(('out_file', pickfirst), 'mean_file')]),
                  (medianval, outputspec, [('out_stat', 'scalingfactor')])
                  ])

    return feat
Example n. 20
0
import os, datetime, sys

import nipype.interfaces.io as nio           # Data i/o
import nipype.interfaces.fsl as fsl          # fsl
import nipype.interfaces.utility as util     # utility
import nipype.pipeline.engine as pe          # pypeline engine

import utils # code by AM specific to this project but multiple workflows

fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

#Set up model fitting workflow (we assume data has been preprocessed with fmriprep)
wf = pe.Workflow(name='assignmp')

applymask = pe.Node(interface=fsl.ApplyMask(), name="applymask")
get_percentile_threshold = pe.Node(interface=fsl.ImageStats(), name="get_percentile_threshold")
assign_voxels = pe.MapNode(interface=fsl.ImageMaths(), name="assign_voxels", iterfield=["op_string"])

wf.connect([
    (applymask, get_percentile_threshold, [('out_file', 'in_file')]),
    (applymask, assign_voxels, [('out_file', 'in_file')]),
    (get_percentile_threshold, assign_voxels, [(('out_stat', utils.fslmaths_threshold_roi_opstring), 'op_string')]),
])
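
# `utils.fslmaths_threshold_roi_opstring` is project-specific and not shown;
# a plausible sketch (purely an assumption, the real helper may differ) that
# turns the percentile value returned by fslstats into one
# threshold-and-binarise op_string per ROI:
def fslmaths_threshold_roi_opstring(out_stat):
    # hypothetical helper, for illustration only
    return ['-thr %.10f -bin' % out_stat]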

# Data input and configuration!
infosource = pe.Node(util.IdentityInterface(fields=['cope_file', 'roi_mask', 'percentile_threshold', 'out_files']),
                     name="infosource")
# output
datasink = pe.Node(nio.DataSink(), name='datasink')

wf.connect([
Example n. 21
0
def plot_vbm_correlation(vbm_4D_image, mat, p_value_image):
    import numpy as np
    import matplotlib.pyplot as plt
    import nipype.interfaces.fsl as fsl
    import ntpath
    import sys
    import matplotlib
    import os

    img_basename_no_ext = ntpath.basename(vbm_4D_image)[:-7]
    mat_basename_no_ext = ntpath.basename(mat)[:-4]

    contrast_no = ntpath.basename(p_value_image)[-9:-7]

    # get the coordinates of the voxel with the highest p value
    fslstats = fsl.ImageStats()
    fslstats.inputs.in_file = p_value_image
    fslstats.inputs.op_string = '-x'  # returns coordinates of the maximum voxel
    stat_output = fslstats.run()
    voxel_coordinate = stat_output.outputs.out_stat
    # convert coordinates from float to int to suit fslmeants
    voxel_coordinate = [int(dim) for dim in voxel_coordinate]


    # we use fslmeants from fsl to get the intensity of the 4D image at a specific coordinate:
    # fslmeants -i 4d_image -c 47 73 9
    fslmeants = fsl.ImageMeants()
    fslmeants.inputs.in_file = vbm_4D_image
    fslmeants.inputs.spatial_coord = voxel_coordinate
    outputs = fslmeants.run()
    # here the output is not displayed on the screen, rather as a simple txt
    # the return results are in dict format, we extract the name of the file
    ts = outputs.outputs.get()['out_file']
    voxel_values = np.loadtxt(ts)  # returns the values as an array
    voxel_values = list(voxel_values)

    # now we extract the behavior vector from the .mat file
    with open(mat, 'r') as f:
        lines = f.readlines()
    behav = []
    # the values of interest sit on lines 5 to 36 of the design .mat file
    for line in lines[5:37]:
        behav.append(float(line[13:-2]))
    # sanity check
    print("length of voxel_values -> {0}".format(len(voxel_values)))
    print("length of behav_values -> {0}".format(len(behav)))
    if len(voxel_values) != len(behav):
        sys.exit('######ERROR####')

    # the regression line
    coef = np.polyfit(voxel_values, behav, 1)
    poly1d_fn = np.poly1d(coef)

    # get the correlation coefficient,
    # rounded to 5 digits after the decimal point
    correlation_coef = round(np.corrcoef(voxel_values, behav)[0, 1], 5)

    plt.rcParams['font.family'] = 'Arial'

    ax = plt.axes()
    ax.spines['bottom'].set_color('#ffffffff')
    ax.spines['top'].set_color('#ffffffff')
    ax.spines['right'].set_color('#ffffffff')
    ax.spines['left'].set_color('#ffffffff')
    ax.tick_params(axis='x', colors='#ffffffff')
    ax.tick_params(axis='y', colors='#ffffffff')

    plt.xticks(fontsize=14, rotation=45, color='#ffffffff')
    plt.yticks(fontsize=14, color='#ffffffff')
    plt.scatter(voxel_values[:16], behav[:16], marker='o', color='#e41a1c')  # e41a1c -> red
    plt.scatter(voxel_values[16:], behav[16:], marker='<', color='#377eb8')  # 377eb8 -> blue
    plt.ylabel("{0}".format(mat_basename_no_ext), fontsize=18, fontname='Arial', color='#ffffffff')
    plt.plot(voxel_values, poly1d_fn(voxel_values), color='#ffffffff')  # plot the regression line
    # type the coef on the graph, first two arguments the coordinates of the text (top left corner)
    plt.text(min(voxel_values), max(behav), "r $= {0}$".format(
        correlation_coef), fontname="Arial", style='italic', fontsize=14, color='#ffffffff')

    plt.savefig("/Users/amr/Dropbox/thesis/3D/VBM_corr/{0}_{1}_{2}.svg".format(
        img_basename_no_ext, mat_basename_no_ext, contrast_no), format='svg')
    plt.close()

    os.remove(ts)  # delete the file of the voxel values as it is no longer needed
    os.remove('stat_result.json')
Example n. 22
0
def prepare_flair_intNorm(flair_prep_dir, out_dir, wd_dir, crash_dir, subjects_sessions, flair_acq, n_cpu=-1):
    out_dir.mkdir(exist_ok=True, parents=True)
    export_version(out_dir)

    wf = Workflow(name="prepare_flair_intNorm")
    wf.base_dir = wd_dir
    wf.config.remove_unnecessary_outputs = False
    wf.config["execution"]["crashdump_dir"] = crash_dir
    wf.config["monitoring"]["enabled"] = "true"

    subjects, sessions = list(zip(*subjects_sessions))
    infosource = Node(niu.IdentityInterface(fields=["subject", "session", "flair_acq"]), name="infosource")
    infosource.iterables = [("subject", subjects),
                            ("session", sessions),
                            ]
    infosource.synchronize = True

    def subject_info_fnc(flair_prep_dir, subject, session, flair_acq):
        from pathlib import Path

        sub_ses = f"sub-{subject}_ses-{session}"
        flair_files = list(Path(flair_prep_dir).glob(
            f"sub-{subject}/ses-{session}/anat/{sub_ses}_acq-{flair_acq}_*_FLAIR_biascorr.nii.gz"))
        assert len(flair_files) == 1, f"Expected one file, but found {flair_files}"
        flair_file = flair_files[0]

        brain_masks = list(Path(flair_prep_dir).glob(
            f"sub-{subject}/ses-{session}/anat/{sub_ses}_space-flair{flair_acq}_desc-brainmask.nii.gz"))
        assert len(brain_masks) > 0, f"Expected at least one file, but found {brain_masks}"
        brain_mask = brain_masks[0]

        out_list = [flair_file, brain_mask]
        return [str(o) for o in out_list]  # as Path is not taken everywhere

    grabber = Node(niu.Function(input_names=["flair_prep_dir", "subject", "session", "flair_acq"],
                                output_names=["flair_file", "brain_mask"],
                                function=subject_info_fnc),
                   name="grabber"
                   )
    grabber.inputs.flair_prep_dir = flair_prep_dir
    grabber.inputs.flair_acq = flair_acq

    wf.connect([(infosource, grabber, [("subject", "subject"),
                                       ("session", "session"),
                                       ]
                 )
                ]
               )

    # adapted from https://gist.github.com/lebedov/94f1caf8a792d80cd91e7b99c1a0c1d7
    # Intensity normalization - subtract minimum, then divide by difference of maximum and minimum:
    img_range = Node(interface=fsl.ImageStats(op_string='-k %s -R'), name='img_range')
    wf.connect(grabber, "flair_file", img_range, "in_file")
    wf.connect(grabber, "brain_mask", img_range, "mask_file")

    def func(in_stat):
        min_val, max_val = in_stat
        return '-sub %s -div %s' % (min_val, (max_val - min_val))

    stat_to_op_string = Node(interface=niu.Function(input_names=['in_stat'],
                                                    output_names=['op_string'],
                                                    function=func),
                             name='stat_to_op_string')
    wf.connect(img_range, "out_stat", stat_to_op_string, "in_stat")

    flair_normalized = Node(interface=fsl.ImageMaths(), name='flair_normalized')
    wf.connect(stat_to_op_string, "op_string", flair_normalized, "op_string")
    wf.connect(grabber, "flair_file", flair_normalized, "in_file")

    base_directory = str(out_dir.parent)
    out_path_base = str(out_dir.name)
    ds_flair_biascorr_intNorm = Node(DerivativesDataSink(base_directory=base_directory, out_path_base=out_path_base),
                                     name="ds_flair_biascorr_intNorm")
    ds_flair_biascorr_intNorm.inputs.suffix = "FLAIR_biascorrIntNorm"
    wf.connect(flair_normalized, "out_file", ds_flair_biascorr_intNorm, "in_file")
    wf.connect(grabber, "flair_file", ds_flair_biascorr_intNorm, "source_file")

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_cpu})
Example n. 23
0
def get_mask_vox(self, msk):
    mask_stat = fsl.ImageStats(in_file=msk, op_string='-V')
    mask_run = mask_stat.run()
    mask_vox = list(mask_run.outputs.out_stat)
    return mask_vox[0]
Example n. 24
0
def create_prep(name='preproc'):
    """ Base preprocessing workflow for task and resting state fMRI
    
    Parameters
    ----------
    name : name of workflow. Default = 'preproc'
    
    Inputs
    ------
    inputspec.fssubject_id : 
    inputspec.fssubject_dir :
    inputspec.func :
    inputspec.highpass :
    inputspec.num_noise_components :
    inputspec.ad_normthresh :
    inputspec.ad_zthresh :
    inputspec.tr :
    inputspec.interleaved :
    inputspec.sliceorder :
    inputspec.compcor_select :
    inputspec.highpass_sigma :
    inputspec.lowpass_sigma :
    inputspec.reg_params :
    inputspec.FM_TEdiff :
    inputspec.FM_Echo_spacing :
    inputspec.FM_sigma :
    
    Outputs
    -------
    outputspec.reference : 
    outputspec.motion_parameters : 
    outputspec.realigned_files :
    outputspec.mask :
    outputspec.smoothed_files :
    outputspec.highpassed_files :
    outputspec.mean :
    outputspec.combined_motion :
    outputspec.outlier_files :
    outputspec.mask :
    outputspec.reg_cost :
    outputspec.reg_file :
    outputspec.noise_components :
    outputspec.tsnr_file :
    outputspec.stddev_file :
    outputspec.filter_file :
    outputspec.scaled_files :
    outputspec.z_img :
    outputspec.motion_plots :
    outputspec.FM_unwarped_mean :
    outputspec.FM_unwarped_epi :
    
    Returns
    -------
    workflow : preprocessing workflow
    """

    import nipype.interfaces.fsl as fsl  # fsl
    import nipype.algorithms.rapidart as ra  # rapid artifact detection
    from nipype.workflows.smri.freesurfer.utils import create_getmask_flow
    from modular_nodes import create_mod_smooth, mod_realign, mod_despike
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    preproc = pe.Workflow(name=name)

    # Compcorr node
    compcor = create_compcorr()

    # Input node
    inputnode = pe.Node(util.IdentityInterface(fields=[
        'fssubject_id', 'fssubject_dir', 'func', 'highpass',
        'num_noise_components', 'ad_normthresh', 'ad_zthresh', 'tr',
        'do_slicetime', 'sliceorder', 'compcor_select', 'highpass_freq',
        'lowpass_freq', 'reg_params', 'FM_TEdiff', 'FM_Echo_spacing',
        'FM_sigma', 'motion_correct_node', 'smooth_type', 'surface_fwhm',
        'filter_type', 'timepoints_to_remove', 'do_whitening',
        'regress_before_PCA', 'realign_parameters', 'do_despike', 'anatomical'
    ]),
                        name='inputspec')

    # Separate input node for FWHM
    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input')

    # strip ids
    strip_rois = pe.MapNode(fsl.ExtractROI(),
                            name='extractroi',
                            iterfield='in_file')
    strip_rois.inputs.t_size = -1
    preproc.connect(inputnode, 'timepoints_to_remove', strip_rois, 't_min')

    # convert BOLD images to float
    img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                    op_string='',
                                                    suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')

    #afni despike
    despike = pe.MapNode(util.Function(input_names=['in_file', "do_despike"],
                                       output_names=["out_file"],
                                       function=mod_despike),
                         name="despike",
                         iterfield=["in_file"])
    preproc.connect(inputnode, "do_despike", despike, "do_despike")
    # define the motion correction node
    #motion_correct = pe.Node(interface=FmriRealign4d(),
    #                            name='realign')

    motion_correct = pe.Node(util.Function(
        input_names=[
            'node', 'in_file', 'tr', 'do_slicetime', 'sliceorder', "parameters"
        ],
        output_names=['out_file', 'par_file', 'parameter_source'],
        function=mod_realign),
                             name="mod_realign")

    preproc.connect(inputnode, 'motion_correct_node', motion_correct, 'node')

    # construct motion plots
    #plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
    #                         name='plot_motion',
    #                         iterfield=['in_file'])

    # rapidArt for artifactual timepoint detection
    ad = pe.Node(ra.ArtifactDetect(save_plot=False), name='artifactdetect')

    # extract the mean volume of the first functional run
    meanfunc = art_mean_workflow()

    # generate a freesurfer workflow that will return the mask
    getmask = create_getmask_flow()

    # create a SUSAN smoothing workflow, and smooth each run with
    # 75% of the median value for each run as the brightness
    # threshold.
    smooth = create_mod_smooth(name="modular_smooth", separate_masks=False)
    preproc.connect(inputnode, 'smooth_type', smooth, 'inputnode.smooth_type')
    # choose susan function
    """
    The following node selects smooth or unsmoothed data
    depending on the fwhm. This is because SUSAN defaults
    to smoothing the data with about the voxel size of
    the input data if the fwhm parameter is less than 1/3 of
    the voxel size.
    """
    choosesusan = pe.Node(util.Function(
        input_names=['fwhm', 'motion_files', 'smoothed_files'],
        output_names=['cor_smoothed_files'],
        function=choose_susan),
                          name='select_smooth')

    # scale the median value of each run to 10,000
    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='scale_median')

    # determine the median value of the MASKED functional runs
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file'],
                           name='compute_median_val')

    # temporal highpass filtering
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')

    # Calculate the z-score of output
    zscore = pe.MapNode(interface=util.Function(
        input_names=['image', 'outliers'],
        output_names=['z_img'],
        function=z_image),
                        name='z_score',
                        iterfield=['image', 'outliers'])

    # declare some node inputs...
    #plot_motion.iterables = ('plot_type', ['rotations', 'translations'])

    #ad.inputs.parameter_source = 'FSL'
    meanfunc.inputs.inputspec.parameter_source = 'FSL'
    ad.inputs.mask_type = 'file'
    ad.inputs.use_differences = [True, False]
    getmask.inputs.inputspec.contrast_type = 't2'
    getmask.inputs.register.out_fsl_file = True
    fssource = getmask.get_node('fssource')

    # make connections...
    preproc.connect(inputnode, 'fssubject_id', getmask, 'inputspec.subject_id')
    preproc.connect(inputnode, 'ad_normthresh', ad, 'norm_threshold')
    preproc.connect(inputnode, 'ad_zthresh', ad, 'zintensity_threshold')
    preproc.connect(inputnode, 'tr', motion_correct, 'tr')
    preproc.connect(inputnode, 'realign_parameters', motion_correct,
                    'parameters')
    preproc.connect(motion_correct, 'parameter_source', ad, 'parameter_source')
    preproc.connect(inputnode, 'do_slicetime', motion_correct, 'do_slicetime')
    preproc.connect(inputnode, 'sliceorder', motion_correct, 'sliceorder')
    preproc.connect(inputnode, 'compcor_select', compcor, 'inputspec.selector')
    preproc.connect(inputnode, 'fssubject_dir', getmask,
                    'inputspec.subjects_dir')

    #preproc.connect(inputnode, 'func',
    #                img2float, 'in_file')
    preproc.connect(inputnode, 'func', strip_rois, 'in_file')
    preproc.connect(strip_rois, 'roi_file', img2float, 'in_file')

    preproc.connect(img2float, 'out_file', despike, "in_file")
    preproc.connect(despike, "out_file", motion_correct, 'in_file')
    #preproc.connect(motion_correct, 'par_file',
    #                plot_motion, 'in_file')
    preproc.connect(motion_correct, 'out_file', meanfunc,
                    'inputspec.realigned_files')
    preproc.connect(motion_correct, 'par_file', meanfunc,
                    'inputspec.realignment_parameters')
    preproc.connect(meanfunc, 'outputspec.mean_image', getmask,
                    'inputspec.source_file')
    preproc.connect(inputnode, 'num_noise_components', compcor,
                    'inputspec.num_components')
    preproc.connect(inputnode, 'regress_before_PCA', compcor,
                    'inputspec.regress_before_PCA')
    preproc.connect(motion_correct, 'out_file', compcor,
                    'inputspec.realigned_file')
    preproc.connect(meanfunc, 'outputspec.mean_image', compcor,
                    'inputspec.mean_file')
    preproc.connect(fssource, 'aseg', compcor, 'inputspec.fsaseg_file')
    preproc.connect(getmask, ('outputspec.reg_file', pickfirst), compcor,
                    'inputspec.reg_file')
    preproc.connect(ad, 'outlier_files', compcor, 'inputspec.outlier_files')
    preproc.connect(motion_correct, 'par_file', compcor,
                    'inputspec.realignment_parameters')
    preproc.connect(motion_correct, 'out_file', ad, 'realigned_files')
    preproc.connect(motion_correct, 'par_file', ad, 'realignment_parameters')
    preproc.connect(getmask, ('outputspec.mask_file', pickfirst), ad,
                    'mask_file')
    preproc.connect(getmask, ('outputspec.mask_file', pickfirst), medianval,
                    'mask_file')
    preproc.connect(inputnode_fwhm, 'fwhm', smooth, 'inputnode.fwhm')
    preproc.connect(motion_correct, 'out_file', smooth, 'inputnode.in_files')
    preproc.connect(getmask, ('outputspec.mask_file', pickfirst), smooth,
                    'inputnode.mask_file')
    preproc.connect(getmask, ('outputspec.reg_file', pickfirst), smooth,
                    'inputnode.reg_file')
    preproc.connect(inputnode, 'surface_fwhm', smooth,
                    'inputnode.surface_fwhm')
    preproc.connect(inputnode, 'fssubject_dir', smooth, 'inputnode.surf_dir')
    preproc.connect(smooth, 'outputnode.smoothed_files', choosesusan,
                    'smoothed_files')
    preproc.connect(motion_correct, 'out_file', choosesusan, 'motion_files')
    preproc.connect(inputnode_fwhm, 'fwhm', choosesusan, 'fwhm')
    preproc.connect(choosesusan, 'cor_smoothed_files', meanscale, 'in_file')
    preproc.connect(choosesusan, 'cor_smoothed_files', medianval, 'in_file')
    preproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                    'op_string')
    preproc.connect(inputnode, ('highpass', highpass_operand), highpass,
                    'op_string')
    preproc.connect(meanscale, 'out_file', highpass, 'in_file')
    preproc.connect(highpass, 'out_file', zscore, 'image')
    preproc.connect(ad, 'outlier_files', zscore, 'outliers')

    # create output node
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'mean', 'motion_parameters', 'realigned_files', 'smoothed_files',
        'highpassed_files', 'combined_motion', 'outlier_files',
        'outlier_stat_files', 'mask', 'reg_cost', 'reg_file', 'reg_fsl_file',
        'noise_components', 'tsnr_file', 'stddev_file', 'tsnr_detrended',
        'filter_file', 'scaled_files', 'unmasked_fullspectrum', 'z_img',
        'motion_plots', 'FM_unwarped_epi', 'FM_unwarped_mean', 'vsm_file',
        'bandpassed_file', 'intensity_files', 'noise_mask', 'csf_mask'
    ]),
                         name='outputspec')

    # make output connection
    preproc.connect(meanfunc, 'outputspec.mean_image', outputnode, 'mean')
    preproc.connect(motion_correct, 'par_file', outputnode,
                    'motion_parameters')
    preproc.connect(motion_correct, 'out_file', outputnode, 'realigned_files')
    preproc.connect(highpass, 'out_file', outputnode, 'highpassed_files')
    preproc.connect(ad, 'norm_files', outputnode, 'combined_motion')
    preproc.connect(ad, 'outlier_files', outputnode, 'outlier_files')
    preproc.connect(ad, 'intensity_files', outputnode, 'intensity_files')
    preproc.connect(ad, 'statistic_files', outputnode, 'outlier_stat_files')
    preproc.connect(compcor, 'outputspec.noise_components', outputnode,
                    'noise_components')
    preproc.connect(compcor, 'outputspec.noise_mask', outputnode, 'noise_mask')
    preproc.connect(compcor, 'outputspec.csf_mask', outputnode, 'csf_mask')
    preproc.connect(getmask, 'outputspec.mask_file', outputnode, 'mask')
    preproc.connect(getmask, 'register.out_fsl_file', outputnode,
                    'reg_fsl_file')
    preproc.connect(getmask, 'outputspec.reg_file', outputnode, 'reg_file')
    preproc.connect(getmask, 'outputspec.reg_cost', outputnode, 'reg_cost')
    preproc.connect(choosesusan, 'cor_smoothed_files', outputnode,
                    'smoothed_files')
    preproc.connect(compcor, 'outputspec.tsnr_file', outputnode, 'tsnr_file')
    preproc.connect(compcor, 'outputspec.stddev_file', outputnode,
                    'stddev_file')
    preproc.connect(compcor, 'outputspec.tsnr_detrended', outputnode,
                    'tsnr_detrended')
    preproc.connect(zscore, 'z_img', outputnode, 'z_img')
    #preproc.connect(plot_motion,'out_file',
    #                outputnode,'motion_plots')

    return preproc
Example n. 25
0
fs_voltransform.inputs.subjects_dir = fs_dir
fs_voltransform.inputs.interp = 'nearest'
psb6351_wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
psb6351_wf.connect(fs_register, 'out_reg_file', fs_voltransform, 'reg_file')
psb6351_wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')

# Mask the functional runs with the extracted mask
maskfunc = pe.MapNode(fsl.ImageMaths(suffix='_bet', op_string='-mas'),
                      iterfield=['in_file'],
                      name='maskfunc')
psb6351_wf.connect(tshifter, 'out_file', maskfunc, 'in_file')
psb6351_wf.connect(fs_voltransform, 'transformed_file', maskfunc, 'in_file2')

# Smooth each run using SUSAN with the brightness threshold set to 75%
# of the median value for each run and a mask constituting the mean functional
smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file'],
                           name='smooth_median')
psb6351_wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
psb6351_wf.connect(fs_voltransform, 'transformed_file', smooth_median,
                   'mask_file')

# Calculate the mean functional
smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean',
                                            suffix='_mean'),
                             iterfield=['in_file'],
                             name='smooth_meanfunc')
psb6351_wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

smooth_merge = pe.Node(util.Merge(2, axis='hstack'), name='smooth_merge')
psb6351_wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
Example n. 26
0
NodeHash_30f69e0.inputs.anon = True
NodeHash_30f69e0.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_30f69e0.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
NodeHash_1d000c0 = pe.Node(interface=fsl.SliceTimer(), name='NodeName_1d000c0')

#Wraps command **mcflirt**
NodeHash_22f2e80 = pe.Node(interface=fsl.MCFLIRT(), name='NodeName_22f2e80')

#Computes the time-course SNR for a time series
NodeHash_50c02c0 = pe.Node(interface=confounds.TSNR(), name='NodeName_50c02c0')
NodeHash_50c02c0.inputs.regress_poly = 3

#Wraps command **fslstats**
NodeHash_3ac27f0 = pe.Node(interface=fsl.ImageStats(), name='NodeName_3ac27f0')
NodeHash_3ac27f0.inputs.op_string = '-p 98'

#Wraps command **fslmaths**
NodeHash_30f6760 = pe.Node(interface=fsl.Threshold(), name='NodeName_30f6760')
NodeHash_30f6760.inputs.args = '-bin'

#Anatomical compcor: for inputs and outputs, see CompCor.
NodeHash_325da10 = pe.Node(interface=confounds.ACompCor(),
                           name='NodeName_325da10')
NodeHash_325da10.inputs.num_components = 2

#Wraps command **fsl_regfilt**
NodeHash_430d1e0 = pe.Node(interface=fsl.FilterRegressor(),
                           name='NodeName_430d1e0')
NodeHash_430d1e0.inputs.filter_columns = [1, 2]
Example n. 27
0
def create_susan_smooth(name="susan_smooth", separate_masks=True):
    """Create a SUSAN smoothing workflow
    Parameters
    ----------
    ::
        name : name of workflow (default: susan_smooth)
        separate_masks : separate masks for each run
    Inputs::
        inputnode.in_files : functional runs (filename or list of filenames)
        inputnode.fwhm : fwhm for smoothing with SUSAN (float or list of floats)
        inputnode.mask_file : mask used for estimating SUSAN thresholds (but not for smoothing)
    Outputs::
        outputnode.smoothed_files : functional runs (filename or list of filenames)
    Example
    -------
    >>> smooth = create_susan_smooth()
    >>> smooth.inputs.inputnode.in_files = 'f3.nii'
    >>> smooth.inputs.inputnode.fwhm = 5
    >>> smooth.inputs.inputnode.mask_file = 'mask.nii'
    >>> smooth.run() # doctest: +SKIP
    """

    # replaces the functionality of a "for loop"
    def cartesian_product(fwhms, in_files, usans, btthresh):
        from nipype.utils.filemanip import ensure_list

        # ensure all inputs are lists
        in_files = ensure_list(in_files)
        fwhms = [fwhms] if isinstance(fwhms, (int, float)) else fwhms
        # create cartesian product lists (s_<name> = single element of list)
        cart_in_file = [s_in_file for s_in_file in in_files for s_fwhm in fwhms]
        cart_fwhm = [s_fwhm for s_in_file in in_files for s_fwhm in fwhms]
        cart_usans = [s_usans for s_usans in usans for s_fwhm in fwhms]
        cart_btthresh = [s_btthresh for s_btthresh in btthresh for s_fwhm in fwhms]

        return cart_in_file, cart_fwhm, cart_usans, cart_btthresh

    susan_smooth = pe.Workflow(name=name)
    """
    Set up a node to define all inputs required for the preprocessing workflow
    """

    inputnode = pe.Node(
        interface=util.IdentityInterface(fields=["in_files", "fwhm", "mask_file"]),
        name="inputnode",
    )
    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    multi_inputs = pe.Node(
        util.Function(
            function=cartesian_product,
            output_names=["cart_in_file", "cart_fwhm", "cart_usans", "cart_btthresh"],
        ),
        name="multi_inputs",
    )

    smooth = pe.MapNode(
        interface=fsl.SUSAN(),
        iterfield=["in_file", "brightness_threshold", "usans", "fwhm"],
        name="smooth",
    )
    """
    Determine the median value of the functional runs using the mask
    """

    if separate_masks:
        median = pe.MapNode(
            interface=fsl.ImageStats(op_string="-k %s -p 50"),
            iterfield=["in_file", "mask_file"],
            name="median",
        )
    else:
        median = pe.MapNode(
            interface=fsl.ImageStats(op_string="-k %s -p 50"),
            iterfield=["in_file"],
            name="median",
        )
    susan_smooth.connect(inputnode, "in_files", median, "in_file")
    susan_smooth.connect(inputnode, "mask_file", median, "mask_file")
    """
    Mask the motion corrected functional runs with the dilated mask
    """

    if separate_masks:
        mask = pe.MapNode(
            interface=fsl.ImageMaths(suffix="_mask", op_string="-mas"),
            iterfield=["in_file", "in_file2"],
            name="mask",
        )
    else:
        mask = pe.MapNode(
            interface=fsl.ImageMaths(suffix="_mask", op_string="-mas"),
            iterfield=["in_file"],
            name="mask",
        )
    susan_smooth.connect(inputnode, "in_files", mask, "in_file")
    susan_smooth.connect(inputnode, "mask_file", mask, "in_file2")
    """
    Determine the mean image from each functional run
    """

    meanfunc = pe.MapNode(
        interface=fsl.ImageMaths(op_string="-Tmean", suffix="_mean"),
        iterfield=["in_file"],
        name="meanfunc2",
    )
    susan_smooth.connect(mask, "out_file", meanfunc, "in_file")
    """
    Merge the median values with the mean functional images into a coupled list
    """

    merge = pe.Node(interface=util.Merge(2, axis="hstack"), name="merge")
    susan_smooth.connect(meanfunc, "out_file", merge, "in1")
    susan_smooth.connect(median, "out_stat", merge, "in2")
    """
    Define a function to get the brightness threshold for SUSAN
    """

    susan_smooth.connect(
        [
            (inputnode, multi_inputs, [("in_files", "in_files"), ("fwhm", "fwhms")]),
            (median, multi_inputs, [(("out_stat", getbtthresh), "btthresh")]),
            (merge, multi_inputs, [(("out", getusans), "usans")]),
            (
                multi_inputs,
                smooth,
                [
                    ("cart_in_file", "in_file"),
                    ("cart_fwhm", "fwhm"),
                    ("cart_btthresh", "brightness_threshold"),
                    ("cart_usans", "usans"),
                ],
            ),
        ]
    )

    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=["smoothed_files"]), name="outputnode"
    )

    susan_smooth.connect(smooth, "smoothed_file", outputnode, "smoothed_files")

    return susan_smooth
Example n. 28
0
susan_smooth = [[] for _ in range(len(kernel_values))]
smooth_median = [[] for _ in range(len(kernel_values))]
smooth_meanfunc = [[] for _ in range(len(kernel_values))]
smooth_merge = [[] for _ in range(len(kernel_values))]
fsl_smooth = [[] for _ in range(len(kernel_values))]
for i, kernel in enumerate(kernel_values):
    #
    #
    ### SUSAN Smoothing
    #
    #
    # Smooth each run using SUSAN with the brightness threshold set to 75%
    # of the median value for each run and a mask constituting the mean
    # functional
    smooth_median[i] = pe.MapNode(
        fsl.ImageStats(op_string='-k %s -p 50'),
        iterfield=['in_file'],
        name='susan_smooth_median_{0}'.format(kernel))
    preproc_wf.connect(maskfunc, 'out_file', smooth_median[i], 'in_file')
    preproc_wf.connect(fs_threshold2, ('binary_file', pickfirst),
                       smooth_median[i], 'mask_file')

    smooth_meanfunc[i] = pe.MapNode(
        fsl.ImageMaths(op_string='-Tmean', suffix='_mean'),
        iterfield=['in_file'],
        name='susan_smooth_meanfunc_{0}'.format(kernel))
    preproc_wf.connect(maskfunc, 'out_file', smooth_meanfunc[i], 'in_file')

    smooth_merge[i] = pe.Node(util.Merge(2, axis='hstack'),
                              name='susan_smooth_merge_{0}'.format(kernel))
    preproc_wf.connect(smooth_meanfunc[i], 'out_file', smooth_merge[i], 'in1')
Example n. 29
0
def create_resting_preproc(name='restpreproc'):
    """Create a "resting" time series preprocessing workflow

    The noise removal is based on Behzadi et al. (2007)

    Parameters
    ----------

    name : name of workflow (default: restpreproc)

    Inputs::

        inputspec.func : functional run (filename or list of filenames)

    Outputs::

        outputspec.noise_mask_file : voxels used for PCA to derive noise components
        outputspec.filtered_file : bandpass filtered and noise-reduced time series

    Example
    -------

    >>> TR = 3.0
    >>> wf = create_resting_preproc()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.inputs.inputspec.num_noise_components = 6
    >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR)
    >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR)
    >>> wf.run() # doctest: +SKIP

    """

    restpreproc = pe.Workflow(name=name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'noise_mask_file',
        'filtered_file',
    ]),
                         name='outputspec')
    slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
    realigner = create_realign_flow()
    tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
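    # `extract_noise_components` is not shown in this listing; a sketch (an
    # assumption, modelled on the nipype resting-state example) that runs an
    # SVD on the noise-mask voxel timecourses and writes the top components
    # to a text file:
    def extract_noise_components(realigned_file, noise_mask_file, num_components):
        import os
        import numpy as np
        import nibabel as nb
        imgseries = nb.load(realigned_file)
        noise_voxels = nb.load(noise_mask_file).get_fdata().astype(bool)
        voxel_timecourses = imgseries.get_fdata()[noise_voxels, :]
        # remove each voxel's mean before the SVD
        voxel_timecourses -= voxel_timecourses.mean(axis=1)[:, np.newaxis]
        _, _, v = np.linalg.svd(voxel_timecourses, full_matrices=False)
        components_file = os.path.join(os.getcwd(), 'noise_components.txt')
        np.savetxt(components_file, v[:num_components, :].T)
        return components_file
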
    compcor = pe.Node(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                      name='compcorr')
    remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                           name='remove_noise')
    bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter')

    # Define connections
    restpreproc.connect(inputnode, 'func', slicetimer, 'in_file')
    restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner,
                        'inputspec.func')
    restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr,
                        'in_file')
    restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    restpreproc.connect(realigner, 'outputspec.realigned_file', compcor,
                        'realigned_file')
    restpreproc.connect(threshold_stddev, 'out_file', compcor,
                        'noise_mask_file')
    restpreproc.connect(inputnode, 'num_noise_components', compcor,
                        'num_components')
    restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    restpreproc.connect(compcor, 'noise_components', remove_noise,
                        'design_file')
    restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter,
                        'highpass_sigma')
    restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter,
                        'lowpass_sigma')
    restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file')
    restpreproc.connect(threshold_stddev, 'out_file', outputnode,
                        'noise_mask_file')
    restpreproc.connect(bandpass_filter, 'out_file', outputnode,
                        'filtered_file')
    return restpreproc
Example n. 30
0
def startRegression(input_File, FWHM, cutOff_sec, TR, sl):
    # generate folder regr images
    print("Regression \33[5m...\33[0m (wait!)", end="\r")
    origin_Path = os.path.dirname(os.path.dirname(input_File))
    regr_Path = os.path.join(origin_Path, 'regr')

    if os.path.exists(regr_Path):
        shutil.rmtree(regr_Path)
    os.mkdir(regr_Path)

    # generate log file
    sys.stdout = open(os.path.join(regr_Path, 'regress.log'), 'w')

    # delete the first five volumes
    input_File5Sub = delete5Slides(input_File, regr_Path)

    # perform slice time correction
    if sl == True:
        input_File5Sub = fsl_slicetimeCorrector(input_File5Sub, TR)

    # locate the regression (physio) text files
    txtregr_Path = os.path.join(origin_Path, 'txtRegrPython')

    # slice-wise regression with the physio data
    regr_FileReal = fsl_RegrSliceWise(input_File5Sub, txtregr_Path, regr_Path)

    # get mean
    meanRegr_File = getMean(regr_FileReal, 'mean2')
    file_nameEPI_BET, mask_file = applyBET(meanRegr_File,
                                           frac=0.35,
                                           radius=45,
                                           vertical_gradient=0.1)
    os.remove(meanRegr_File)
    regr_File = applyMask(regr_FileReal, mask_file, '')

    #  "robust intensity range" which calculates values similar to the 98% percentiles
    myStat = fsl.ImageStats(in_file=regr_File,
                            op_string='-p 98',
                            terminal_output='allatonce')
    print(myStat.cmdline)
    stat_result = myStat.run()
    upperp = stat_result.outputs.out_stat

    # get binary mask
    mask = getMask(regr_File, upperp)

    # "robust intensity range" which calculates values similar to the 50% percentiles with mask
    myStat = fsl.ImageStats(in_file=regr_File,
                            op_string=' -k ' + mask + ' -p 50 ',
                            mask_file=mask,
                            terminal_output='allatonce')
    print(myStat.cmdline)
    stat_result = myStat.run()
    meanintensity = stat_result.outputs.out_stat
    meanintensity = meanintensity * 0.75

    # maximum filter (dilation) of the mask
    mask = dilF(mask)

    # apply mask on regrFile
    thresRegr_file = applyMask(regr_File, mask, 'thres')

    # get mean of masked regr-Dataset
    mean_func = getMean(thresRegr_file, 'mean_func')

    # FWHM = 3.0
    # sigma = FWHM/(2 * np.sqrt(2 * np.log(2))) = 1.27
    srgr_file = applySusan(thresRegr_file, meanintensity, FWHM, mean_func)

    # apply mask on srgr_file
    smmothSRegr_file = applyMask(srgr_file, mask, '_smooth')
    inscalefactor = 10000.0 / meanintensity

    # multiply image with inscalefactor
    intnormSrgr_file = mathOperation(smmothSRegr_file, inscalefactor)

    # mean of scaled Dataset
    tempMean = getMean(intnormSrgr_file, 'tempMean')

    # filter image cut-off frequency 0.01 Hz
    highpass = (cutOff_sec / (2.0 * TR))
    #highpass = 17.6056338028
    filtered_image = filterFSL(intnormSrgr_file, highpass, tempMean)

    sys.stdout = sys.__stdout__
    print('Regression  \033[0;30;42m COMPLETED \33[0m')
    return regr_FileReal, srgr_file, filtered_image