Example #1
def fsl_RegrSliceWise(input_file, txtregr_Path, regr_Path):
    # basename of the input file (without extension)
    dataName = os.path.basename(input_file).split('.')[0]

    # check that the regression text files exist
    regrTextFiles = findRegData(txtregr_Path)
    if len(regrTextFiles) == 0:
        print('No regression with physio data!')
        output_file = os.path.join(regr_Path,
                                   os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
        shutil.copyfile(input_file, output_file)
        return output_file


    # scale the NIfTI data by a factor of 10
    fslPath = scaleBy10(input_file, inv=False)
    # split input_file into z-slices
    mySplit = fsl.Split(in_file=fslPath, dimension='z', out_base_name=dataName)
    print(mySplit.cmdline)
    mySplit.run()
    os.remove(fslPath)

    # collect the slice files produced by fslsplit
    sliceFiles = findSlicesData(os.getcwd(), dataName)




    if not len(regrTextFiles) == len(sliceFiles):
        sys.exit('Error: number of regressor .txt files in %s does not match the number of slices' % txtregr_Path)

    print('Start slice-wise regression ...')

    # run the regression slice by slice
    print('For all slices ...')
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        regr = regrTextFiles[i]
        # only use columns [1,2,7,9,11,12,13] of the regressor .txt file
        output_file = os.path.join(regr_Path, os.path.basename(slc))
        myRegr = fsl.FilterRegressor(in_file=slc, design_file=regr, out_file=output_file,
                                     filter_columns=[1, 2, 7, 9, 11, 12, 13])
        print(myRegr.cmdline)
        myRegr.run()
        os.remove(slc)


    # merge slices to a single volume
    mcf_sliceFiles = findSlicesData(regr_Path, dataName)
    output_file = os.path.join(regr_Path,
                               os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles, dimension='z', merged_file=output_file)
    print(myMerge.cmdline)
    myMerge.run()

    for slc in mcf_sliceFiles: os.remove(slc)

    # undo the scaling (factor 10^(-1))
    output_file = scaleBy10(output_file, inv=True)

    return output_file
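
The helpers findRegData, findSlicesData, and scaleBy10 used above are defined elsewhere in the source module. As a rough, hypothetical sketch only (assuming scaleBy10 rescales the voxel geometry by a factor of 10, a common workaround for running FSL tools on small-animal data), such a helper could look like this:

import os
import numpy as np
import nibabel as nib

def scaleBy10(input_path, inv=False):
    # Hypothetical sketch of the helper used above: rescale voxel dimensions
    # by 10 (or by 1/10 when inv=True) and write the result next to the input.
    factor = 0.1 if inv else 10.0
    img = nib.load(input_path)
    affine = img.affine.copy()
    affine[:3, :3] *= factor  # scale the spatial part of the affine
    affine[:3, 3] *= factor   # keep the origin consistent with the new scale
    out_path = os.path.join(os.path.dirname(os.path.abspath(input_path)),
                            'scaled_' + os.path.basename(input_path))
    nib.save(nib.Nifti1Image(np.asanyarray(img.dataobj), affine, img.header), out_path)
    return out_path
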
Example #2
def mod_regressor(design_file, in_file, mask):
    import nipype.interfaces.fsl as fsl
    if "empty_file.txt" in design_file:
        return in_file
    else:
        reg = fsl.FilterRegressor(filter_all=True)
        reg.inputs.in_file = in_file
        reg.inputs.design_file = design_file
        reg.inputs.mask = mask
        res = reg.run()
        out_file = res.outputs.out_file
        return out_file
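
Because mod_regressor does its fsl import inside the function body, it looks intended to be wrapped in a nipype Function node; a minimal, hypothetical sketch of such a wrapping (node name and file paths are assumptions) could be:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util

# Sketch only: an "empty_file.txt" design file makes the node pass the input through unchanged.
regfilt = pe.Node(
    util.Function(input_names=['design_file', 'in_file', 'mask'],
                  output_names=['out_file'],
                  function=mod_regressor),
    name='regfilt')
regfilt.inputs.design_file = 'nuisance_design.txt'  # placeholder paths
regfilt.inputs.in_file = 'func_preprocessed.nii.gz'
regfilt.inputs.mask = 'brain_mask.nii.gz'
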
Example #3
def test_FilterRegressor(tmp_path):
    seed(a=0x5E6128C4)

    os.chdir(str(tmp_path))

    array = np.random.rand(10, 10, 10, 100) * 1000 + 10000

    img = nib.Nifti1Image(array, np.eye(4))
    assert isinstance(img.header, nib.Nifti1Header)
    img.header.set_data_dtype(np.float64)

    in_file = "img.nii.gz"
    nib.save(img, in_file)

    x = array.reshape((-1, array.shape[-1]))
    _, _, vh = np.linalg.svd(x, full_matrices=False)
    design = vh[:10, :].T  # first ten PCA components
    design_file = "design.txt"
    np.savetxt(design_file, design)

    instance = FilterRegressor()
    instance.inputs.in_file = in_file
    instance.inputs.design_file = design_file
    instance.inputs.filter_columns = [1, 2, 3]
    result = instance.run()
    assert result.outputs is not None

    r0 = nib.load(result.outputs.out_file).get_fdata()

    instance = fsl.FilterRegressor()
    instance.inputs.in_file = in_file
    instance.inputs.design_file = design_file
    instance.inputs.filter_columns = [1, 2, 3]
    result = instance.run()
    assert result.outputs is not None

    r1 = nib.load(result.outputs.out_file).get_fdata()

    # delta = r0 - r1
    # print(r0[np.where(delta == delta.max())[:3]])
    # print(r1[np.where(delta == delta.max())[:3]])
    # print(np.mean(np.abs(r0 - r1)))

    assert np.allclose(r0, r1)
Example #4
def compcorr(name='compcorr'):
    from nipype.workflows.rsfmri.fsl.resting import extract_noise_components
    from nipype.algorithms.misc import TSNR

    wkfl = pe.Workflow(name=name)
    inputnode = pe.Node(utility.IdentityInterface(
        fields=['in_file', 'mask', 'num_components']),
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(fields=['corrected_file']),
                         name='outputspec')

    tsnr = pe.Node(TSNR(), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-k %s -p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    compcor = pe.Node(
        utility.Function(input_names=[
            'realigned_file', 'noise_mask_file', 'num_components'
        ],
                         output_names=['noise_components'],
                         function=extract_noise_components),
        name='compcorr',
    )
    remove_noise = pe.Node(
        fsl.FilterRegressor(filter_all=True),
        name='remove_noise',
    )

    wkfl.connect([
        (inputnode, tsnr, [('in_file', 'in_file')]),
        (inputnode, compcor, [('in_file', 'realigned_file'),
                              ('num_components', 'num_components')]),
        (tsnr, threshold_stddev, [('stddev_file', 'in_file')]),
        (tsnr, getthresh, [('stddev_file', 'in_file')]),
        (inputnode, getthresh, [('mask', 'mask_file')]),
        (inputnode, remove_noise, [('in_file', 'in_file')]),
        (getthresh, threshold_stddev, [('out_stat', 'thresh')]),
        (threshold_stddev, compcor, [('out_file', 'noise_mask_file')]),
        (compcor, remove_noise, [('noise_components', 'design_file')]),
        (inputnode, remove_noise, [('mask', 'mask')]),
        (remove_noise, outputnode, [('out_file', 'corrected_file')]),
    ])
    return wkfl
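
As a hedged usage sketch (file names, the working directory, and the component count are assumptions), the workflow exposes its inputs on inputspec and returns the denoised series on outputspec.corrected_file:

# Sketch only: instantiate the compcorr workflow and set its inputspec fields directly.
wf = compcorr()
wf.base_dir = '/tmp/compcorr_work'                     # hypothetical working directory
wf.inputs.inputspec.in_file = 'func_realigned.nii.gz'  # placeholder inputs
wf.inputs.inputspec.mask = 'brain_mask.nii.gz'
wf.inputs.inputspec.num_components = 5
wf.run()
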
Example #5
def t_compcor(wf_name="t_compcor"):

    cc = pe.Workflow(name=wf_name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=['func', 'num_noise_components']),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['noise_mask_file', 'noise_components', 'residual_file']),
                         name='outputspec')

    tsnr = pe.MapNode(TSNR(regress_poly=2), name='tsnr', iterfield=['in_file'])
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 98'),
                           name='getthreshold',
                           iterfield=['in_file'])
    threshold_stddev = pe.MapNode(fsl.Threshold(),
                                  name='threshold',
                                  iterfield=['in_file', 'thresh'])
    compcor = pe.MapNode(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                         name='compcorr',
                         iterfield=['realigned_file', 'noise_mask_file'])
    remove_noise = pe.MapNode(fsl.FilterRegressor(filter_all=True),
                              name='remove_noise',
                              iterfield=['in_file', 'design_file'])

    cc.connect(inputnode, 'func', tsnr, 'in_file')
    cc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    cc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    cc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    cc.connect(inputnode, 'func', compcor, 'realigned_file')
    cc.connect(threshold_stddev, 'out_file', compcor, 'noise_mask_file')
    cc.connect(inputnode, 'num_noise_components', compcor, 'num_components')
    cc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    cc.connect(compcor, 'noise_components', remove_noise, 'design_file')
    cc.connect(compcor, 'noise_components', outputnode, 'noise_components')
    cc.connect(remove_noise, 'out_file', outputnode, 'residual_file')
    cc.connect(threshold_stddev, 'out_file', outputnode, 'noise_mask_file')

    return cc
Example #6
def regress_out_confounds(name='regout_confounds'):
    wkfl = pe.Workflow(name=name)
    inputnode = pe.Node(utility.IdentityInterface(
        fields=['in_file', 'mask', 'motion', 'csf_seg', 'wm_seg']),
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(fields=['corrected_file']),
                         name='outputspec')

    def combine_regressors(motion, motion_source, regressors):
        import nipy.algorithms.utils.preprocess as preproc
        motion = preproc.motion_parameter_standardize(motion, motion_source)
        # NOTE: the snippet is truncated here; the standardized motion parameters
        # would still need to be combined with `regressors` and returned.

    n_combine_regressors = pe.Node(
        utility.Function(input_names=['motion', 'motion_source', 'regressors'],
                         output_names=['regressors'],
                         function=combine_regressors),
        name='combine_regressors')
    n_remove_confound = pe.Node(
        fsl.FilterRegressor(filter_all=True),
        name='remove_confound',
    )
Example #7
def create_resting_preproc(name='restpreproc'):
    """Create a "resting" time series preprocessing workflow

    The noise removal is based on Behzadi et al. (2007)

    Parameters
    ----------

    name : name of workflow (default: restpreproc)

    Inputs::

        inputspec.func : functional run (filename or list of filenames)

    Outputs::

        outputspec.noise_mask_file : voxels used for PCA to derive noise components
        outputspec.filtered_file : bandpass filtered and noise-reduced time series

    Example
    -------

    >>> TR = 3.0
    >>> wf = create_resting_preproc()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.inputs.inputspec.num_noise_components = 6
    >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR)
    >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR)
    >>> wf.run() # doctest: +SKIP

    """

    restpreproc = pe.Workflow(name=name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'noise_mask_file',
        'filtered_file',
    ]),
                         name='outputspec')
    slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
    realigner = create_realign_flow()
    tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    compcor = pe.Node(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                      name='compcorr')
    remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                           name='remove_noise')
    bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter')

    # Define connections
    restpreproc.connect(inputnode, 'func', slicetimer, 'in_file')
    restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner,
                        'inputspec.func')
    restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr,
                        'in_file')
    restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    restpreproc.connect(realigner, 'outputspec.realigned_file', compcor,
                        'realigned_file')
    restpreproc.connect(threshold_stddev, 'out_file', compcor,
                        'noise_mask_file')
    restpreproc.connect(inputnode, 'num_noise_components', compcor,
                        'num_components')
    restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    restpreproc.connect(compcor, 'noise_components', remove_noise,
                        'design_file')
    restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter,
                        'highpass_sigma')
    restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter,
                        'lowpass_sigma')
    restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file')
    restpreproc.connect(threshold_stddev, 'out_file', outputnode,
                        'noise_mask_file')
    restpreproc.connect(bandpass_filter, 'out_file', outputnode,
                        'filtered_file')
    return restpreproc
Example #8
#Wraps command **fslstats**
NodeHash_3ac27f0 = pe.Node(interface=fsl.ImageStats(), name='NodeName_3ac27f0')
NodeHash_3ac27f0.inputs.op_string = '-p 98'

#Wraps command **fslmaths**
NodeHash_30f6760 = pe.Node(interface=fsl.Threshold(), name='NodeName_30f6760')
NodeHash_30f6760.inputs.args = '-bin'

#Anatomical compcor: for inputs and outputs, see CompCor.
NodeHash_325da10 = pe.Node(interface=confounds.ACompCor(),
                           name='NodeName_325da10')
NodeHash_325da10.inputs.num_components = 2

#Wraps command **fsl_regfilt**
NodeHash_430d1e0 = pe.Node(interface=fsl.FilterRegressor(),
                           name='NodeName_430d1e0')
NodeHash_430d1e0.inputs.filter_columns = [1, 2]

#Wraps command **fslmaths**
NodeHash_77e3220 = pe.Node(interface=fsl.TemporalFilter(),
                           name='NodeName_77e3220')
NodeHash_77e3220.inputs.highpass_sigma = 25

#Generic datasink module to store structured outputs
NodeHash_99576b0 = pe.Node(interface=io.DataSink(), name='NodeName_99576b0')

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_30f69e0, 'outfiles', NodeHash_1d000c0, 'in_file')
analysisflow.connect(NodeHash_1d000c0, 'slice_time_corrected_file',
Example #9
def create_confound_removal_workflow(workflow_name="confound_removal"):

    inputnode = pe.Node(util.IdentityInterface(
        fields=["subject_id", "timeseries", "reg_file", "motion_parameters"]),
                        name="inputs")

    # Get the Freesurfer aseg volume from the Subjects Directory
    getaseg = pe.Node(io.FreeSurferSource(subjects_dir=fs.Info.subjectsdir()),
                      name="getaseg")

    # Binarize the Aseg to use as a whole brain mask
    asegmask = pe.Node(fs.Binarize(min=0.5, dilate=2), name="asegmask")

    # Extract and erode a mask of the deep cerebral white matter
    extractwm = pe.Node(fs.Binarize(match=[2, 41], erode=3), name="extractwm")

    # Extract and erode a mask of the ventricles and CSF
    extractcsf = pe.Node(fs.Binarize(match=[4, 5, 14, 15, 24, 31, 43, 44, 63],
                                     erode=1),
                         name="extractcsf")

    # Mean the timeseries across the fourth dimension
    meanfunc = pe.MapNode(fsl.MeanImage(),
                          iterfield=["in_file"],
                          name="meanfunc")

    # Invert the anatomical coregistration and resample the masks
    regwm = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                       iterfield=["source_file", "reg_file"],
                       name="regwm")

    regcsf = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        iterfield=["source_file", "reg_file"],
                        name="regcsf")

    regbrain = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                          iterfield=["source_file", "reg_file"],
                          name="regbrain")

    # Convert to Nifti for FSL tools
    convertwm = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                           iterfield=["in_file"],
                           name="convertwm")

    convertcsf = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                            iterfield=["in_file"],
                            name="convertcsf")

    convertbrain = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                              iterfield=["in_file"],
                              name="convertbrain")

    # Add the mask images together for a report image
    addconfmasks = pe.MapNode(fsl.ImageMaths(suffix="conf",
                                             op_string="-mul 2 -add",
                                             out_data_type="char"),
                              iterfield=["in_file", "in_file2"],
                              name="addconfmasks")

    # Overlay and slice the confound mask over the mean func for reporting
    confoverlay = pe.MapNode(fsl.Overlay(auto_thresh_bg=True,
                                         stat_thresh=(.7, 2)),
                             iterfield=["background_image", "stat_image"],
                             name="confoverlay")

    confslice = pe.MapNode(fsl.Slicer(image_width=800, label_slices=False),
                           iterfield=["in_file"],
                           name="confslice")
    confslice.inputs.sample_axial = 2

    # Extract the mean signal from white matter and CSF masks
    wmtcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                           iterfield=["segmentation_file", "in_file"],
                           name="wmtcourse")

    csftcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                            iterfield=["segmentation_file", "in_file"],
                            name="csftcourse")

    # Extract the mean signal from over the whole brain
    globaltcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                               iterfield=["segmentation_file", "in_file"],
                               name="globaltcourse")

    # Build the confound design matrix
    conf_inputs = [
        "motion_params", "global_waveform", "wm_waveform", "csf_waveform"
    ]
    confmatrix = pe.MapNode(util.Function(input_names=conf_inputs,
                                          output_names=["confound_matrix"],
                                          function=make_confound_matrix),
                            iterfield=conf_inputs,
                            name="confmatrix")

    # Regress the confounds out of the timeseries
    confregress = pe.MapNode(fsl.FilterRegressor(filter_all=True),
                             iterfield=["in_file", "design_file", "mask"],
                             name="confregress")

    # Rename the confound mask png
    renamepng = pe.MapNode(util.Rename(format_string="confound_sources.png"),
                           iterfield=["in_file"],
                           name="renamepng")

    # Define the outputs
    outputnode = pe.Node(
        util.IdentityInterface(fields=["timeseries", "confound_sources"]),
        name="outputs")

    # Define and connect the confound workflow
    confound = pe.Workflow(name=workflow_name)

    confound.connect([
        (inputnode, meanfunc, [("timeseries", "in_file")]),
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (getaseg, extractwm, [("aseg", "in_file")]),
        (getaseg, extractcsf, [("aseg", "in_file")]),
        (getaseg, asegmask, [("aseg", "in_file")]),
        (extractwm, regwm, [("binary_file", "target_file")]),
        (extractcsf, regcsf, [("binary_file", "target_file")]),
        (asegmask, regbrain, [("binary_file", "target_file")]),
        (meanfunc, regwm, [("out_file", "source_file")]),
        (meanfunc, regcsf, [("out_file", "source_file")]),
        (meanfunc, regbrain, [("out_file", "source_file")]),
        (inputnode, regwm, [("reg_file", "reg_file")]),
        (inputnode, regcsf, [("reg_file", "reg_file")]),
        (inputnode, regbrain, [("reg_file", "reg_file")]),
        (regwm, convertwm, [("transformed_file", "in_file")]),
        (regcsf, convertcsf, [("transformed_file", "in_file")]),
        (regbrain, convertbrain, [("transformed_file", "in_file")]),
        (convertwm, addconfmasks, [("out_file", "in_file")]),
        (convertcsf, addconfmasks, [("out_file", "in_file2")]),
        (addconfmasks, confoverlay, [("out_file", "stat_image")]),
        (meanfunc, confoverlay, [("out_file", "background_image")]),
        (confoverlay, confslice, [("out_file", "in_file")]),
        (confslice, renamepng, [("out_file", "in_file")]),
        (regwm, wmtcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, wmtcourse, [("timeseries", "in_file")]),
        (regcsf, csftcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, csftcourse, [("timeseries", "in_file")]),
        (regbrain, globaltcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, globaltcourse, [("timeseries", "in_file")]),
        (inputnode, confmatrix, [("motion_parameters", "motion_params")]),
        (wmtcourse, confmatrix, [("avgwf_txt_file", "wm_waveform")]),
        (csftcourse, confmatrix, [("avgwf_txt_file", "csf_waveform")]),
        (globaltcourse, confmatrix, [("avgwf_txt_file", "global_waveform")]),
        (confmatrix, confregress, [("confound_matrix", "design_file")]),
        (inputnode, confregress, [("timeseries", "in_file")]),
        (convertbrain, confregress, [("out_file", "mask")]),
        (confregress, outputnode, [("out_file", "timeseries")]),
        (renamepng, outputnode, [("out_file", "confound_sources")]),
    ])

    return confound
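
A hedged usage sketch (assuming a FreeSurfer SUBJECTS_DIR is configured; subject ID and paths are placeholders, and the list inputs match the MapNode iterfields, one entry per functional run):

# Sketch only: build the confound-removal workflow and feed its "inputs" node.
confound_wf = create_confound_removal_workflow()
confound_wf.base_dir = '/tmp/confound_work'                            # hypothetical working directory
confound_wf.inputs.inputs.subject_id = 'subj01'                        # placeholder subject
confound_wf.inputs.inputs.timeseries = ['run1.nii.gz', 'run2.nii.gz']  # placeholder runs
confound_wf.inputs.inputs.reg_file = ['run1_register.dat', 'run2_register.dat']
confound_wf.inputs.inputs.motion_parameters = ['run1.par', 'run2.par']
confound_wf.run()
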
Example #10
def create_resting_preproc(name='restpreproc'):
    """Create a "resting" time series preprocessing workflow

    The noise removal is based on Behzadi et al. (2007)

    Parameters
    ----------

    name : name of workflow (default: restpreproc)

    Inputs::

        inputspec.func : functional run (filename or list of filenames)

    Outputs::

        outputspec.noise_mask_file : voxels used for PCA to derive noise components
        outputspec.filtered_file : bandpass filtered and noise-reduced time series

    Example
    -------

    >>> TR = 3.0
    >>> wf = create_resting_preproc()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.inputs.inputspec.num_noise_components = 6
    >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR)
    >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR)
    >>> wf.run() # doctest: +SKIP

    """

    restpreproc = pe.Workflow(name=name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'noise_mask_file', 'filtered_file', 'motion_rms_files',
        'motion_par_file', 'realigned_file', 'mask_file', 'outlier_files',
        'intensity_files', 'outlier_plots'
    ]),
                         name='outputspec')
    slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
    realigner = create_realign_flow()

    art_detector = pe.Node(ArtifactDetect(), name='art_detector')
    art_detector.inputs.parameter_source = 'FSL'
    art_detector.inputs.mask_type = 'spm_global'
    art_detector.inputs.global_threshold = .5
    art_detector.inputs.norm_threshold = .6
    art_detector.inputs.use_differences = [True,
                                           True]  ## [Movement, Intensity]
    art_detector.inputs.zintensity_threshold = 3
    art_detector.inputs.intersect_mask = True
    '''Mask smoother node, added by Pablo Polosecki to use EPI mask'''
    mask_smoother = pe.Node(util.Function(input_names=['vol_in'],
                                          output_names=['out_vol'],
                                          function=morph_open_close),
                            name='mask_smoother')
    tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-k %s -p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    ''' Mask conjunction, to limit noisy voxels to those inside brain mask'''
    conj_masker = pe.Node(fsl.BinaryMaths(operation='mul'), name='conj_masker')

    compcor = pe.Node(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                      name='compcorr')
    #   cat_regressors = pe.Node(util.Function(input_names=['file1',
    #                                                       'file2'],
    #                                          output_names=['out_fn'],
    #                                          function=concatetante_reg_files),
    #                            name='cat_regressors')
    remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                           name='remove_noise')
    bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter')

    # Define connections
    restpreproc.connect(inputnode, 'func', slicetimer, 'in_file')
    restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner,
                        'inputspec.func')
    restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr,
                        'in_file')
    restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    restpreproc.connect(mask_smoother, 'out_vol', getthresh, 'mask_file')
    restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    restpreproc.connect(realigner, 'outputspec.realigned_file', compcor,
                        'realigned_file')
    restpreproc.connect(inputnode, 'num_noise_components', compcor,
                        'num_components')
    restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    # Combiinng compcorr with motion regressors:
    #restpreproc.connect(compcor, 'noise_components',
    #                    cat_regressors, 'file1')
    #restpreproc.connect(realigner, 'outputspec.par_file',
    #                    cat_regressors, 'file2')
    #restpreproc.connect(cat_regressors, 'out_fn',
    #                    remove_noise, 'design_file')
    restpreproc.connect(compcor, 'noise_components', remove_noise,
                        'design_file')
    restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter,
                        'highpass_sigma')
    restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter,
                        'lowpass_sigma')
    restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file')
    restpreproc.connect(conj_masker, 'out_file', outputnode, 'noise_mask_file')
    restpreproc.connect(bandpass_filter, 'out_file', outputnode,
                        'filtered_file')
    restpreproc.connect(realigner, 'outputspec.rms_files', outputnode,
                        'motion_rms_files')
    restpreproc.connect(realigner, 'outputspec.par_file', outputnode,
                        'motion_par_file')
    restpreproc.connect(realigner, 'outputspec.realigned_file', outputnode,
                        'realigned_file')
    restpreproc.connect(realigner, 'outputspec.realigned_file', art_detector,
                        'realigned_files')
    restpreproc.connect(realigner, 'outputspec.par_file', art_detector,
                        'realignment_parameters')
    restpreproc.connect(art_detector, 'mask_files', mask_smoother, 'vol_in')
    restpreproc.connect(mask_smoother, 'out_vol', outputnode, 'mask_file')
    restpreproc.connect(art_detector, 'outlier_files', outputnode,
                        'outlier_files')
    restpreproc.connect(art_detector, 'intensity_files', outputnode,
                        'intensity_files')
    #restpreproc.connect(art_detector, 'plot_files',
    #                    outputnode, 'outlier_plots')
    restpreproc.connect(mask_smoother, 'out_vol', conj_masker, 'in_file')
    restpreproc.connect(threshold_stddev, 'out_file', conj_masker,
                        'operand_file')
    restpreproc.connect(conj_masker, 'out_file', compcor, 'noise_mask_file')
    return restpreproc
Example #11
my_fsl_ImageStats.inputs.op_string = '-p 98'

#Wraps command **fslmaths**
my_fsl_Threshold = pe.Node(interface=fsl.Threshold(),
                           name='my_fsl_Threshold',
                           iterfield=[''])
my_fsl_Threshold.inputs.args = '-bin'

#Anatomical compcor: for inputs and outputs, see CompCor.
my_confounds_ACompCor = pe.Node(interface=confounds.ACompCor(),
                                name='my_confounds_ACompCor',
                                iterfield=[''])
my_confounds_ACompCor.inputs.num_components = 2

#Wraps command **fsl_regfilt**
my_fsl_FilterRegressor = pe.Node(interface=fsl.FilterRegressor(),
                                 name='my_fsl_FilterRegressor',
                                 iterfield=[''])
my_fsl_FilterRegressor.inputs.filter_columns = [1, 2]

#Wraps command **fslmaths**
my_fsl_TemporalFilter = pe.Node(interface=fsl.TemporalFilter(),
                                name='my_fsl_TemporalFilter',
                                iterfield=[''])
my_fsl_TemporalFilter.inputs.highpass_sigma = 25

#Change the name of a file based on a mapped format string.
my_utility_Rename = pe.Node(interface=utility.Rename(),
                            name='my_utility_Rename',
                            iterfield=[''])
my_utility_Rename.inputs.format_string = "/output/filtered.nii.gz"
Example #12
def nuissremov_workflow(SinkTag="func_preproc", wf_name="nuisance_correction"):
    """
    The workflow uses the noise information to regress it out of the data.

    Workflow inputs:
        :param in_file: The reoriented and motion-corrected functional data.
        :param design_file: A matrix containing all the nuisance regressors (motion + CompCor noise + ...).
        :param filter_all: Regress out all columns of the design matrix (default: True).
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found, in a subdirectory specific to this workflow.

   Workflow outputs:


    :return: nuissremov_workflow


    Balint Kincses
    [email protected]
    2018

    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['in_file', 'design_file']),
        name='inputspec')

    # Perform the nuisance regression
    nuisregression = pe.MapNode(interface=fsl.FilterRegressor(filter_all=True),
                                iterfield=['design_file', 'in_file'],
                                name='nuisregression')

    myqc = qc.timecourse2png("timeseries", tag="020_nuiscorr")

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['out_file']),
                         name='outputspec')

    # save data out with Datasink
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir

    #TODO_ready: qc timeseries before and after

    # Generate workflow
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'in_file', nuisregression, 'in_file')
    analysisflow.connect(inputspec, 'design_file', nuisregression,
                         'design_file')
    analysisflow.connect(nuisregression, 'out_file', outputspec, 'out_file')
    analysisflow.connect(nuisregression, 'out_file', ds,
                         'func_nuiss_corrected')
    analysisflow.connect(nuisregression, 'out_file', myqc, 'inputspec.func')

    return analysisflow
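
A hedged usage sketch (assuming the PUMI environment is configured; paths are placeholders, and the inputs are lists because nuisregression is a MapNode):

# Sketch only: set the sub-workflow's inputspec fields and run it standalone.
nuisrem = nuissremov_workflow()
nuisrem.base_dir = '/tmp/nuisance_work'                  # hypothetical working directory
nuisrem.inputs.inputspec.in_file = ['func_mc.nii.gz']    # placeholder, one entry per run
nuisrem.inputs.inputspec.design_file = ['nuisance_design.txt']
nuisrem.run()
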
Example #13
ComputeLowTsnr.inputs.op_string = '-p 98'

#Wraps command **fslmaths**
Threshold = pe.MapNode(interface=fsl.Threshold(),
                       name='Threshold',
                       iterfield=['thresh', 'in_file'])
Threshold.inputs.args = '-bin'

#Anatomical compcor: for inputs and outputs, see CompCor.
NoiseComponents = pe.MapNode(interface=confounds.ACompCor(),
                             name='NoiseComponents',
                             iterfield=['realigned_file', 'mask_files'])
NoiseComponents.inputs.num_components = 2

#Wraps command **fsl_regfilt**
RegressionFilter = pe.MapNode(interface=fsl.FilterRegressor(),
                              name='RegressionFilter',
                              iterfield=['in_file', 'design_file'])
RegressionFilter.inputs.filter_columns = [1, 2]

#Wraps command **fslmaths**
BandpassFilter = pe.MapNode(interface=fsl.TemporalFilter(),
                            name='BandpassFilter',
                            iterfield=['in_file'])
BandpassFilter.inputs.highpass_sigma = 25

#Wraps the executable command ``bet``.
BrainExtraction = pe.Node(interface=fsl.BET(), name='BrainExtraction')

#Generic datasink module to store structured outputs
io_DataSink = pe.Node(interface=io.DataSink(), name='io_DataSink')
Example #14
def get_nuisance_regressors_wf(outdir, timepoints, subject_id, global_signal=False, order=0, derivatives=1, comp=3):
    """Creates nipype workflow for nuisance correction composed by:
        Intercept + Drift (cosine transform) + Motion Correction + WhiteMatter&CSF Nuisance Regressors (CompCor)
    Parameters
    ----------
    outdir:
    subject_id:
    global_signal:
    timepoints:
    order:
    derivatives:
    comp:
        
    Returns
    -------
    wf_reg:
    """
    from nipype.algorithms import confounds
    from nipype import Workflow, Node
    from nipype.interfaces import utility, fsl
    import os
    
    if global_signal:
        gb = '_GB'
    else:
        gb = ''
        
    wf_reg = Workflow(name=subject_id + gb, base_dir=outdir)
    
    print ("Setting INPUT node...");
    node_input = Node(utility.IdentityInterface(fields=[
           "realign_movpar_txt",        
           'rfmri_unwarped_imgs',
           'mask_wm',
           'mask_csf',
           'global_mask_img',
           'bold_img'
           ]),
            name='input_node'
    ) 
    
    #Merging wm and csf masks
    node_merge_wm_csf = Node(utility.base.Merge(2),name='Merge_wm_csf')
    
    #AcompCor
    node_ACompCor=Node(confounds.ACompCor( 
            num_components=3,
            #save_pre_filter='high_pass_filter.txt',       
            pre_filter=False,
           # high_pass_cutoff=128,
            repetition_time=0.8,
            merge_method='none',
            #use_regress_poly=False,
            #realigned_file= fMRI_BOLD_unwarped,
           # mask_files='/institut/processed_data/BBHI_func/output2/sub-41064/GetMasksInT1Space/binarize_mask/MNI152_WM_09_warp_thresh.nii.gz',
             ),
    name="AcompCor_mask")
    #node_ACompCor.inputs.save_pre_filter=os.path.join(os.path.join(os.path.join(wf_reg.base_dir,wf_reg.name),node_ACompCor.name), 'high_pass_filter.txt')  

    #cosine_filter    
    node_cosine_filter_reg=Node(utility.Function(input_names=["timepoints", "timestep","period_cut","output_dir"],
                             output_names=["cosine_filter_txt"],
                             function=cosine_filter_txt), 
                                name="cosine_filter")    
    node_cosine_filter_reg.inputs.output_dir = os.path.join(wf_reg.base_dir, wf_reg.name, node_cosine_filter_reg.name)
    node_cosine_filter_reg.inputs.timepoints=timepoints
    node_cosine_filter_reg.inputs.timestep=0.8
    #node_cosine_filter_reg.overwrite=True
    
    #global_signal    
#    if global_signal :
#        node_global_signal=Node(utility.Function(input_names=["timeseries_file", "label_file", "filename"],
#                                 output_names=["global_signal_txt"],
#                                 function=extract_subrois), 
#                                    name="global_signal")    
#        node_global_signal.inputs.filename=os.path.join(os.path.join(os.path.join(os.path.join(wf_reg.base_dir,wf_reg.name)),node_global_signal.name),'global_signal.txt') 
#        #node_global_signal.overwrite=True

    #motion regressors
    motion_regressors_interface = utility.Function(input_names=["realign_movpar_txt", "output_dir","order","derivatives"],
                             output_names=["motion_reg_txt"],
                             function=motion_regressors)
    node_motion_regressors=Node(motion_regressors_interface, name="motion_regressors_txt")    
    node_motion_regressors.inputs.output_dir = os.path.join(wf_reg.base_dir, wf_reg.name, node_motion_regressors.name)
    #node_motion_regressors.overwrite=True
    
    
    #merges all regressors     
    node_merge_txts = Node(utility.base.Merge(4),name='Merge_txt_inputs')    
    
    node_merge_regressors = Node(utility.Function(input_names=["nuisance_txts", "output_dir"],
                             output_names=["nuisance_txt"],
                             function=merge_nuisance_regressors),
    name="merge_nuisance_txt")
    node_merge_regressors.inputs.output_dir = os.path.join(wf_reg.base_dir, wf_reg.name, node_merge_regressors.name)
    
    node_filter_regressor = Node(fsl.FilterRegressor(
            # design_file (-d): nuisance_txt
            filter_all=True,
            #in_file (-i) bold after SPM coregistration to T1
            #out_file
            ),
    name="filter_regressors_bold")
    
    
    node_output = Node(utility.IdentityInterface(fields=[
        'nuisance_txt', 
        'bold_nuisance_filtered'
    ]),
    name='output_node') 
    
    wf_reg.connect([ (node_input, node_merge_wm_csf, [('mask_wm','in1'),
                                                      ('mask_csf', 'in2')]),
                     (node_input, node_ACompCor,[('rfmri_unwarped_imgs', 'realigned_file')]),
                     (node_merge_wm_csf, node_ACompCor, [('out', 'mask_files')]),
                     (node_input, node_motion_regressors,[('realign_movpar_txt', 'realign_movpar_txt')]),                     
                     
                     (node_motion_regressors,node_merge_txts, [('motion_reg_txt', 'in1')]),
                     (node_ACompCor,node_merge_txts, [('components_file', 'in2')]),
                     (node_cosine_filter_reg,node_merge_txts, [('cosine_filter_txt', 'in3')]),
                     (node_merge_txts, node_merge_regressors, [('out', 'nuisance_txts')]),
                     ])   
#    if global_signal:       
#         wf_reg.connect([
#                         (node_input, node_global_signal,[('rfmri_unwarped_imgs', 'timeseries_file'),
#                                                     ('global_mask_img', 'label_file')]),    
#                        (node_global_signal, node_merge_txts, [('global_signal_txt', 'in4')])                
#                         ])
    
    wf_reg.connect([    (node_merge_regressors, node_filter_regressor, [('nuisance_txt','design_file')]),
                        (node_input, node_filter_regressor, [('bold_img','in_file')]),
                        (node_filter_regressor, node_output, [('out_file','bold_nuisance_filtered')]),
                        (node_merge_regressors, node_output,[('nuisance_txt', 'nuisance_txt')])                
                         ])
    return wf_reg
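
A hedged usage sketch (assuming the helper functions referenced above, e.g. cosine_filter_txt, motion_regressors and merge_nuisance_regressors, are importable; paths, timepoints and subject ID are placeholders):

# Sketch only: build the per-subject nuisance workflow, set its input_node fields and run it.
wf = get_nuisance_regressors_wf('/tmp/nuisance_out', timepoints=490, subject_id='sub-01')
wf.inputs.input_node.realign_movpar_txt = 'rp_sub-01.txt'
wf.inputs.input_node.rfmri_unwarped_imgs = 'sub-01_bold_unwarped.nii.gz'
wf.inputs.input_node.mask_wm = 'wm_mask.nii.gz'
wf.inputs.input_node.mask_csf = 'csf_mask.nii.gz'
wf.inputs.input_node.bold_img = 'sub-01_bold_coreg.nii.gz'
wf.run()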