Example #1
    def calc_iqms(self):
        # tSNR
        tsnr = TSNR()
        tsnr.inputs.in_file = self.source_img
        tsnr.inputs.mean_file = os.path.join(self.outputdir, self.task,
                                             self.task + "_mean_tsnr.nii.gz")
        tsnr_res = tsnr.run()
        mean_tsnr_img = tsnr_res.outputs.mean_file
        stat = fsl.ImageStats(in_file=mean_tsnr_img, op_string=' -M')
        stat_run = stat.run()
        mean_tsnr = round(stat_run.outputs.out_stat, 2)
        # framewise-displacement
        # self.confounds is a string placeholder when no confounds table is
        # available, otherwise a pandas DataFrame of confound time series
        if isinstance(self.confounds, str):
            mean_fd = 'n/a'
        else:
            column_means = self.confounds.mean(axis=0, skipna=True)
            mean_fd = round(column_means['framewise_displacement'], 2)

        return mean_tsnr, mean_fd
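
In the method above, self.confounds is expected to be either a placeholder string or a pandas DataFrame. A minimal sketch of loading such a table from an fMRIPrep-style confounds TSV (the file name is a hypothetical placeholder):

import pandas as pd

# Hypothetical file name; fMRIPrep writes confounds as tab-separated text with 'n/a' for missing values.
confounds = pd.read_csv("sub-01_task-rest_desc-confounds_timeseries.tsv",
                        sep="\t", na_values="n/a")
mean_fd = round(confounds["framewise_displacement"].mean(skipna=True), 2)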
Example #2
def calc_tsnr(fname, in_file, epi_mask):

    tsnr = TSNR()
    tsnr.inputs.in_file = in_file
    tsnr.inputs.tsnr_file = "{}.nii.gz".format(fname)
    tsnr.inputs.mean_file = "{}_mean.nii.gz".format(fname)
    tsnr.inputs.stddev_file = "{}_stddev.nii.gz".format(fname)
    tsnr.run()

    # FROM MRIQC
    # Get EPI data (with mc done) and get it ready
    msknii = nb.load(epi_mask)
    mskdata = np.nan_to_num(msknii.get_data())
    mskdata = mskdata.astype(np.uint8)
    mskdata[mskdata < 0] = 0
    mskdata[mskdata > 0] = 1

    tsnr_data = nb.load("{}.nii.gz".format(fname)).get_data()
    tsnr_val = float(np.median(tsnr_data[mskdata > 0]))

    return tsnr_val
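
A hypothetical call to the helper above, assuming a motion-corrected BOLD run and a matching EPI brain mask already exist on disk (file names are placeholders):

tsnr_val = calc_tsnr("sub-01_task-rest",
                     "sub-01_task-rest_bold_mc.nii.gz",
                     "sub-01_task-rest_brainmask.nii.gz")
print("Median tSNR within the brain mask:", tsnr_val)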
Example #3
def init_bold_preproc_report_wf(mem_gb,
                                reportlets_dir,
                                name='bold_preproc_report_wf'):
    """
    This workflow generates and saves a reportlet showing the effect of resampling
    the BOLD signal using the standard deviation maps.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.resampling import init_bold_preproc_report_wf
        wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='.')

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB
        reportlets_dir : str
            Directory in which to save reportlets
        name : str, optional
            Workflow name (default: bold_preproc_report_wf)

    **Inputs**

        in_pre
            BOLD time-series, before resampling
        in_post
            BOLD time-series, after resampling
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing

    """

    from nipype.algorithms.confounds import TSNR
    from niworkflows.interfaces import SimpleBeforeAfter

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_pre', 'in_post', 'name_source']),
        name='inputnode')

    pre_tsnr = pe.Node(TSNR(), name='pre_tsnr', mem_gb=mem_gb * 4.5)
    pos_tsnr = pe.Node(TSNR(), name='pos_tsnr', mem_gb=mem_gb * 4.5)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    ds_report_bold = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                                 desc='preproc',
                                                 keep_dtype=True),
                             name='ds_report_bold',
                             mem_gb=DEFAULT_MEMORY_MIN_GB,
                             run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_report_bold, [('name_source', 'source_file')]),
        (inputnode, pre_tsnr, [('in_pre', 'in_file')]),
        (inputnode, pos_tsnr, [('in_post', 'in_file')]),
        (pre_tsnr, bold_rpt, [('stddev_file', 'before')]),
        (pos_tsnr, bold_rpt, [('stddev_file', 'after')]),
        (bold_rpt, ds_report_bold, [('out_report', 'in_file')]),
    ])

    return workflow
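
A hedged sketch of driving this reportlet workflow on its own, outside a larger pipeline, with placeholder file names:

report_wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='derivatives/reports')
report_wf.inputs.inputnode.in_pre = 'sub-01_bold_native.nii.gz'          # placeholder
report_wf.inputs.inputnode.in_post = 'sub-01_bold_resampled.nii.gz'      # placeholder
report_wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'  # placeholder
report_wf.run()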
Example #4
def rest_noise_filter_wf(wf_name='rest_noise_removal'):
    """ Create a resting-state fMRI noise removal node.

    Nipype Inputs
    -------------
    rest_noise_input.in_file

    rest_noise_input.brain_mask

    rest_noise_input.wm_mask

    rest_noise_input.csf_mask

    rest_noise_input.motion_params
        Nipy motion parameters.

    Nipype Outputs
    --------------
    rest_noise_output.tsnr_file
        A tSNR (temporal SNR) estimation volume file for QA purposes.

    rest_noise_output.motion_corrected
        The fMRI motion corrected image.

    rest_noise_output.nuis_corrected
        The resulting nuisance corrected image.
        This will be the same as 'motion_corrected' if compcor
        is disabled.

    rest_noise_output.motion_regressors
        Motion regressors file.

    rest_noise_output.compcor_regressors
        CompCor regressors file.

    rest_noise_output.art_displacement_files
        One image file containing the voxel-displacement timeseries.

    rest_noise_output.art_intensity_files
        One file containing the global intensity values determined
        from the brainmask.

    rest_noise_output.art_norm_files
        One file containing the composite norm.

    rest_noise_output.art_outlier_files
         One file containing a list of 0-based indices corresponding
         to outlier volumes.

    rest_noise_output.art_plot_files
        One image file containing the detected outliers.

    rest_noise_output.art_statistic_files
        One file containing information about the different types of
        artifacts and, if design info is provided, details of
        stimulus-correlated motion and a listing of artifacts by
        event type.

    Returns
    -------
    rm_nuisance_wf: nipype Workflow
    """

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    in_fields = [
        "in_file",
        "brain_mask",
        "wm_mask",
        "csf_mask",
        "motion_params",
    ]

    out_fields = [
        "tsnr_file",
        "motion_corrected",
        "nuis_corrected",
        "motion_regressors",
        "compcor_regressors",
        "gsr_regressors",
        "art_displacement_files",
        "art_intensity_files",
        "art_norm_files",
        "art_outlier_files",
        "art_plot_files",
        "art_statistic_files",
    ]

    # input identities
    rest_noise_input = setup_node(IdentityInterface(fields=in_fields,
                                                    mandatory_inputs=True),
                                  name="rest_noise_input")

    # get the settings for filters
    filters = _get_params_for('rest_filter')

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = setup_node(TSNR(regress_poly=2), name='tsnr')

    # Use :class:`nipype.algorithms.rapidart` to determine which of the
    # images in the functional series are outliers based on deviations in
    # intensity or movement.
    art = setup_node(rapidart_fmri_artifact_detection(),
                     name="detect_artifacts")

    # Compute motion regressors
    motion_regs = setup_node(Function(
        input_names=[
            'motion_params',
            'order',
            'derivatives',
        ],
        output_names=['out_files'],
        function=motion_regressors,
    ),
                             name='motion_regressors')

    # Create a filter to remove motion and art confounds
    motart_pars = setup_node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=create_regressors),
                             name='motart_parameters')

    motion_filter = setup_node(fsl.GLM(out_f_name='F_mcart.nii.gz',
                                       out_pf_name='pF_mcart.nii.gz',
                                       demean=True),
                               name='motion_filter')

    # Noise confound regressors
    compcor_pars = setup_node(Function(
        input_names=[
            'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
        ],
        output_names=['out_files'],
        function=extract_noise_components,
    ),
                              name='compcor_pars')
    #compcor_pars = setup_node(ACompCor(), name='compcor_pars')
    #compcor_pars.inputs.components_file = 'noise_components.txt'

    compcor_filter = setup_node(fsl.GLM(out_f_name='F.nii.gz',
                                        out_pf_name='pF.nii.gz',
                                        demean=True),
                                name='compcor_filter')

    # Global signal regression
    gsr_pars = setup_node(Function(
        input_names=[
            'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
        ],
        output_names=['out_files'],
        function=extract_noise_components,
    ),
                          name='gsr_pars')

    gsr_filter = setup_node(fsl.GLM(out_f_name='F_gsr.nii.gz',
                                    out_pf_name='pF_gsr.nii.gz',
                                    demean=True),
                            name='gsr_filter')

    # output identities
    rest_noise_output = setup_node(IdentityInterface(fields=out_fields,
                                                     mandatory_inputs=True),
                                   name="rest_noise_output")

    # Connect the nodes
    wf.connect([
        # tsnr
        (rest_noise_input, tsnr, [("in_file", "in_file")]),

        # artifact detection
        (rest_noise_input, art, [
            ("in_file", "realigned_files"),
            ("motion_params", "realignment_parameters"),
            ("brain_mask", "mask_file"),
        ]),

        # calculate motion regressors
        (rest_noise_input, motion_regs, [("motion_params", "motion_params")]),

        # create motion and confound regressors parameters file
        (art, motart_pars, [
            ("norm_files", "comp_norm"),
            ("outlier_files", "outliers"),
        ]),
        (motion_regs, motart_pars, [("out_files", "motion_params")]),

        # motion filtering
        (rest_noise_input, motion_filter, [
            ("in_file", "in_file"),
            (("in_file", rename, "_filtermotart"), "out_res_name"),
        ]),
        (motart_pars, motion_filter, [(("out_files", selectindex, [0]),
                                       "design")]),

        # output
        (tsnr, rest_noise_output, [("tsnr_file", "tsnr_file")]),
        (motart_pars, rest_noise_output, [("out_files", "motion_regressors")]),
        (motion_filter, rest_noise_output, [("out_res", "motion_corrected")]),
        (art, rest_noise_output, [
            ("displacement_files", "art_displacement_files"),
            ("intensity_files", "art_intensity_files"),
            ("norm_files", "art_norm_files"),
            ("outlier_files", "art_outlier_files"),
            ("plot_files", "art_plot_files"),
            ("statistic_files", "art_statistic_files"),
        ]),
    ])

    last_filter = motion_filter

    # compcor filter
    if filters['compcor_csf'] or filters['compcor_wm']:
        wf.connect([
            # calculate compcor regressor and parameters file
            (motart_pars, compcor_pars, [
                (("out_files", selectindex, [0]), "extra_regressors"),
            ]),
            (motion_filter, compcor_pars, [
                ("out_res", "realigned_file"),
            ]),

            # the compcor filter
            (motion_filter, compcor_filter, [
                ("out_res", "in_file"),
                (("out_res", rename, "_cleaned"), "out_res_name"),
            ]),
            (compcor_pars, compcor_filter, [(("out_files", selectindex, [0]),
                                             "design")]),
            #(compcor_pars,     compcor_filter,    [("components_file",  "design")]),
            (rest_noise_input, compcor_filter, [("brain_mask", "mask")]),

            # output
            (compcor_pars, rest_noise_output, [("out_files",
                                                "compcor_regressors")]),
            #(compcor_pars,     rest_noise_output, [("components_file",   "compcor_regressors")]),
        ])
        last_filter = compcor_filter

    # global signal regression
    if filters['gsr']:
        wf.connect([
            # calculate gsr regressors parameters file
            (last_filter, gsr_pars, [("out_res", "realigned_file")]),
            (rest_noise_input, gsr_pars, [("brain_mask", "mask_file")]),

            # the output file name
            (rest_noise_input, gsr_filter, [("brain_mask", "mask")]),
            (last_filter, gsr_filter, [
                ("out_res", "in_file"),
                (("out_res", rename, "_gsr"), "out_res_name"),
            ]),
            (gsr_pars, gsr_filter, [(("out_files", selectindex, [0]), "design")
                                    ]),

            # output
            (gsr_pars, rest_noise_output, [("out_files", "gsr_regressors")]),
        ])
        last_filter = gsr_filter

    # connect the final nuisance correction output node
    wf.connect([
        (last_filter, rest_noise_output, [("out_res", "nuis_corrected")]),
    ])

    if filters['compcor_csf'] and filters['compcor_wm']:
        mask_merge = setup_node(Merge(2), name="mask_merge")
        wf.connect([
            ## the mask for the compcor filter
            (rest_noise_input, mask_merge, [("wm_mask", "in1")]),
            (rest_noise_input, mask_merge, [("csf_mask", "in2")]),
            (mask_merge, compcor_pars, [("out", "mask_file")]),
        ])

    elif filters['compcor_csf']:
        wf.connect([
            ## the mask for the compcor filter
            (rest_noise_input, compcor_pars, [("csf_mask", "mask_file")]),
        ])

    elif filters['compcor_wm']:
        wf.connect([
            ## the mask for the compcor filter
            (rest_noise_input, compcor_pars, [("wm_mask", "mask_file")]),
        ])

    return wf
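
The connections above use two small helper functions, rename and selectindex, that are not part of this snippet. Judging only from how they are called here, hypothetical reconstructions might look roughly like this:

def rename(in_files, suffix=None):
    """Return the input file name(s) with a suffix appended (e.g. '_filtermotart')."""
    import os.path as op
    from nipype.utils.filemanip import filename_to_list, split_filename
    out_files = []
    for in_file in filename_to_list(in_files):
        _, name, ext = split_filename(in_file)
        out_files.append(op.abspath(name + (suffix or '') + ext))
    return out_files if len(out_files) > 1 else out_files[0]


def selectindex(files, idx):
    """Pick the element(s) at the given position(s) from a list of files."""
    import numpy as np
    from nipype.utils.filemanip import filename_to_list
    return np.atleast_1d(np.array(filename_to_list(files))[idx]).tolist()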
Example #5
import os
from os.path import basename
from nipype.algorithms.confounds import TSNR
from nilearn.image import math_img

for res_path in ['res_02_tsnr', 'res_02_mean']:
    if not os.path.exists(res_path):
        os.makedirs(res_path)

task = 'MGT'

for sidx in [1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22]:
    for ridx in range(1, 5):
        for i, in_file in enumerate([
                'fmriflows/preproc_func/sub-{0:03d}/sub-{0:03d}_task-{2}_run-{1:02d}_tFilter_5.0.100.0_sFilter_LP_0.0mm.nii.gz',
                'fmriflows/preproc_func/sub-{0:03d}/sub-{0:03d}_task-{2}_run-{1:02d}_tFilter_None.100.0_sFilter_LP_0.0mm.nii.gz',
                'fmriprep/sub-{0:03d}/func/sub-{0:03d}_task-{2}_run-{1:02d}_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz',
                'fsl_feat/sub-{0:03d}/sub-{0:03d}_task-{2}_run-{1:02d}_bold_norm.nii.gz',
                'spm/sub-{0:03d}/wsub-{0:03d}_task-{2}_run-{1:02d}_bold.nii.gz',
        ]):

            in_file = in_file.format(sidx, ridx, task)
            file_name = basename(in_file).replace('.nii.gz', '')
            out_tsnr = 'res_02_tsnr/tsnr_%s.nii.gz' % file_name
            out_mean = 'res_02_mean/mean_%s.nii.gz' % file_name

            tsnr = TSNR(regress_poly=2,
                        in_file=in_file,
                        tsnr_file=out_tsnr,
                        mean_file=out_mean)
            res = tsnr.run()
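
After the loop, each pipeline and run has its tSNR map under res_02_tsnr. A short hedged follow-up for collapsing one of those maps to a single value (the mask path is hypothetical; any binary brain mask in the same space would do):

import nibabel as nb
import numpy as np

mask = nb.load('brain_mask_MNI.nii.gz').get_fdata() > 0   # hypothetical mask file
tsnr_map = nb.load(out_tsnr).get_fdata()                  # one of the maps written by the loop above
median_tsnr = float(np.median(tsnr_map[mask]))
print(out_tsnr, median_tsnr)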
Example #6
def init_bold_preproc_report_wf(mem_gb,
                                reportlets_dir,
                                name="bold_preproc_report_wf"):
    """
    Generate a visual report.

    This workflow generates and saves a reportlet showing the effect of resampling
    the BOLD signal using the standard deviation maps.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fprodents.workflows.bold.resampling import init_bold_preproc_report_wf
            wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='.')

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB
    reportlets_dir : :obj:`str`
        Directory in which to save reportlets
    name : :obj:`str`, optional
        Workflow name (default: bold_preproc_report_wf)

    Inputs
    ------
    in_pre
        BOLD time-series, before resampling
    in_post
        BOLD time-series, after resampling
    name_source
        BOLD series NIfTI file
        Used to recover original information lost during processing

    """
    from nipype.algorithms.confounds import TSNR
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.reportlets.registration import (
        SimpleBeforeAfterRPT as SimpleBeforeAfter)
    from ...interfaces import DerivativesDataSink

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=["in_pre", "in_post", "name_source"]),
        name="inputnode",
    )

    pre_tsnr = pe.Node(TSNR(), name="pre_tsnr", mem_gb=mem_gb * 4.5)
    pos_tsnr = pe.Node(TSNR(), name="pos_tsnr", mem_gb=mem_gb * 4.5)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name="bold_rpt", mem_gb=0.1)
    ds_report_bold = pe.Node(
        DerivativesDataSink(
            base_directory=reportlets_dir,
            desc="preproc",
            datatype="figures",
            dismiss_entities=("echo", ),
        ),
        name="ds_report_bold",
        mem_gb=DEFAULT_MEMORY_MIN_GB,
        run_without_submitting=True,
    )

    # fmt:off
    workflow.connect([
        (inputnode, ds_report_bold, [('name_source', 'source_file')]),
        (inputnode, pre_tsnr, [('in_pre', 'in_file')]),
        (inputnode, pos_tsnr, [('in_post', 'in_file')]),
        (pre_tsnr, bold_rpt, [('stddev_file', 'before')]),
        (pos_tsnr, bold_rpt, [('stddev_file', 'after')]),
        (bold_rpt, ds_report_bold, [('out_report', 'in_file')]),
    ])
    # fmt:on

    return workflow
Example #7
def fmri_qc_workflow(name='funcMRIQC'):
    """
    Initialize the (f)MRIQC workflow.

    .. workflow::

        import os.path as op
        from mriqc.workflows.functional import fmri_qc_workflow
        from mriqc.testing import mock_config
        with mock_config():
            wf = fmri_qc_workflow()

    """
    from nipype.interfaces.afni import TStat
    from nipype.algorithms.confounds import TSNR, NonSteadyStateDetector
    from niworkflows.interfaces.utils import SanitizeImage

    workflow = pe.Workflow(name=name)

    mem_gb = config.workflow.biggest_file_gb

    dataset = config.workflow.inputs.get("bold", [])
    config.loggers.workflow.info(f"""\
Building functional MRIQC workflow for files: {', '.join(dataset)}.""")

    # Define workflow, inputs and outputs
    # 0. Get data, put it in RAS orientation
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    inputnode.iterables = [('in_file', dataset)]

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['qc', 'mosaic', 'out_group', 'out_dvars', 'out_fd']),
                         name='outputnode')

    non_steady_state_detector = pe.Node(NonSteadyStateDetector(),
                                        name="non_steady_state_detector")

    sanitize = pe.Node(SanitizeImage(), name="sanitize", mem_gb=mem_gb * 4.0)
    sanitize.inputs.max_32bit = config.execution.float32

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    hmcwf = hmc()

    # Set HMC settings
    hmcwf.inputs.inputnode.fd_radius = config.workflow.fd_radius

    # 2. Compute mean fmri
    mean = pe.Node(TStat(options='-mean', outputtype='NIFTI_GZ'),
                   name='mean',
                   mem_gb=mem_gb * 1.5)
    skullstrip_epi = fmri_bmsk_workflow()

    # EPI to MNI registration
    ema = epi_mni_align()

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(TSNR(), name='compute_tsnr', mem_gb=mem_gb * 2.5)

    # 7. Compute IQMs
    iqmswf = compute_iqms()
    # Reports
    repwf = individual_reports()

    workflow.connect([
        (inputnode, iqmswf, [('in_file', 'inputnode.in_file')]),
        (inputnode, sanitize, [('in_file', 'in_file')]),
        (inputnode, non_steady_state_detector, [('in_file', 'in_file')]),
        (non_steady_state_detector, sanitize, [('n_volumes_to_discard',
                                                'n_volumes_to_discard')]),
        (sanitize, hmcwf, [('out_file', 'inputnode.in_file')]),
        (mean, skullstrip_epi, [('out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (mean, ema, [('out_file', 'inputnode.epi_mean')]),
        (skullstrip_epi, ema, [('outputnode.out_file', 'inputnode.epi_mask')]),
        (sanitize, iqmswf, [('out_file', 'inputnode.in_ras')]),
        (mean, iqmswf, [('out_file', 'inputnode.epi_mean')]),
        (hmcwf, iqmswf, [('outputnode.out_file', 'inputnode.hmc_epi'),
                         ('outputnode.out_fd', 'inputnode.hmc_fd')]),
        (skullstrip_epi, iqmswf, [('outputnode.out_file',
                                   'inputnode.brainmask')]),
        (tsnr, iqmswf, [('tsnr_file', 'inputnode.in_tsnr')]),
        (sanitize, repwf, [('out_file', 'inputnode.in_ras')]),
        (mean, repwf, [('out_file', 'inputnode.epi_mean')]),
        (tsnr, repwf, [('stddev_file', 'inputnode.in_stddev')]),
        (skullstrip_epi, repwf, [('outputnode.out_file', 'inputnode.brainmask')
                                 ]),
        (hmcwf, repwf, [('outputnode.out_fd', 'inputnode.hmc_fd'),
                        ('outputnode.out_file', 'inputnode.hmc_epi')]),
        (ema, repwf, [('outputnode.epi_parc', 'inputnode.epi_parc'),
                      ('outputnode.report', 'inputnode.mni_report')]),
        (non_steady_state_detector, iqmswf, [('n_volumes_to_discard',
                                              'inputnode.exclude_index')]),
        (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms'),
                         ('outputnode.out_dvars', 'inputnode.in_dvars'),
                         ('outputnode.outliers', 'inputnode.outliers')]),
        (hmcwf, outputnode, [('outputnode.out_fd', 'out_fd')]),
    ])

    if config.workflow.fft_spikes_detector:
        workflow.connect([
            (iqmswf, repwf, [('outputnode.out_spikes', 'inputnode.in_spikes'),
                             ('outputnode.out_fft', 'inputnode.in_fft')]),
        ])

    if config.workflow.ica:
        from niworkflows.interfaces import segmentation as nws
        melodic = pe.Node(nws.MELODICRPT(no_bet=True,
                                         no_mask=True,
                                         no_mm=True,
                                         compress_report=False,
                                         generate_report=True),
                          name="ICA",
                          mem_gb=max(mem_gb * 5, 8))
        workflow.connect([(sanitize, melodic, [('out_file', 'in_files')]),
                          (skullstrip_epi, melodic, [('outputnode.out_file',
                                                      'report_mask')]),
                          (melodic, repwf, [('out_report',
                                             'inputnode.ica_report')])])

    # Upload metrics
    if not config.execution.no_sub:
        from ..interfaces.webapi import UploadIQMs
        upldwf = pe.Node(UploadIQMs(), name='UploadMetrics')
        upldwf.inputs.url = config.execution.webapi_url
        upldwf.inputs.strict = config.execution.upload_strict
        if config.execution.webapi_port:
            upldwf.inputs.port = config.execution.webapi_port

        workflow.connect([
            (iqmswf, upldwf, [('outputnode.out_file', 'in_iqms')]),
        ])

    return workflow
Example #8
wf = pe.Workflow(base_dir=workflow_dirs,
                 name='tsnr_{}_{}'.format(subject, session))

inputnode = pe.Node(niu.IdentityInterface(fields=['preproc', 'mask']),
                    name='inputnode')

inputnode.inputs.preproc = preproc
inputnode.inputs.mask = mask

adder = pe.MapNode(fsl.ImageMaths(op_string='-add 100'),
                   iterfield=['in_file'],
                   name='adder')
wf.connect(inputnode, 'preproc', adder, 'in_file')

tsnr = pe.MapNode(TSNR(), iterfield=['in_file'], name='tsnr')

wf.connect(adder, 'out_file', tsnr, 'in_file')

masker = pe.MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='masker')

wf.connect(tsnr, 'tsnr_file', masker, 'in_file')
wf.connect(inputnode, 'mask', masker, 'mask_file')


def invert(in_file):
    from nilearn import image
    from nipype.utils.filemanip import split_filename
    import os.path as op
    _, fn, ext = split_filename(in_file)
Example #9
def fmri_qc_workflow(name="funcMRIQC"):
    """
    Initialize the (f)MRIQC workflow.

    .. workflow::

        import os.path as op
        from mriqc.workflows.functional import fmri_qc_workflow
        from mriqc.testing import mock_config
        with mock_config():
            wf = fmri_qc_workflow()

    """
    from nipype.algorithms.confounds import TSNR, NonSteadyStateDetector
    from nipype.interfaces.afni import TStat
    from niworkflows.interfaces.header import SanitizeImage

    workflow = pe.Workflow(name=name)

    mem_gb = config.workflow.biggest_file_gb

    dataset = config.workflow.inputs.get("bold", [])
    config.loggers.workflow.info(f"""\
Building functional MRIQC workflow for files: {", ".join(dataset)}.""")

    # Define workflow, inputs and outputs
    # 0. Get data, put it in RAS orientation
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]),
                        name="inputnode")
    inputnode.iterables = [("in_file", dataset)]

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["qc", "mosaic", "out_group", "out_dvars", "out_fd"]),
        name="outputnode",
    )

    non_steady_state_detector = pe.Node(NonSteadyStateDetector(),
                                        name="non_steady_state_detector")

    sanitize = pe.Node(SanitizeImage(), name="sanitize", mem_gb=mem_gb * 4.0)
    sanitize.inputs.max_32bit = config.execution.float32

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    hmcwf = hmc()

    # Set HMC settings
    hmcwf.inputs.inputnode.fd_radius = config.workflow.fd_radius

    # 2. Compute mean fmri
    mean = pe.Node(
        TStat(options="-mean", outputtype="NIFTI_GZ"),
        name="mean",
        mem_gb=mem_gb * 1.5,
    )
    skullstrip_epi = fmri_bmsk_workflow()

    # EPI to MNI registration
    ema = epi_mni_align()

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(TSNR(), name="compute_tsnr", mem_gb=mem_gb * 2.5)

    # 7. Compute IQMs
    iqmswf = compute_iqms()
    # Reports
    repwf = individual_reports()

    # fmt: off

    workflow.connect([
        (inputnode, iqmswf, [("in_file", "inputnode.in_file")]),
        (inputnode, sanitize, [("in_file", "in_file")]),
        (inputnode, non_steady_state_detector, [("in_file", "in_file")]),
        (non_steady_state_detector, sanitize, [("n_volumes_to_discard",
                                                "n_volumes_to_discard")]),
        (sanitize, hmcwf, [("out_file", "inputnode.in_file")]),
        (mean, skullstrip_epi, [("out_file", "inputnode.in_file")]),
        (hmcwf, mean, [("outputnode.out_file", "in_file")]),
        (hmcwf, tsnr, [("outputnode.out_file", "in_file")]),
        (mean, ema, [("out_file", "inputnode.epi_mean")]),
        (skullstrip_epi, ema, [("outputnode.out_file", "inputnode.epi_mask")]),
        (sanitize, iqmswf, [("out_file", "inputnode.in_ras")]),
        (mean, iqmswf, [("out_file", "inputnode.epi_mean")]),
        (hmcwf, iqmswf, [("outputnode.out_file", "inputnode.hmc_epi"),
                         ("outputnode.out_fd", "inputnode.hmc_fd")]),
        (skullstrip_epi, iqmswf, [("outputnode.out_file",
                                   "inputnode.brainmask")]),
        (tsnr, iqmswf, [("tsnr_file", "inputnode.in_tsnr")]),
        (sanitize, repwf, [("out_file", "inputnode.in_ras")]),
        (mean, repwf, [("out_file", "inputnode.epi_mean")]),
        (tsnr, repwf, [("stddev_file", "inputnode.in_stddev")]),
        (skullstrip_epi, repwf, [("outputnode.out_file", "inputnode.brainmask")
                                 ]),
        (hmcwf, repwf, [("outputnode.out_fd", "inputnode.hmc_fd"),
                        ("outputnode.out_file", "inputnode.hmc_epi")]),
        (ema, repwf, [("outputnode.epi_parc", "inputnode.epi_parc"),
                      ("outputnode.report", "inputnode.mni_report")]),
        (non_steady_state_detector, iqmswf, [("n_volumes_to_discard",
                                              "inputnode.exclude_index")]),
        (iqmswf, repwf, [("outputnode.out_file", "inputnode.in_iqms"),
                         ("outputnode.out_dvars", "inputnode.in_dvars"),
                         ("outputnode.outliers", "inputnode.outliers")]),
        (hmcwf, outputnode, [("outputnode.out_fd", "out_fd")]),
    ])
    # fmt: on

    if config.workflow.fft_spikes_detector:
        # fmt: off
        workflow.connect([
            (iqmswf, repwf, [("outputnode.out_spikes", "inputnode.in_spikes"),
                             ("outputnode.out_fft", "inputnode.in_fft")]),
        ])
        # fmt: on

    if config.workflow.ica:
        from niworkflows.interfaces.reportlets.segmentation import MELODICRPT

        melodic = pe.Node(
            MELODICRPT(
                no_bet=True,
                no_mask=True,
                no_mm=True,
                compress_report=False,
                generate_report=True,
            ),
            name="ICA",
            mem_gb=max(mem_gb * 5, 8),
        )
        # fmt: off
        workflow.connect([(sanitize, melodic, [("out_file", "in_files")]),
                          (skullstrip_epi, melodic, [("outputnode.out_file",
                                                      "report_mask")]),
                          (melodic, repwf, [("out_report",
                                             "inputnode.ica_report")])])
        # fmt: on

    # Upload metrics
    if not config.execution.no_sub:
        from mriqc.interfaces.webapi import UploadIQMs

        upldwf = pe.Node(UploadIQMs(), name="UploadMetrics")
        upldwf.inputs.url = config.execution.webapi_url
        upldwf.inputs.strict = config.execution.upload_strict
        if config.execution.webapi_port:
            upldwf.inputs.port = config.execution.webapi_port

        # fmt: off
        workflow.connect([
            (iqmswf, upldwf, [("outputnode.out_file", "in_iqms")]),
        ])
        # fmt: on

    return workflow
Example #10
def main(derivatives, ds):

    func_template = {
        'func':
        op.join(
            derivatives, ds, 'fmriprep', 'sub-{subject}', 'func',
            'sub-{subject}_task-randomdotmotion_run-*_space-T1w_desc-preproc_bold.nii.gz'
        ),
        'boldref':
        op.join(
            derivatives, ds, 'fmriprep', 'sub-{subject}', 'func',
            'sub-{subject}_task-randomdotmotion_run-*_space-T1w_boldref.nii.gz'
        ),
    }

    if ds == 'ds-01':
        mask_fn = 'sub-{subject}_space-FLASH_desc-{mask}_space-T1w.nii.gz'
        subjects = ['{:02d}'.format(si) for si in range(1, 20)]
    elif ds == 'ds-02':
        mask_fn = 'sub-{subject}_desc-{mask}_mask.nii.gz'
        subjects = ['{:02d}'.format(si) for si in range(1, 16)]
        subjects.pop(3)
        subjects.pop(0)

    mask_template = {
        'mask':
        op.join(derivatives, ds, 'conjunct_masks', 'sub-{subject}', 'anat',
                mask_fn)
    }

    wf = pe.Workflow(name='get_tsnr_{}'.format(ds),
                     base_dir='/tmp/workflow_folders')

    identity = pe.Node(niu.IdentityInterface(fields=['subject']),
                       name='identity')
    identity.iterables = [('subject', subjects)]

    func_selector = pe.Node(nio.SelectFiles(func_template),
                            name='func_selector')
    func_selector.inputs.subject = '01'

    mask_identity = pe.Node(niu.IdentityInterface(fields=['mask']),
                            name='mask_identity')
    mask_identity.iterables = [('mask', ['stnl', 'stnr'])]

    mask_selector = pe.Node(nio.SelectFiles(mask_template),
                            name='mask_selector')
    wf.connect(identity, 'subject', mask_selector, 'subject')
    wf.connect(mask_identity, 'mask', mask_selector, 'mask')

    tsnr = pe.MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')

    wf.connect(identity, 'subject', func_selector, 'subject')
    wf.connect(func_selector, 'func', tsnr, 'in_file')

    def resample_to_img(source_file, target_file):
        import os.path as op
        from nilearn import image
        from nipype.utils.filemanip import split_filename

        _, fn, ext = split_filename(source_file)

        new_fn = op.abspath('{}_resampled{}'.format(fn, ext))
        im = image.resample_to_img(source_file,
                                   target_file,
                                   interpolation='nearest')

        im.to_filename(new_fn)

        return new_fn

    resampler = pe.Node(niu.Function(
        function=resample_to_img,
        input_names=['source_file', 'target_file'],
        output_names=['resampled_file']),
                        name='resampler')

    wf.connect(mask_selector, 'mask', resampler, 'source_file')
    wf.connect(func_selector, 'boldref', resampler, 'target_file')

    extractor = pe.MapNode(fsl.ImageMeants(),
                           iterfield=['in_file'],
                           name='extractor')
    wf.connect(resampler, 'resampled_file', extractor, 'mask')
    wf.connect(tsnr, 'tsnr_file', extractor, 'in_file')

    ds_tsnrmaps = pe.MapNode(bids.DerivativesDataSink(base_directory=op.join(
        derivatives, ds),
                                                      suffix='tsnr',
                                                      out_path_base='tsnr'),
                             iterfield=['source_file', 'in_file'],
                             name='datasink_tsnrmaps')
    wf.connect(func_selector, 'func', ds_tsnrmaps, 'source_file')
    wf.connect(tsnr, 'tsnr_file', ds_tsnrmaps, 'in_file')

    ds_values_stn = pe.MapNode(bids.DerivativesDataSink(base_directory=op.join(
        derivatives, ds),
                                                        suffix='tsnr',
                                                        out_path_base='tsnr'),
                               iterfield=['source_file', 'in_file'],
                               name='ds_values_stnr')
    wf.connect(func_selector, 'func', ds_values_stn, 'source_file')
    wf.connect(extractor, 'out_file', ds_values_stn, 'in_file')
    wf.connect(mask_identity, 'mask', ds_values_stn, 'desc')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
Example #11
def make_subject_qc(population, workspace):
    print('========================================================================================')
    print('')
    print('                    Tourettome - 006. QUALITY CONTROL                                   ')
    print('')
    print('========================================================================================')

    count = 0
    for subject in population:

        count += 1

        print('%s. Running Quality Control for subject %s' % (count, subject))

        site_id = subject[0:2]
        subdir  = os.path.join(workspace, subject)
        qcdir   = mkdir_path(os.path.join(workspace, subject, 'QUALITY_CONTROL'))
        os.chdir(qcdir)

        df = pd.DataFrame(index=['%s' % subject])

        # EXTRACT ANATOMICAL AND FUNCTIONAL IMAGE QUALITY METRICS

        if not os.path.isfile(os.path.join(qcdir, 'quality_paramters.csv')):

            ############################################################################################
            #  Anatomical measures

            # Load data
            anat       = nb.load(os.path.join(subdir, 'RAW',        'ANATOMICAL.nii.gz' )).get_data()
            anat_mask  = nb.load(os.path.join(subdir, 'ANATOMICAL', 'ANATOMICAL_BRAIN_MASK.nii.gz' )).get_data()
            anat_gm    = nb.load(os.path.join(subdir, 'ANATOMICAL', 'seg_spm/c1ANATOMICAL.nii' )).get_data()
            anat_wm    = nb.load(os.path.join(subdir, 'ANATOMICAL', 'seg_spm/c2ANATOMICAL.nii' )).get_data()
            anat_csf   = nb.load(os.path.join(subdir, 'ANATOMICAL', 'seg_spm/c3ANATOMICAL.nii' )).get_data()

            # Intermediate measures
            anat_fg_mu, anat_fg_sd, anat_fg_size    = summary_mask(anat, anat_mask)
            anat_gm_mu, anat_gm_sd, anat_gm_size    = summary_mask(anat, np.where(anat_gm > 0.5, 1, 0 ))
            anat_wm_mu, anat_wm_sd, anat_wm_size    = summary_mask(anat, np.where(anat_wm > 0.5, 1, 0 ))
            anat_csf_mu, anat_csf_sd, anat_csf_size = summary_mask(anat, np.where(anat_csf > 0.5, 1, 0))
            anat_bg_data, anat_bg_mask              = get_background(anat, anat_mask)
            anat_bg_mu, anat_bg_sd, anat_bg_size    = summary_mask(anat, anat_bg_mask)

            # Calculate spatial anatomical summary measures
            df.loc[subject, 'qc_anat_cjv']  = mriqca.cjv(anat_wm_mu, anat_gm_mu, anat_wm_sd, anat_gm_sd)
            df.loc[subject, 'qc_anat_cnr']  = mriqca.cnr(anat_wm_mu, anat_gm_mu, anat_bg_sd)
            df.loc[subject, 'qc_anat_snr']  = mriqca.snr(anat_fg_mu, anat_fg_sd, anat_fg_size)
            df.loc[subject, 'qc_anat_snrd'] = mriqca.snr_dietrich(anat_fg_mu, anat_bg_sd)
            df.loc[subject, 'qc_anat_efc']  = mriqca.efc(anat)
            df.loc[subject, 'qc_anat_fber'] = mriqca.fber(anat, anat_mask)
            # df.loc[subject]['qc_anat_fwhm'] = fwhm(os.path.join(subdir, 'RAW','ANATOMICAL.nii.gz' ),
            #                                        os.path.join(subdir, 'ANATOMICAL', 'ANATOMICAL_BRAIN_MASK.nii.gz'),out_vox=False)

            ############################################################################################
            # Functional measures

            # Load data
            func      =  np.mean(nb.load(os.path.join(subdir, 'FUNCTIONAL', 'REST_EDIT.nii.gz' )).get_data(), axis =3)
            func_mask =  nb.load(os.path.join(subdir, 'FUNCTIONAL', 'REST_BRAIN_MASK.nii.gz' )).get_data()
            movpar    =  os.path.join(subdir, 'FUNCTIONAL', 'moco/REST_EDIT_moco2.par')

            # Calculate spatial functional summary measures
            func_fg_mu, func_fg_sd, func_fg_size = summary_mask(func, func_mask)
            df.loc[subject, 'qc_func_snr']  = mriqca.snr(func_fg_mu, func_fg_sd, func_fg_size)
            df.loc[subject, 'qc_func_efc']  = mriqca.efc(func)
            df.loc[subject, 'qc_func_fber'] = mriqca.fber(func, func_mask)
            # df.loc[subject]['qc_func_fwhm'] = fwhm(func, func_mask, out_vox=False)

            # Calculate temporal functional summary measures
            FD1D          = np.loadtxt(calculate_FD_Power(movpar))
            frames_in     = [frame for frame, val in enumerate(FD1D) if val < 0.2]
            quat          = int(len(FD1D) / 4)
            fd_in_percent = (float(len(frames_in)) / float(len(FD1D))) * 100.

            df.loc[subject, 'qc_func_fd']     = str(np.round(np.mean(FD1D), 3))
            df.loc[subject, 'qc_func_fd_in']  = str(np.round(fd_in_percent, 2))
            df.loc[subject, 'qc_func_fd_max'] = str(np.round(np.max(FD1D), 3))
            df.loc[subject, 'qc_func_fd_q4']  = str(np.round(np.mean(np.sort(FD1D)[::-1][:quat]), 3))
            df.loc[subject, 'qc_func_fd_rms'] = str(np.round(np.sqrt(np.mean(FD1D)), 3))

            # Calculate DVARS
            func_proc = os.path.join(subdir, 'REGISTRATION', 'REST_EDIT_UNI_BRAIN_MNI2mm.nii.gz')
            func_gm = os.path.join(subdir, 'REGISTRATION', 'ANATOMICAL_GM_MNI2mm.nii.gz')
            df.loc[subject, 'qc_func_dvars']    = np.mean(np.load(calculate_DVARS(func_proc, func_gm)))

            # Calculate TSNR map
            if not os.path.isfile(os.path.join(qcdir, 'tsnr.nii.gz')):
                 tsnr = TSNR()
                 tsnr.inputs.in_file = os.path.join(subdir, 'FUNCTIONAL', 'REST_EDIT.nii.gz')
                 tsnr.run()
                 # os.system('flirt -in tsnr -ref ../ANATOMICAL/ANATOMICAL -applxfm -init ../REGISTRATION/reg_anat/rest2anat_2.mat -out tsnr2anat')

            if not os.path.isfile('TSNR_data.npy'):
                 tsnr_data = nb.load('./tsnr.nii.gz').get_data()
                 nan_mask = np.logical_not(np.isnan(tsnr_data))
                 mask = func_mask > 0
                 data = tsnr_data[np.logical_and(nan_mask, mask)]
                 np.save(os.path.join(os.getcwd(), 'TSNR_data.npy'), data)


            df.loc[subject, 'qc_func_tsnr'] = str(np.round(np.median(np.load('TSNR_data.npy')), 3))

            df.to_csv('quality_paramters.csv')

        ############################################################################################
        #  Make Image Quality Plots

        if not os.path.isfile(os.path.join(qcdir, 'plot_func_tsnr.png')):

            # 1. anat brain mask
            plot_quality(os.path.join(subdir, 'RAW', 'ANATOMICAL.nii.gz'),
                         os.path.join(subdir, 'ANATOMICAL', 'ANATOMICAL_BRAIN_MASK.nii.gz'),
                         subject[0:2], '%s-anat_brain_mask' % subject, 'r', alpha=0.9, title='plot_anat_brain_mask.png')

            # 2. anat gm seg
            plot_quality(os.path.join(subdir, 'RAW', 'ANATOMICAL.nii.gz'),
                         os.path.join(subdir, 'ANATOMICAL', 'ANATOMICAL_GM.nii.gz'),
                         subject[0:2], '%s-anat_gm_seg' % subject, 'r', alpha=0.9, title='plot_anat_gm_seg.png')

            # 3. anat2mni
            plot_quality(mni_head_1mm, os.path.join(subdir, 'REGISTRATION', 'ANATOMICAL_GM_MNI1mm.nii.gz'),
                         'MNI', '%s-anat_gm_seg' % subject, 'r', alpha=0.9, title='plot_anat2mni.png',
                         tissue2=os.path.join(subdir, 'REGISTRATION', 'ANATOMICAL_CSF_MNI1mm.nii.gz'))

            # 4. func2mni
            plot_quality(os.path.join(subdir, 'REGISTRATION', 'REST_EDIT_MOCO_BRAIN_MEAN_BBR_ANAT1mm.nii.gz'),
                         os.path.join(subdir, 'ANATOMICAL', 'ANATOMICAL_GM.nii.gz'),
                         subject[0:2], '%s-func2mni' % subject, 'r', alpha=0.9, title='plot_func2anat.png')

            # 5. func_tsnr
            plot_quality(os.path.join(subdir, 'QUALITY_CONTROL', 'tsnr.nii.gz'), None,
                         'TSNR', '%s-func_tsnr' % subject, 'r', alpha=0.9, title='plot_func_tsnr.png')

        # 6. plot FD, DVARS, CARPET

        resid = nb.load(os.path.join(subdir, 'DENOISE/residuals_compcor/residual_bp_z.nii.gz')).get_data().astype(np.float32)
        gm = resid[nb.load(os.path.join(subdir, 'DENOISE/tissue_signals/gm_mask.nii.gz')).get_data().astype('bool')]
        wm = resid[nb.load(os.path.join(subdir, 'DENOISE/tissue_signals/wm_mask.nii.gz')).get_data().astype('bool')]
        cm = resid[nb.load(os.path.join(subdir, 'DENOISE/tissue_signals/csf_mask.nii.gz')).get_data().astype('bool')]
        fd = np.loadtxt(os.path.join(subdir, 'QUALITY_CONTROL/FD.1D'))
        dv = np.load(os.path.join(subdir, 'QUALITY_CONTROL/DVARS.npy'))

        if not os.path.isfile(os.path.join(qcdir,'xplot_func_motion.png')):
            plot_temporal(gm, wm, cm, fd, dv, os.path.join(qcdir,'plot_func_motion.png'))
Example #12
def init_asl_preproc_report_wf(mem_gb,
                               reportlets_dir,
                               name='asl_preproc_report_wf'):
    """
    Generate a visual report.

    This workflow generates and saves a reportlet showing the effect of resampling
    the ASL signal using the standard deviation maps.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.asl.resampling import init_asl_preproc_report_wf
            wf = init_asl_preproc_report_wf(mem_gb=1, reportlets_dir='.')

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of ASL file in GB
    reportlets_dir : :obj:`str`
        Directory in which to save reportlets
    name : :obj:`str`, optional
        Workflow name (default: asl_preproc_report_wf)

    Inputs
    ------
    in_pre
        ASL time-series, before resampling
    in_post
        ASL time-series, after resampling
    name_source
        ASL series NIfTI file
        Used to recover original information lost during processing

    """
    from nipype.algorithms.confounds import TSNR
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.interfaces import SimpleBeforeAfter
    from ...interfaces import DerivativesDataSink

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_pre', 'in_post', 'name_source']),
        name='inputnode')

    pre_tsnr = pe.Node(TSNR(), name='pre_tsnr', mem_gb=mem_gb * 4.5)
    pos_tsnr = pe.Node(TSNR(), name='pos_tsnr', mem_gb=mem_gb * 4.5)

    asl_rpt = pe.Node(SimpleBeforeAfter(), name='asl_rpt', mem_gb=0.1)
    ds_report_asl = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                                desc='preproc',
                                                datatype="figures",
                                                dismiss_entities=("echo", )),
                            name='ds_report_asl',
                            mem_gb=DEFAULT_MEMORY_MIN_GB,
                            run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_report_asl, [('name_source', 'source_file')]),
        (inputnode, pre_tsnr, [('in_pre', 'in_file')]),
        (inputnode, pos_tsnr, [('in_post', 'in_file')]),
        (pre_tsnr, asl_rpt, [('stddev_file', 'before')]),
        (pos_tsnr, asl_rpt, [('stddev_file', 'after')]),
        (asl_rpt, ds_report_asl, [('out_report', 'in_file')]),
    ])

    return workflow
Example #13
import os.path as op
from collections import defaultdict
from pathlib import Path
from nipype.algorithms.confounds import TSNR

filelist = sf.run().outputs  # sf: a SelectFiles node defined earlier (not shown here)
filelist.func

# Build a nested default dict with a lambda, so arbitrarily deep keys can be assigned.
img_dict = lambda: defaultdict(img_dict)
results = img_dict()

for f in filelist.func:
    print(f)
    tsnr = TSNR()
    tsnr.inputs.in_file = f
    scanDir = op.dirname(f)
    scan = op.basename(f).split('.')[0]
#    src = str(Path(scanDir, scan + '.nii'))
#    dest = str(Path(scanDir, "cp_" + scan + '.nii'))
#    copy2(src, dest)  # To make sure you don't have to run the normalization again.

    # print(scanDir, scan)
    tsnr.inputs.tsnr_file = str(Path(scanDir, "tsnr_" + scan + '_tsnr.nii'))
    tsnr.inputs.stddev_file = str(Path(scanDir, "tsnr_" + scan + '_std.nii'))
    tsnr.inputs.mean_file = str(Path(scanDir, "tsnr_" + scan + '_mean.nii'))
    tsnr.inputs.detrended_file = str(Path(scanDir, "tsnr_" + scan + '_detrended.nii'))
    res = tsnr.run().outputs

    for key in ['stddev_file', 'mean_file', 'tsnr_file']:
        # Assumed intent: collect the output paths in the nested results dict.
        results[scanDir][scan][key] = getattr(res, key)

### MVPA preprocessing pipeline

from os.path import join as opj
from nipype import Node, Workflow
from nipype.algorithms.confounds import TSNR
from nipype.interfaces.afni import Despike
from nipype.interfaces.freesurfer import ApplyVolTransform
from nipype.interfaces.fsl import BinaryMaths, ImageStats

# Transform node - apply the volume transformation into FreeSurfer target space
applyVolReg = Node(ApplyVolTransform(fs_target=True, no_resample=True),
                   name='applyVolReg')

# Despike node - despike data
despike = Node(Despike(outputtype='NIFTI'),
               name='despike')

# TSNR node - remove polynomials 2nd order
tsnr = Node(TSNR(regress_poly=2),
            name='tsnr')

# Demean node - demean data
demean = Node(BinaryMaths(operation='sub'),
              name='demean')

# Standard deviation node - calculate standard deviation
standev = Node(ImageStats(op_string='-S'), name='standev')

# Z-standardize node - z-standardize data
zstandardize = Node(BinaryMaths(operation='div'), name='zstandardize')

# Create the MVPA preprocessing workflow
preproc_ALPACA_MVPA = Workflow(name='preproc_ALPACA_MVPA')
preproc_ALPACA_MVPA.base_dir = opj(experiment_dir, working_dir)
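
The listing stops before the nodes above are wired together. Purely as an illustrative sketch (the actual connections in the original pipeline may differ), the detrend/demean/z-standardization chain could be expressed like this:

# Hypothetical wiring: detrend with TSNR, subtract the mean, then divide by the
# temporal standard deviation to obtain z-standardized data.
preproc_ALPACA_MVPA.connect([
    (applyVolReg, despike, [('transformed_file', 'in_file')]),
    (despike, tsnr, [('out_file', 'in_file')]),
    (tsnr, demean, [('detrended_file', 'in_file'),
                    ('mean_file', 'operand_file')]),
    (tsnr, standev, [('detrended_file', 'in_file')]),
    (demean, zstandardize, [('out_file', 'in_file')]),
    (standev, zstandardize, [('out_stat', 'operand_value')]),
])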