Example No. 1
def test_dvars(tmpdir):
    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
    dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'),
                         in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
                         save_all=True,
                         intensity_normalization=0)
    os.chdir(str(tmpdir))
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_all, skiprows=1)
    assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05

    dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'),
                         in_mask=example_data(
                             'ds003_sub-01_mc_brainmask.nii.gz'),
                         save_all=True)
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_all, skiprows=1)
    assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) > 0.05

    assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05
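With save_all=True, ComputeDVARS writes a combined out_all file whose three columns are the standardized, the non-standardized, and the voxel-wise standardized DVARS series (the header row, skipped by skiprows=1, gives the exact order). The flipped > 0.05 assertion in the second block is intentional: with the default intensity normalization re-enabled, the non-standardized series is rescaled and is expected not to match a ground truth generated without normalization. A minimal sketch of unpacking the columns (the file name here is hypothetical):

import numpy as np

# Columns of out_all: std DVARS, non-std DVARS, voxel-wise std DVARS
dv = np.loadtxt('dvars_all.tsv', skiprows=1)  # skip the header row
std_dvars, nstd_dvars, vxstd_dvars = dv[:, 0], dv[:, 1], dv[:, 2]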
Example No. 2
def test_dvars(tmpdir):
    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
    dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'),
                         in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
                         save_all=True)
    os.chdir(str(tmpdir))
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_std)
    assert (np.abs(dv1 - ground_truth).sum() / len(dv1)) < 0.05
Example No. 3
def test_dvars(tmpdir):
    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
    dvars = ComputeDVARS(
        in_file=example_data('ds003_sub-01_mc.nii.gz'),
        in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
        save_all=True)
    os.chdir(str(tmpdir))
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_std)
    assert (np.abs(dv1 - ground_truth).sum() / len(dv1)) < 0.05
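For reference, DVARS is the root-mean-square, across in-mask voxels, of the frame-to-frame signal difference (Power et al., 2012). A stripped-down sketch of the unnormalized quantity, assuming a 4D numpy array and a 3D boolean mask (the actual interface additionally applies intensity normalization, variance correction, and standardization):

import numpy as np

def dvars_basic(data, mask):
    """data: 4D array (x, y, z, t); mask: 3D boolean array."""
    ts = data[mask]                              # voxels x timepoints
    diff = np.diff(ts, axis=1)                   # frame-to-frame differences
    return np.sqrt(np.mean(diff ** 2, axis=0))   # one value per transition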
Example No. 4
def test_dvars():
    tempdir = mkdtemp()
    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
    dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'),
                         in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
                         save_all=True)

    origdir = os.getcwd()
    os.chdir(tempdir)

    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_std)
    yield assert_equal, (np.abs(dv1 - ground_truth).sum() / len(dv1)) < 0.05, True

    os.chdir(origdir)
    rmtree(tempdir)
Example No. 5
def test_dvars(tmpdir):
    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
    dvars = ComputeDVARS(
        in_file=example_data('ds003_sub-01_mc.nii.gz'),
        in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
        save_all=True,
        intensity_normalization=0)
    os.chdir(str(tmpdir))
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_all, skiprows=1)
    assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05

    dvars = ComputeDVARS(
        in_file=example_data('ds003_sub-01_mc.nii.gz'),
        in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
        save_all=True)
    res = dvars.run()

    dv1 = np.loadtxt(res.outputs.out_all, skiprows=1)
    assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05

    assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) > 0.05

    assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05
Example No. 6
def compute_iqms(name='ComputeIQMs'):
    """
    Initialize the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.functional import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from nipype.algorithms.confounds import ComputeDVARS
    from nipype.interfaces.afni import QualityIndex, OutlierCount
    from niworkflows.interfaces.bids import ReadSidecarJSON

    from .utils import get_fwhmx, _tofloat
    from ..interfaces.transitional import GCOR
    from ..interfaces import FunctionalQC, IQMFileSink
    from ..interfaces.reports import AddProvenance

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd',
        'fd_thres', 'in_tsnr', 'metadata', 'exclude_index'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    # Compute DVARS
    dvnode = pe.Node(ComputeDVARS(save_plot=False, save_all=True),
                     name='ComputeDVARS',
                     mem_gb=mem_gb * 3)

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers',
                       mem_gb=mem_gb * 2.5)

    quality = pe.Node(QualityIndex(automask=True, out_file='quality.out'),
                      name='quality',
                      mem_gb=mem_gb * 3)

    gcor = pe.Node(GCOR(), name='gcor', mem_gb=mem_gb * 2)

    measures = pe.Node(FunctionalQC(), name='measures', mem_gb=mem_gb * 3)

    workflow.connect([(inputnode, dvnode, [('hmc_epi', 'in_file'),
                                           ('brainmask', 'in_mask')]),
                      (inputnode, measures, [('epi_mean', 'in_epi'),
                                             ('brainmask', 'in_mask'),
                                             ('hmc_epi', 'in_hmc'),
                                             ('hmc_fd', 'in_fd'),
                                             ('fd_thres', 'fd_thres'),
                                             ('in_tsnr', 'in_tsnr')]),
                      (inputnode, fwhm, [('epi_mean', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, quality, [('hmc_epi', 'in_file')]),
                      (inputnode, outliers, [('hmc_epi', 'in_file'),
                                             ('brainmask', 'mask')]),
                      (inputnode, gcor, [('hmc_epi', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (dvnode, measures, [('out_all', 'in_dvars')]),
                      (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
                      (dvnode, outputnode, [('out_all', 'out_dvars')]),
                      (outliers, outputnode, [('out_file', 'outliers')])])

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(),
                   name='metadata',
                   run_without_submitting=True)
    addprov = pe.Node(AddProvenance(modality="bold"),
                      name='provenance',
                      run_without_submitting=True)

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(modality='bold',
                                   out_dir=str(config.execution.output_dir),
                                   dataset=config.execution.dsname),
                       name='datasink',
                       run_without_submitting=True)

    workflow.connect([
        (inputnode, datasink, [('in_file', 'in_file'),
                               ('exclude_index', 'dummy_trs')]),
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, addprov, [('in_file', 'in_file')]),
        (meta, datasink, [('subject', 'subject_id'), ('session', 'session_id'),
                          ('task', 'task_id'), ('acquisition', 'acq_id'),
                          ('reconstruction', 'rec_id'), ('run', 'run_id'),
                          ('out_dict', 'metadata')]),
        (addprov, datasink, [('out_prov', 'provenance')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (gcor, datasink, [(('out', _tofloat), 'gcor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    # FFT spikes finder
    if config.workflow.fft_spikes_detector:
        from .utils import slice_wise_fft
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft),
                             name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('in_ras', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])
    return workflow
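The docstring's .. workflow:: block doubles as a usage recipe; a hedged sketch of building and inspecting the workflow outside the documentation build:

from mriqc.testing import mock_config
from mriqc.workflows.functional import compute_iqms

with mock_config():                       # stub out mriqc's global config
    wf = compute_iqms()
    wf.write_graph(graph2use='colored')   # render the wiring before running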
Example No. 7
def create_confound_workflow(name='confound'):

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'par_file', 'fast_files', 'highres2epi_mat',
        'n_comp_tcompcor', 'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(fields=[
        'all_confounds',
    ]),
                          name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    compute_DVARS = pe.MapNode(ComputeDVARS(save_all=True,
                                            remove_zerovariance=True),
                               iterfield=['in_file', 'in_mask'],
                               name='compute_DVARS')

    motion_wf = create_motion_confound_workflow(order=2)

    confound_wf = pe.Workflow(name=name)
    confound_wf.connect(input_node, 'par_file', motion_wf,
                        'inputspec.par_file')
    confound_wf.connect(input_node, 'sub_id', motion_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', motion_wf,
                        'inputspec.output_directory')

    compcor_wf = create_compcor_workflow()
    confound_wf.connect(input_node, 'in_file', compcor_wf, 'inputspec.in_file')
    confound_wf.connect(input_node, 'fast_files', compcor_wf,
                        'inputspec.fast_files')
    confound_wf.connect(input_node, 'highres2epi_mat', compcor_wf,
                        'inputspec.highres2epi_mat')
    confound_wf.connect(input_node, 'n_comp_tcompcor', compcor_wf,
                        'inputspec.n_comp_tcompcor')
    confound_wf.connect(input_node, 'n_comp_acompcor', compcor_wf,
                        'inputspec.n_comp_acompcor')
    confound_wf.connect(input_node, 'sub_id', compcor_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', compcor_wf,
                        'inputspec.output_directory')

    confound_wf.connect(compcor_wf, 'outputspec.epi_mask', compute_DVARS,
                        'in_mask')
    confound_wf.connect(input_node, 'in_file', compute_DVARS, 'in_file')

    concat = pe.MapNode(Concat_confound_files,
                        iterfield=['ext_par_file', 'fd_file', 'dvars_file'],
                        name='concat')

    confound_wf.connect(motion_wf, 'outputspec.out_ext_moco', concat,
                        'ext_par_file')
    confound_wf.connect(motion_wf, 'outputspec.out_fd', concat, 'fd_file')
    confound_wf.connect(compcor_wf, 'outputspec.acompcor_file', concat,
                        'acompcor_file')
    #confound_wf.connect(compcor_wf, 'outputspec.tcompcor_file', concat,
    #                    'tcompcor_file')
    confound_wf.connect(compute_DVARS, 'out_all', concat, 'dvars_file')
    confound_wf.connect(input_node, 'sub_id', datasink, 'sub_id')
    confound_wf.connect(input_node, 'output_directory', datasink,
                        'base_directory')
    confound_wf.connect(concat, 'out_file', datasink, 'confounds')

    return confound_wf
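Because compute_DVARS is a MapNode with iterfield=['in_file', 'in_mask'], it expects two equal-length lists and runs the interface once per (file, mask) pair. A minimal standalone sketch with hypothetical paths:

import nipype.pipeline.engine as pe
from nipype.algorithms.confounds import ComputeDVARS

dvars = pe.MapNode(ComputeDVARS(save_all=True, remove_zerovariance=True),
                   iterfield=['in_file', 'in_mask'],
                   name='dvars')
dvars.inputs.in_file = ['run-1_bold.nii.gz', 'run-2_bold.nii.gz']  # hypothetical
dvars.inputs.in_mask = ['run-1_mask.nii.gz', 'run-2_mask.nii.gz']  # hypothetical
res = dvars.run()  # res.outputs.out_all is then a list, one file per run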
Example No. 8
def compute_iqms(name="ComputeIQMs"):
    """
    Initialize the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.functional import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from nipype.algorithms.confounds import ComputeDVARS
    from nipype.interfaces.afni import OutlierCount, QualityIndex
    from niworkflows.interfaces.bids import ReadSidecarJSON

    from mriqc.interfaces import FunctionalQC, IQMFileSink
    from mriqc.interfaces.reports import AddProvenance
    from mriqc.interfaces.transitional import GCOR
    from mriqc.workflows.utils import _tofloat, get_fwhmx

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "in_file",
            "in_ras",
            "epi_mean",
            "brainmask",
            "hmc_epi",
            "hmc_fd",
            "fd_thres",
            "in_tsnr",
            "metadata",
            "exclude_index",
        ]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "out_file",
            "out_dvars",
            "outliers",
            "out_spikes",
            "out_fft",
        ]),
        name="outputnode",
    )

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    # Compute DVARS
    dvnode = pe.Node(
        ComputeDVARS(save_plot=False, save_all=True),
        name="ComputeDVARS",
        mem_gb=mem_gb * 3,
    )

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name="smoothness")
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(
        OutlierCount(fraction=True, out_file="outliers.out"),
        name="outliers",
        mem_gb=mem_gb * 2.5,
    )

    quality = pe.Node(
        QualityIndex(automask=True, out_file="quality.out"),
        name="quality",
        mem_gb=mem_gb * 3,
    )

    gcor = pe.Node(GCOR(), name="gcor", mem_gb=mem_gb * 2)

    measures = pe.Node(FunctionalQC(), name="measures", mem_gb=mem_gb * 3)

    # fmt: off
    workflow.connect([(inputnode, dvnode, [("hmc_epi", "in_file"),
                                           ("brainmask", "in_mask")]),
                      (inputnode, measures, [("epi_mean", "in_epi"),
                                             ("brainmask", "in_mask"),
                                             ("hmc_epi", "in_hmc"),
                                             ("hmc_fd", "in_fd"),
                                             ("fd_thres", "fd_thres"),
                                             ("in_tsnr", "in_tsnr")]),
                      (inputnode, fwhm, [("epi_mean", "in_file"),
                                         ("brainmask", "mask")]),
                      (inputnode, quality, [("hmc_epi", "in_file")]),
                      (inputnode, outliers, [("hmc_epi", "in_file"),
                                             ("brainmask", "mask")]),
                      (inputnode, gcor, [("hmc_epi", "in_file"),
                                         ("brainmask", "mask")]),
                      (dvnode, measures, [("out_all", "in_dvars")]),
                      (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]),
                      (dvnode, outputnode, [("out_all", "out_dvars")]),
                      (outliers, outputnode, [("out_file", "outliers")])])
    # fmt: on

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(),
                   name="metadata",
                   run_without_submitting=True)
    addprov = pe.Node(
        AddProvenance(modality="bold"),
        name="provenance",
        run_without_submitting=True,
    )

    # Save to JSON file
    datasink = pe.Node(
        IQMFileSink(
            modality="bold",
            out_dir=str(config.execution.output_dir),
            dataset=config.execution.dsname,
        ),
        name="datasink",
        run_without_submitting=True,
    )

    # fmt: off
    workflow.connect([
        (inputnode, datasink, [("in_file", "in_file"),
                               ("exclude_index", "dummy_trs")]),
        (inputnode, meta, [("in_file", "in_file")]),
        (inputnode, addprov, [("in_file", "in_file")]),
        (meta, datasink, [("subject", "subject_id"), ("session", "session_id"),
                          ("task", "task_id"), ("acquisition", "acq_id"),
                          ("reconstruction", "rec_id"), ("run", "run_id"),
                          ("out_dict", "metadata")]),
        (addprov, datasink, [("out_prov", "provenance")]),
        (outliers, datasink, [(("out_file", _parse_tout), "aor")]),
        (gcor, datasink, [(("out", _tofloat), "gcor")]),
        (quality, datasink, [(("out_file", _parse_tqual), "aqi")]),
        (measures, datasink, [("out_qc", "root")]),
        (datasink, outputnode, [("out_file", "out_file")])
    ])
    # fmt: on

    # FFT spikes finder
    if config.workflow.fft_spikes_detector:
        from .utils import slice_wise_fft

        spikes_fft = pe.Node(
            niu.Function(
                input_names=["in_file"],
                output_names=["n_spikes", "out_spikes", "out_fft"],
                function=slice_wise_fft,
            ),
            name="SpikesFinderFFT",
        )

        # fmt: off
        workflow.connect([
            (inputnode, spikes_fft, [("in_ras", "in_file")]),
            (spikes_fft, outputnode, [("out_spikes", "out_spikes"),
                                      ("out_fft", "out_fft")]),
            (spikes_fft, datasink, [("n_spikes", "spikes_num")])
        ])
        # fmt: on

    return workflow
Example No. 9
def main(sourcedata,
         derivatives,
         tmp_dir,
         subject=None,
         session=None,
         run=None):

    print(subject, session, run)

    layout = BIDSLayout(sourcedata)
    derivatives_layout = BIDSLayout('/derivatives/spynoza', validate=False)

    cortex_l = get_derivative(derivatives,
                              'nighres',
                              'anat',
                              subject,
                              'dseg',
                              session='anat',
                              space='average',
                              description='cortex',
                              hemi='left')

    cortex_r = get_derivative(derivatives,
                              'nighres',
                              'anat',
                              subject,
                              'dseg',
                              session='anat',
                              space='average',
                              description='cortex',
                              hemi='right')

    mask = derivatives_layout.get(subject=subject,
                                  session=session,
                                  suffix='mask',
                                  return_type='file')
    mask = sorted(mask)
    assert (len(mask) == 1)
    mask = mask[0]

    bold = derivatives_layout.get(subject=subject,
                                  session=session,
                                  suffix='preproc',
                                  return_type='file')
    bold = sorted(bold)
    print('BOLD: {}'.format(bold))
    print('MASK: {}'.format(mask))

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['cortex_l', 'cortex_r', 'bold', 'mask']),
        name='inputnode')

    inputnode.inputs.cortex_l = cortex_l
    inputnode.inputs.cortex_r = cortex_r
    inputnode.inputs.bold = bold
    inputnode.inputs.mask = mask

    get_masks = pe.MapNode(niu.Function(
        function=get_brain_regions_cruise,
        input_names=['cortex_l', 'cortex_r', 'type'],
        output_names=['out']),
                           iterfield=['type'],
                           name='get_masks')
    get_masks.inputs.type = ['csf', 'wm']

    wf = pe.Workflow(name='get_confounds_{}_{}'.format(subject, session),
                     base_dir='/workflow_folders')
    wf.connect(inputnode, 'cortex_l', get_masks, 'cortex_l')
    wf.connect(inputnode, 'cortex_r', get_masks, 'cortex_r')

    resampler = pe.MapNode(niu.Function(
        function=resample_img,
        input_names=['input_image', 'ref_image', 'interpolation'],
        output_names=['resampled_image'],
    ),
                           iterfield=['input_image'],
                           name='resampler')

    wf.connect(inputnode, ('bold', pickfirst), resampler, 'ref_image')
    wf.connect(get_masks, 'out', resampler, 'input_image')

    compcorr = pe.MapNode(ACompCor(merge_method='union'),
                          iterfield=['realigned_file'],
                          name='acompcorr')

    wf.connect(resampler, 'resampled_image', compcorr, 'mask_files')
    wf.connect(inputnode, 'bold', compcorr, 'realigned_file')

    dvars = pe.MapNode(ComputeDVARS(), iterfield=['in_file'], name='dvars')
    wf.connect(inputnode, 'mask', dvars, 'in_mask')
    wf.connect(inputnode, 'bold', dvars, 'in_file')

    add_header = pe.MapNode(AddTSVHeader(columns=["dvars"]),
                            iterfield=['in_file'],
                            name="add_header_dvars")
    wf.connect(dvars, 'out_std', add_header, 'in_file')

    concat = pe.MapNode(GatherConfounds(),
                        iterfield=['acompcor', 'dvars'],
                        name="concat")
    wf.connect(add_header, 'out_file', concat, 'dvars')
    wf.connect(compcorr, 'components_file', concat, 'acompcor')

    ds_confounds = pe.MapNode(DerivativesDataSink(out_path_base='spynoza',
                                                  suffix='confounds_compcor',
                                                  base_directory=derivatives),
                              iterfield=['in_file', 'source_file'],
                              name='ds_reg_report')

    wf.connect(inputnode, 'bold', ds_confounds, 'source_file')
    wf.connect(concat, 'confounds_file', ds_confounds, 'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': 10})
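merge_method='union' in the ACompCor node above pools the voxels of the resampled CSF and WM masks into a single noise mask before extracting components. A standalone sketch with hypothetical paths:

from nipype.algorithms.confounds import ACompCor

acc = ACompCor(realigned_file='sub-01_bold.nii.gz',               # hypothetical
               mask_files=['csf_mask.nii.gz', 'wm_mask.nii.gz'],  # hypothetical
               merge_method='union')  # pool both masks into one noise mask
res = acc.run()
print(res.outputs.components_file)  # TSV of nuisance regressors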