Code example #1
File: test_bids.py  Project: bpinsard/niworkflows
def test_ReadSidecarJSON_connection(testdata_dir, field):
    """
    This test prevents regressions of #333
    """
    import pytest
    from nipype.pipeline import engine as pe
    from nipype.interfaces import utility as niu
    from niworkflows.interfaces.bids import ReadSidecarJSON

    reg_fields = ['RepetitionTime']
    n = pe.Node(ReadSidecarJSON(fields=reg_fields), name='node')
    n.inputs.in_file = str(testdata_dir / 'ds054' / 'sub-100185' / 'fmap' /
                           'sub-100185_phasediff.nii.gz')
    o = pe.Node(niu.IdentityInterface(fields=['out_port']), name='o')
    wf = pe.Workflow(name='json')

    if field in reg_fields:  # This should work
        wf.connect([
            (n, o, [(field, 'out_port')]),
        ])
    else:
        with pytest.raises(Exception,
                           match=r'.*Some connections were not found.*'):
            wf.connect([
                (n, o, [(field, 'out_port')]),
            ])
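
What the test pins down: names passed via fields= become real output ports of
ReadSidecarJSON, while any other name is rejected at wf.connect time. As a
minimal standalone sketch (assuming the ds054 test data sit at the relative
path shown), each requested sidecar field surfaces as a named output next to
the parsed BIDS entities:

from niworkflows.interfaces.bids import ReadSidecarJSON

meta = ReadSidecarJSON(fields=['RepetitionTime'])
meta.inputs.in_file = 'ds054/sub-100185/fmap/sub-100185_phasediff.nii.gz'
res = meta.run()                   # parses the filename and the JSON sidecar
print(res.outputs.subject)         # BIDS entity parsed from the path: '100185'
print(res.outputs.RepetitionTime)  # the requested sidecar field
print(res.outputs.out_dict)        # the complete sidecar as a dict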
Code example #2
def init_phdiff_wf(omp_nthreads, phasetype='phasediff', name='phdiff_wf'):
    """
    Estimates the fieldmap using a phase-difference image and one or more
    magnitude images corresponding to two or more :abbr:`GRE (Gradient Echo sequence)`
    acquisitions. The `original code was taken from nipype
    <https://github.com/nipy/nipype/blob/master/nipype/workflows/dmri/fsl/artifacts.py#L514>`_.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.phdiff import init_phdiff_wf
        wf = init_phdiff_wf(omp_nthreads=1)


    Outputs::

      outputnode.fmap_ref - The average magnitude image, skull-stripped
      outputnode.fmap_mask - The brain mask applied to the fieldmap
      outputnode.fmap - The estimated fieldmap in Hz


    """

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on a field map that was co-registered to the BOLD reference,
using a custom workflow of *fMRIPrep* derived from D. Greve's `epidewarp.fsl`
[script](http://www.nmr.mgh.harvard.edu/~greve/fbirn/b0/epidewarp.fsl) and
further improvements of HCP Pipelines [@hcppipelines].
"""

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'phasediff']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    def _pick1st(inlist):
        return inlist[0]

    # Read phasediff echo times
    meta = pe.Node(ReadSidecarJSON(),
                   name='meta',
                   mem_gb=0.01,
                   run_without_submitting=True)

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                 name='n4',
                 n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_report_fmap_mask = pe.Node(DerivativesDataSink(desc='brain',
                                                      suffix='mask'),
                                  name='ds_report_fmap_mask',
                                  mem_gb=0.01,
                                  run_without_submitting=True)

    # uses mask from bet; outputs a mask
    # dilate = pe.Node(fsl.maths.MathsCommand(
    #     nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')

    # FSL PRELUDE will perform phase-unwrapping
    prelude = pe.Node(fsl.PRELUDE(), name='prelude')

    denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                        kernel_shape='sphere',
                                        kernel_size=5),
                      name='denoise')

    demean = pe.Node(niu.Function(function=demean_image), name='demean')

    cleanup_wf = cleanup_edge_pipeline(name="cleanup_wf")

    compfmap = pe.Node(Phasediff2Fieldmap(), name='compfmap')

    # The phdiff2fmap interface is equivalent to:
    # rad2rsec (using rads2radsec from nipype.workflows.dmri.fsl.utils)
    # pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='ComputeFieldmapFUGUE')
    # rsec2hz (divide by 2pi)

    if phasetype == "phasediff":
        # Read phasediff echo times
        meta = pe.Node(ReadSidecarJSON(), name='meta', mem_gb=0.01)

        # phase diff -> radians
        pha2rads = pe.Node(niu.Function(function=siemens2rads),
                           name='pha2rads')
        # Read phasediff echo times
        meta = pe.Node(ReadSidecarJSON(),
                       name='meta',
                       mem_gb=0.01,
                       run_without_submitting=True)
        workflow.connect([
            (meta, compfmap, [('out_dict', 'metadata')]),
            (inputnode, pha2rads, [('phasediff', 'in_file')]),
            (pha2rads, prelude, [('out', 'phase_file')]),
            (inputnode, ds_report_fmap_mask, [('phasediff', 'source_file')]),
        ])

    elif phasetype == "phase":
        workflow.__desc__ += """\
The phase difference used for unwarping was calculated using two separate phase measurements
 [@pncprocessing].
    """
        # Special case for phase1, phase2 images
        meta = pe.MapNode(ReadSidecarJSON(),
                          name='meta',
                          mem_gb=0.01,
                          run_without_submitting=True,
                          iterfield=['in_file'])
        phases2fmap = pe.Node(Phases2Fieldmap(), name='phases2fmap')
        workflow.connect([
            (meta, phases2fmap, [('out_dict', 'metadatas')]),
            (inputnode, phases2fmap, [('phasediff', 'phase_files')]),
            (phases2fmap, prelude, [('out_file', 'phase_file')]),
            (phases2fmap, compfmap, [('phasediff_metadata', 'metadata')]),
            (phases2fmap, ds_report_fmap_mask, [('out_file', 'source_file')])
        ])

    workflow.connect([
        (inputnode, meta, [('phasediff', 'in_file')]),
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (magmrg, n4, [('out_avg', 'input_image')]),
        (n4, prelude, [('output_image', 'magnitude_file')]),
        (n4, bet, [('output_image', 'in_file')]),
        (bet, prelude, [('mask_file', 'mask_file')]),
        (prelude, denoise, [('unwrapped_phase_file', 'in_file')]),
        (denoise, demean, [('out_file', 'in_file')]),
        (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
        (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
        (cleanup_wf, compfmap, [('outputnode.out_file', 'in_file')]),
        (compfmap, outputnode, [('out_file', 'fmap')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (bet, ds_report_fmap_mask, [('out_report', 'in_file')]),
    ])

    return workflow
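
The comments above compress the fieldmap arithmetic into two steps (rad2rsec,
then divide by 2*pi). As a hedged, self-contained sketch (the function below
is illustrative, not fmriprep's Phasediff2Fieldmap interface), the computation
reduces to dividing the unwrapped phase difference by 2*pi times the
echo-time difference read from the sidecar:

import numpy as np

def phasediff_to_hz(phasediff_rad, te1, te2):
    """Unwrapped phase difference (radians) -> fieldmap in Hz."""
    delta_te = abs(te2 - te1)  # echo-time difference, in seconds
    return phasediff_rad / (2.0 * np.pi * delta_te)

# A full 2*pi of phase evolution over a 2.46 ms echo difference is ~406 Hz:
print(phasediff_to_hz(np.array([2 * np.pi]), 0.00492, 0.00738))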
Code example #3
def compute_iqms(name="ComputeIQMs"):
    """
    Set up the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.anatomical import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from niworkflows.interfaces.bids import ReadSidecarJSON

    from ..interfaces.anatomical import Harmonize
    from .utils import _tofloat

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "in_file",
            "in_ras",
            "brainmask",
            "airmask",
            "artmask",
            "headmask",
            "rotmask",
            "hatmask",
            "segmentation",
            "inu_corrected",
            "in_inu",
            "pvms",
            "metadata",
            "inverse_composite_transform",
        ]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["out_file", "noisefit"]),
        name="outputnode",
    )

    # Extract metadata
    meta = pe.Node(ReadSidecarJSON(), name="metadata")

    # Add provenance
    addprov = pe.Node(AddProvenance(),
                      name="provenance",
                      run_without_submitting=True)

    # AFNI check smoothing
    fwhm_interface = get_fwhmx()

    fwhm = pe.Node(fwhm_interface, name="smoothness")

    # Harmonize
    homog = pe.Node(Harmonize(), name="harmonize")
    if config.workflow.species.lower() != "human":
        homog.inputs.erodemsk = False
        homog.inputs.thresh = 0.8

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(), name="ComputeQI2")

    # Compute python-coded measures
    measures = pe.Node(
        StructuralQC(human=config.workflow.species.lower() == "human"),
        "measures")

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(
        ants.ApplyTransforms(dimension=3,
                             default_value=0,
                             interpolation="Linear",
                             float=True),
        iterfield=["input_image"],
        name="MNItpms2t1",
    )
    if config.workflow.species.lower() == "human":
        invt.inputs.input_image = [
            str(p) for p in get_template(
                config.workflow.template_id,
                suffix="probseg",
                resolution=1,
                label=["CSF", "GM", "WM"],
            )
        ]
    else:
        invt.inputs.input_image = [
            str(p) for p in get_template(
                config.workflow.template_id,
                suffix="probseg",
                label=["CSF", "GM", "WM"],
            )
        ]

    datasink = pe.Node(
        IQMFileSink(
            out_dir=config.execution.output_dir,
            dataset=config.execution.dsname,
        ),
        name="datasink",
        run_without_submitting=True,
    )

    def _getwm(inlist):
        return inlist[-1]

    # fmt: off
    workflow.connect([
        (inputnode, meta, [("in_file", "in_file")]),
        (inputnode, datasink, [("in_file", "in_file"),
                               (("in_file", _get_mod), "modality")]),
        (inputnode, addprov, [(("in_file", _get_mod), "modality")]),
        (meta, datasink, [("subject", "subject_id"), ("session", "session_id"),
                          ("task", "task_id"), ("acquisition", "acq_id"),
                          ("reconstruction", "rec_id"), ("run", "run_id"),
                          ("out_dict", "metadata")]),
        (inputnode, addprov, [("in_file", "in_file"), ("airmask", "air_msk"),
                              ("rotmask", "rot_msk")]),
        (inputnode, getqi2, [("in_ras", "in_file"), ("hatmask", "air_msk")]),
        (inputnode, homog, [("inu_corrected", "in_file"),
                            (("pvms", _getwm), "wm_mask")]),
        (inputnode, measures, [("in_inu", "in_bias"), ("in_ras", "in_file"),
                               ("airmask", "air_msk"),
                               ("headmask", "head_msk"),
                               ("artmask", "artifact_msk"),
                               ("rotmask", "rot_msk"),
                               ("segmentation", "in_segm"),
                               ("pvms", "in_pvms")]),
        (inputnode, fwhm, [("in_ras", "in_file"), ("brainmask", "mask")]),
        (inputnode, invt, [("in_ras", "reference_image"),
                           ("inverse_composite_transform", "transforms")]),
        (homog, measures, [("out_file", "in_noinu")]),
        (invt, measures, [("output_image", "mni_tpms")]),
        (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]),
        (measures, datasink, [("out_qc", "root")]),
        (addprov, datasink, [("out_prov", "provenance")]),
        (getqi2, datasink, [("qi2", "qi_2")]),
        (getqi2, outputnode, [("out_file", "noisefit")]),
        (datasink, outputnode, [("out_file", "out_file")]),
    ])
    # fmt: on

    return workflow
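
Two conventions in the connect list above are easy to miss: writing a source
as a 2-tuple, e.g. (('pvms', _getwm), 'wm_mask'), routes the output through
the callable before it reaches the destination input, and _getwm relies on
get_template returning the maps in the requested CSF, GM, WM order. A tiny
illustration:

def _getwm(inlist):
    return inlist[-1]  # WM is last in the CSF/GM/WM ordering

tpms = ['csf_probseg.nii.gz', 'gm_probseg.nii.gz', 'wm_probseg.nii.gz']
assert _getwm(tpms) == 'wm_probseg.nii.gz'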
Code example #4
def compute_iqms(name='ComputeIQMs'):
    """
    Set up the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.anatomical import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from niworkflows.interfaces.bids import ReadSidecarJSON
    from .utils import _tofloat
    from ..interfaces.anatomical import Harmonize

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'brainmask', 'airmask', 'artmask', 'headmask',
        'rotmask', 'hatmask', 'segmentation', 'inu_corrected', 'in_inu',
        'pvms', 'metadata', 'inverse_composite_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'noisefit']),
        name='outputnode')

    # Extract metadata
    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    # Add provenance
    addprov = pe.Node(AddProvenance(),
                      name='provenance',
                      run_without_submitting=True)

    # AFNI check smoothing
    fwhm_interface = get_fwhmx()

    fwhm = pe.Node(fwhm_interface, name='smoothness')

    # Harmonize
    homog = pe.Node(Harmonize(), name='harmonize')

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(), name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           interpolation='Linear',
                                           float=True),
                      iterfield=['input_image'],
                      name='MNItpms2t1')
    # NOTE: this variant assumes a module-level `templatedir` string pointing
    # at the NMT (macaque) template directory, with a trailing separator
    invt.inputs.input_image = [
        templatedir + 'NMT_segmentation_CSF.nii.gz',
        templatedir + 'NMT_segmentation_GM.nii.gz',
        templatedir + 'NMT_segmentation_WM.nii.gz',
    ]

    datasink = pe.Node(IQMFileSink(out_dir=config.execution.output_dir,
                                   dataset=config.execution.dsname),
                       name='datasink',
                       run_without_submitting=True)

    def _getwm(inlist):
        return inlist[-1]

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, datasink, [('in_file', 'in_file'),
                               (('in_file', _get_mod), 'modality')]),
        (inputnode, addprov, [(('in_file', _get_mod), 'modality')]),
        (meta, datasink, [('subject', 'subject_id'), ('session', 'session_id'),
                          ('task', 'task_id'), ('acquisition', 'acq_id'),
                          ('reconstruction', 'rec_id'), ('run', 'run_id'),
                          ('out_dict', 'metadata')]),
        (inputnode, addprov, [('in_file', 'in_file'), ('airmask', 'air_msk'),
                              ('rotmask', 'rot_msk')]),
        (inputnode, getqi2, [('in_ras', 'in_file'), ('hatmask', 'air_msk')]),
        (inputnode, homog, [('inu_corrected', 'in_file'),
                            (('pvms', _getwm), 'wm_mask')]),
        (inputnode, measures, [('in_inu', 'in_bias'), ('in_ras', 'in_file'),
                               ('airmask', 'air_msk'),
                               ('headmask', 'head_msk'),
                               ('artmask', 'artifact_msk'),
                               ('rotmask', 'rot_msk'),
                               ('segmentation', 'in_segm'),
                               ('pvms', 'in_pvms')]),
        (inputnode, fwhm, [('in_ras', 'in_file'), ('brainmask', 'mask')]),
        (inputnode, invt, [('in_ras', 'reference_image'),
                           ('inverse_composite_transform', 'transforms')]),
        (homog, measures, [('out_file', 'in_noinu')]),
        (invt, measures, [('output_image', 'mni_tpms')]),
        (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
        (measures, datasink, [('out_qc', 'root')]),
        (addprov, datasink, [('out_prov', 'provenance')]),
        (getqi2, datasink, [('qi2', 'qi_2')]),
        (getqi2, outputnode, [('out_file', 'noisefit')]),
        (datasink, outputnode, [('out_file', 'out_file')]),
    ])
    return workflow
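
Because this variant concatenates templatedir with the segmentation file
names, it only works when the string carries a trailing separator; a safer
construction (a suggestion, not the project's code) joins the paths
explicitly:

import os.path as op

invt.inputs.input_image = [
    op.join(templatedir, 'NMT_segmentation_%s.nii.gz' % tissue)
    for tissue in ('CSF', 'GM', 'WM')
]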
Code example #5
File: functional.py  Project: ylep/mriqc
def compute_iqms(name='ComputeIQMs'):
    """
    Initialize the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.functional import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from nipype.algorithms.confounds import ComputeDVARS
    from nipype.interfaces.afni import QualityIndex, OutlierCount
    from niworkflows.interfaces.bids import ReadSidecarJSON

    from .utils import get_fwhmx, _tofloat
    from ..interfaces.transitional import GCOR
    from ..interfaces import FunctionalQC, IQMFileSink
    from ..interfaces.reports import AddProvenance

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd',
        'fd_thres', 'in_tsnr', 'metadata', 'exclude_index'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    # Compute DVARS
    dvnode = pe.Node(ComputeDVARS(save_plot=False, save_all=True),
                     name='ComputeDVARS',
                     mem_gb=mem_gb * 3)

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers',
                       mem_gb=mem_gb * 2.5)

    quality = pe.Node(QualityIndex(automask=True, out_file='quality.out'),
                      name='quality',
                      mem_gb=mem_gb * 3)

    gcor = pe.Node(GCOR(), name='gcor', mem_gb=mem_gb * 2)

    measures = pe.Node(FunctionalQC(), name='measures', mem_gb=mem_gb * 3)

    workflow.connect([(inputnode, dvnode, [('hmc_epi', 'in_file'),
                                           ('brainmask', 'in_mask')]),
                      (inputnode, measures, [('epi_mean', 'in_epi'),
                                             ('brainmask', 'in_mask'),
                                             ('hmc_epi', 'in_hmc'),
                                             ('hmc_fd', 'in_fd'),
                                             ('fd_thres', 'fd_thres'),
                                             ('in_tsnr', 'in_tsnr')]),
                      (inputnode, fwhm, [('epi_mean', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, quality, [('hmc_epi', 'in_file')]),
                      (inputnode, outliers, [('hmc_epi', 'in_file'),
                                             ('brainmask', 'mask')]),
                      (inputnode, gcor, [('hmc_epi', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (dvnode, measures, [('out_all', 'in_dvars')]),
                      (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
                      (dvnode, outputnode, [('out_all', 'out_dvars')]),
                      (outliers, outputnode, [('out_file', 'outliers')])])

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(),
                   name='metadata',
                   run_without_submitting=True)
    addprov = pe.Node(AddProvenance(modality="bold"),
                      name='provenance',
                      run_without_submitting=True)

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(modality='bold',
                                   out_dir=str(config.execution.output_dir),
                                   dataset=config.execution.dsname),
                       name='datasink',
                       run_without_submitting=True)

    workflow.connect([
        (inputnode, datasink, [('in_file', 'in_file'),
                               ('exclude_index', 'dummy_trs')]),
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, addprov, [('in_file', 'in_file')]),
        (meta, datasink, [('subject', 'subject_id'), ('session', 'session_id'),
                          ('task', 'task_id'), ('acquisition', 'acq_id'),
                          ('reconstruction', 'rec_id'), ('run', 'run_id'),
                          ('out_dict', 'metadata')]),
        (addprov, datasink, [('out_prov', 'provenance')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (gcor, datasink, [(('out', _tofloat), 'gcor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    # FFT spikes finder
    if config.workflow.fft_spikes_detector:
        from .utils import slice_wise_fft
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft),
                             name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('in_ras', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])
    return workflow
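
The datasink connections above pass the AFNI text outputs through _parse_tout
and _parse_tqual, which are defined elsewhere in the module. A plausible
sketch of what they do (not the verbatim mriqc helpers): each file holds one
value per timepoint, the quality file with '++'-prefixed comment lines, and
both reduce to a mean scalar:

import numpy as np

def _parse_tout(in_file):
    data = np.loadtxt(in_file)  # one outlier fraction per timepoint
    return data.mean()

def _parse_tqual(in_file):
    with open(in_file) as fin:
        lines = fin.readlines()
    return np.mean([float(line.strip())
                    for line in lines if not line.startswith('++')])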
Code example #6
File: anatomical.py  Project: verdurin/mriqc
def compute_iqms(settings, modality='T1w', name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

        from mriqc.workflows.anatomical import compute_iqms
        wf = compute_iqms(settings={'output_dir': 'out'})

    """
    from .utils import _tofloat
    from ..interfaces.anatomical import Harmonize

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'brainmask', 'airmask', 'artmask', 'headmask',
        'rotmask', 'hatmask', 'segmentation', 'inu_corrected', 'in_inu',
        'pvms', 'metadata', 'inverse_composite_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'noisefit']),
        name='outputnode')

    # Extract metadata
    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    # Add provenance
    addprov = pe.Node(niu.Function(function=_add_provenance),
                      name='provenance')
    addprov.inputs.settings = {
        'testing': settings.get('testing', False),
        'webapi_url': settings.get('webapi_url'),
        'webapi_port': settings.get('webapi_port')
    }

    # AFNI check smoothing
    fwhm_interface = get_fwhmx()

    fwhm = pe.Node(fwhm_interface, name='smoothness')

    # Harmonize
    homog = pe.Node(Harmonize(), name='harmonize')

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(), name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           interpolation='Linear',
                                           float=True),
                      iterfield=['input_image'],
                      name='MNItpms2t1')
    invt.inputs.input_image = [
        str(p) for p in get_template('MNI152NLin2009cAsym',
                                     suffix='probseg',
                                     resolution=1,
                                     label=['CSF', 'GM', 'WM'])
    ]

    datasink = pe.Node(IQMFileSink(modality=modality,
                                   out_dir=str(settings['output_dir']),
                                   dataset=settings.get(
                                       'dataset_name', 'unknown')),
                       name='datasink',
                       run_without_submitting=True)

    def _getwm(inlist):
        return inlist[-1]

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, datasink, [('in_file', 'in_file')]),
        (meta, datasink, [('subject', 'subject_id'), ('session', 'session_id'),
                          ('task', 'task_id'), ('acquisition', 'acq_id'),
                          ('reconstruction', 'rec_id'), ('run', 'run_id'),
                          ('out_dict', 'metadata')]),
        (inputnode, addprov, [('in_file', 'in_file'), ('airmask', 'air_msk'),
                              ('rotmask', 'rot_msk')]),
        (inputnode, getqi2, [('in_ras', 'in_file'), ('hatmask', 'air_msk')]),
        (inputnode, homog, [('inu_corrected', 'in_file'),
                            (('pvms', _getwm), 'wm_mask')]),
        (inputnode, measures, [('in_inu', 'in_bias'), ('in_ras', 'in_file'),
                               ('airmask', 'air_msk'),
                               ('headmask', 'head_msk'),
                               ('artmask', 'artifact_msk'),
                               ('rotmask', 'rot_msk'),
                               ('segmentation', 'in_segm'),
                               ('pvms', 'in_pvms')]),
        (inputnode, fwhm, [('in_ras', 'in_file'), ('brainmask', 'mask')]),
        (inputnode, invt, [('in_ras', 'reference_image'),
                           ('inverse_composite_transform', 'transforms')]),
        (homog, measures, [('out_file', 'in_noinu')]),
        (invt, measures, [('output_image', 'mni_tpms')]),
        (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
        (measures, datasink, [('out_qc', 'root')]),
        (addprov, datasink, [('out', 'provenance')]),
        (getqi2, datasink, [('qi2', 'qi_2')]),
        (getqi2, outputnode, [('out_file', 'noisefit')]),
        (datasink, outputnode, [('out_file', 'out_file')]),
    ])
    return workflow
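
For reference, the templateflow call populating invt above can be run
standalone (assuming templateflow and its MNI152NLin2009cAsym data are
installed); it returns one path per requested tissue label, in order:

from templateflow.api import get as get_template

tpms = get_template('MNI152NLin2009cAsym', suffix='probseg',
                    resolution=1, label=['CSF', 'GM', 'WM'])
print([p.name for p in tpms])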
Code example #7
def compute_iqms(settings, name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

      from mriqc.workflows.functional import compute_iqms
      wf = compute_iqms(settings={'output_dir': 'out'})


    """
    from .utils import _tofloat
    from ..interfaces.transitional import GCOR

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras',
        'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd', 'fd_thres', 'in_tsnr', 'metadata',
        'exclude_index']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
        name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = settings.get('fd_thres', 0.2)

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True), name='ComputeDVARS',
                     mem_gb=biggest_file_gb * 3)

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers', mem_gb=biggest_file_gb * 2.5)

    quality = pe.Node(afni.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality', mem_gb=biggest_file_gb * 3)

    gcor = pe.Node(GCOR(), name='gcor', mem_gb=biggest_file_gb * 2)

    measures = pe.Node(FunctionalQC(), name='measures', mem_gb=biggest_file_gb * 3)

    workflow.connect([
        (inputnode, dvnode, [('hmc_epi', 'in_file'),
                             ('brainmask', 'in_mask')]),
        (inputnode, measures, [('epi_mean', 'in_epi'),
                               ('brainmask', 'in_mask'),
                               ('hmc_epi', 'in_hmc'),
                               ('hmc_fd', 'in_fd'),
                               ('fd_thres', 'fd_thres'),
                               ('in_tsnr', 'in_tsnr')]),
        (inputnode, fwhm, [('epi_mean', 'in_file'),
                           ('brainmask', 'mask')]),
        (inputnode, quality, [('hmc_epi', 'in_file')]),
        (inputnode, outliers, [('hmc_epi', 'in_file'),
                               ('brainmask', 'mask')]),
        (inputnode, gcor, [('hmc_epi', 'in_file'),
                           ('brainmask', 'mask')]),
        (dvnode, measures, [('out_all', 'in_dvars')]),
        (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
        (dvnode, outputnode, [('out_all', 'out_dvars')]),
        (outliers, outputnode, [('out_file', 'outliers')])
    ])

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(), name='metadata',
                   run_without_submitting=True)
    addprov = pe.Node(niu.Function(function=_add_provenance), name='provenance',
                      run_without_submitting=True)
    addprov.inputs.settings = {
        'fd_thres': settings.get('fd_thres', 0.2),
        'hmc_fsl': settings.get('hmc_fsl', True),
        'webapi_url': settings.get('webapi_url'),
        'webapi_port': settings.get('webapi_port'),
    }

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(
        modality='bold', out_dir=str(settings['output_dir']),
        dataset=settings.get('dataset_name', 'unknown')),
        name='datasink', run_without_submitting=True)

    workflow.connect([
        (inputnode, datasink, [('in_file', 'in_file'),
                               ('exclude_index', 'dummy_trs')]),
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, addprov, [('in_file', 'in_file')]),
        (meta, datasink, [('subject', 'subject_id'),
                          ('session', 'session_id'),
                          ('task', 'task_id'),
                          ('acquisition', 'acq_id'),
                          ('reconstruction', 'rec_id'),
                          ('run', 'run_id'),
                          ('out_dict', 'metadata')]),
        (addprov, datasink, [('out', 'provenance')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (gcor, datasink, [(('out', _tofloat), 'gcor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    # FFT spikes finder
    if settings.get('fft_spikes_detector', False):
        from .utils import slice_wise_fft
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft), name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('in_ras', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])
    return workflow
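
A hypothetical invocation of this settings-based variant (the file names are
placeholders), setting workflow inputs through the inputnode:

wf = compute_iqms(settings={'output_dir': 'out', 'fd_thres': 0.2})
wf.inputs.inputnode.in_file = 'sub-01_task-rest_bold.nii.gz'
wf.inputs.inputnode.hmc_epi = 'sub-01_task-rest_hmc_bold.nii.gz'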
Code example #8
def compute_iqms(name="ComputeIQMs"):
    """
    Initialize the workflow that actually computes the IQMs.

    .. workflow::

        from mriqc.workflows.functional import compute_iqms
        from mriqc.testing import mock_config
        with mock_config():
            wf = compute_iqms()

    """
    from nipype.algorithms.confounds import ComputeDVARS
    from nipype.interfaces.afni import OutlierCount, QualityIndex
    from niworkflows.interfaces.bids import ReadSidecarJSON

    from mriqc.interfaces import FunctionalQC, IQMFileSink
    from mriqc.interfaces.reports import AddProvenance
    from mriqc.interfaces.transitional import GCOR
    from mriqc.workflows.utils import _tofloat, get_fwhmx

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "in_file",
            "in_ras",
            "epi_mean",
            "brainmask",
            "hmc_epi",
            "hmc_fd",
            "fd_thres",
            "in_tsnr",
            "metadata",
            "exclude_index",
        ]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "out_file",
            "out_dvars",
            "outliers",
            "out_spikes",
            "out_fft",
        ]),
        name="outputnode",
    )

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    # Compute DVARS
    dvnode = pe.Node(
        ComputeDVARS(save_plot=False, save_all=True),
        name="ComputeDVARS",
        mem_gb=mem_gb * 3,
    )

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name="smoothness")
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(
        OutlierCount(fraction=True, out_file="outliers.out"),
        name="outliers",
        mem_gb=mem_gb * 2.5,
    )

    quality = pe.Node(
        QualityIndex(automask=True, out_file="quality.out"),
        name="quality",
        mem_gb=mem_gb * 3,
    )

    gcor = pe.Node(GCOR(), name="gcor", mem_gb=mem_gb * 2)

    measures = pe.Node(FunctionalQC(), name="measures", mem_gb=mem_gb * 3)

    # fmt: off
    workflow.connect([(inputnode, dvnode, [("hmc_epi", "in_file"),
                                           ("brainmask", "in_mask")]),
                      (inputnode, measures, [("epi_mean", "in_epi"),
                                             ("brainmask", "in_mask"),
                                             ("hmc_epi", "in_hmc"),
                                             ("hmc_fd", "in_fd"),
                                             ("fd_thres", "fd_thres"),
                                             ("in_tsnr", "in_tsnr")]),
                      (inputnode, fwhm, [("epi_mean", "in_file"),
                                         ("brainmask", "mask")]),
                      (inputnode, quality, [("hmc_epi", "in_file")]),
                      (inputnode, outliers, [("hmc_epi", "in_file"),
                                             ("brainmask", "mask")]),
                      (inputnode, gcor, [("hmc_epi", "in_file"),
                                         ("brainmask", "mask")]),
                      (dvnode, measures, [("out_all", "in_dvars")]),
                      (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]),
                      (dvnode, outputnode, [("out_all", "out_dvars")]),
                      (outliers, outputnode, [("out_file", "outliers")])])
    # fmt: on

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(),
                   name="metadata",
                   run_without_submitting=True)
    addprov = pe.Node(
        AddProvenance(modality="bold"),
        name="provenance",
        run_without_submitting=True,
    )

    # Save to JSON file
    datasink = pe.Node(
        IQMFileSink(
            modality="bold",
            out_dir=str(config.execution.output_dir),
            dataset=config.execution.dsname,
        ),
        name="datasink",
        run_without_submitting=True,
    )

    # fmt: off
    workflow.connect([
        (inputnode, datasink, [("in_file", "in_file"),
                               ("exclude_index", "dummy_trs")]),
        (inputnode, meta, [("in_file", "in_file")]),
        (inputnode, addprov, [("in_file", "in_file")]),
        (meta, datasink, [("subject", "subject_id"), ("session", "session_id"),
                          ("task", "task_id"), ("acquisition", "acq_id"),
                          ("reconstruction", "rec_id"), ("run", "run_id"),
                          ("out_dict", "metadata")]),
        (addprov, datasink, [("out_prov", "provenance")]),
        (outliers, datasink, [(("out_file", _parse_tout), "aor")]),
        (gcor, datasink, [(("out", _tofloat), "gcor")]),
        (quality, datasink, [(("out_file", _parse_tqual), "aqi")]),
        (measures, datasink, [("out_qc", "root")]),
        (datasink, outputnode, [("out_file", "out_file")])
    ])
    # fmt: on

    # FFT spikes finder
    if config.workflow.fft_spikes_detector:
        from .utils import slice_wise_fft

        spikes_fft = pe.Node(
            niu.Function(
                input_names=["in_file"],
                output_names=["n_spikes", "out_spikes", "out_fft"],
                function=slice_wise_fft,
            ),
            name="SpikesFinderFFT",
        )

        # fmt: off
        workflow.connect([
            (inputnode, spikes_fft, [("in_ras", "in_file")]),
            (spikes_fft, outputnode, [("out_spikes", "out_spikes"),
                                      ("out_fft", "out_fft")]),
            (spikes_fft, datasink, [("n_spikes", "spikes_num")])
        ])
        # fmt: on

    return workflow
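
As the docstring indicates, building this config-based variant outside a full
mriqc run requires the throwaway configuration from mriqc.testing:

from mriqc.testing import mock_config
from mriqc.workflows.functional import compute_iqms

with mock_config():
    wf = compute_iqms()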