Esempio n. 1
0
def test_TOPUP_inputs():
    """Check that each TOPUP input trait carries the expected metadata."""
    expected = {
        "args": {"argstr": "%s"},
        "config": {"argstr": "--config=%s", "usedefault": True},
        "encoding_direction": {},
        "encoding_file": {"argstr": "--datain=%s"},
        "environ": {"nohash": True, "usedefault": True},
        "estmov": {"argstr": "--estmov=%d"},
        "fwhm": {"argstr": "--fwhm=%f"},
        "ignore_exception": {"nohash": True, "usedefault": True},
        "in_file": {"argstr": "--imain=%s", "mandatory": True},
        "interp": {"argstr": "--interp=%s"},
        "max_iter": {"argstr": "--miter=%d"},
        "minmet": {"argstr": "--minmet=%d"},
        "numprec": {"argstr": "--numprec=%s"},
        "out_base": {"argstr": "--out=%s"},
        "out_corrected": {"argstr": "--iout=%s"},
        "out_field": {"argstr": "--fout=%s"},
        "out_logfile": {"argstr": "--logout=%s"},
        "output_type": {},
        "readout_times": {},
        "regrid": {"argstr": "--regrid=%d"},
        "scale": {"argstr": "--scale=%d"},
        "splineorder": {"argstr": "--splineorder=%d"},
        "subsamp": {"argstr": "--subsamp=%d"},
        "terminal_output": {"mandatory": True, "nohash": True},
        "warp_res": {"argstr": "--warpres=%f"},
    }
    spec = TOPUP.input_spec()

    # Yield one assertion per (trait, metadata attribute) pair.
    for trait_name, trait_meta in expected.items():
        for attr, want in trait_meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr), want
Esempio n. 2
0
def test_TOPUP_outputs():
    """Check the metadata attached to each TOPUP output trait."""
    expected = {
        "out_corrected": {},
        "out_enc_file": {},
        "out_field": {},
        "out_fieldcoef": {},
        "out_logfile": {},
        "out_movpar": {},
    }
    spec = TOPUP.output_spec()

    # Yield one assertion per (trait, metadata attribute) pair.
    for trait_name, trait_meta in expected.items():
        for attr, want in trait_meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr), want
Esempio n. 3
0
def test_TOPUP_outputs():
    """Verify the (empty) metadata maps of the TOPUP output traits."""
    output_names = (
        "out_corrected",
        "out_enc_file",
        "out_field",
        "out_fieldcoef",
        "out_logfile",
        "out_movpar",
    )
    # Every output trait in this spec is expected to carry no extra metadata.
    output_map = {name: {} for name in output_names}
    outputs = TOPUP.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Esempio n. 4
0
def test_TOPUP_inputs():
    """Verify the metadata of every trait in the TOPUP input specification."""
    expected_meta = {
        'args': {'argstr': '%s'},
        'config': {'argstr': '--config=%s', 'usedefault': True},
        'encoding_direction': {},
        'encoding_file': {'argstr': '--datain=%s'},
        'environ': {'nohash': True, 'usedefault': True},
        'estmov': {'argstr': '--estmov=%d'},
        'fwhm': {'argstr': '--fwhm=%f'},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'in_file': {'argstr': '--imain=%s', 'mandatory': True},
        'interp': {'argstr': '--interp=%s'},
        'max_iter': {'argstr': '--miter=%d'},
        'minmet': {'argstr': '--minmet=%d'},
        'numprec': {'argstr': '--numprec=%s'},
        'out_base': {'argstr': '--out=%s'},
        'out_corrected': {'argstr': '--iout=%s'},
        'out_field': {'argstr': '--fout=%s'},
        'out_logfile': {'argstr': '--logout=%s'},
        'output_type': {},
        'readout_times': {},
        'regrid': {'argstr': '--regrid=%d'},
        'scale': {'argstr': '--scale=%d'},
        'splineorder': {'argstr': '--splineorder=%d'},
        'subsamp': {'argstr': '--subsamp=%d'},
        'terminal_output': {'mandatory': True, 'nohash': True},
        'warp_res': {'argstr': '--warpres=%f'},
    }
    spec = TOPUP.input_spec()

    # Compare every declared metadata attribute against the live trait object.
    for trait_name, meta in expected_meta.items():
        for attr, want in meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr), want
Esempio n. 5
0
def init_pepolar_estimate_wf(debug=False, generate_report=True, name="pepolar_estimate_wf"):
    """
    Initialize a barebones TOPUP implementation.

    Parameters
    ----------
    debug : :obj:`bool`
        If ``True``, use the faster ``b02b0_quick.cnf`` topup configuration.
    generate_report : :obj:`bool`
        Not referenced in this body — presumably a hook for reportlet
        generation; TODO confirm against callers.
    name : :obj:`str`
        Name for this workflow.

    """
    from nipype.interfaces.afni import Automask
    from nipype.interfaces.fsl.epi import TOPUP
    from niworkflows.interfaces.nibabel import MergeSeries
    from sdcflows.interfaces.fmap import get_trt
    from ...interfaces.images import RescaleB0
    wf = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["metadata", "in_data"]),
                        name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(fields=["fieldmap", "corrected", "corrected_mask"]),
                         name="outputnode")

    # Stack the individual EPI inputs into a single 4D series for topup.
    concat_blips = pe.Node(MergeSeries(), name="concat_blips")
    # Compute the total readout time of each input file from its metadata
    # (one mapped iteration per in_meta/in_file pair).
    readout_time = pe.MapNode(niu.Function(
        input_names=["in_meta", "in_file"], function=get_trt), name="readout_time",
        iterfield=["in_meta", "in_file"], run_without_submitting=True
    )

    # ``'_quick' * debug`` appends "_quick" only when debug is truthy,
    # selecting the faster (less accurate) topup configuration file.
    topup = pe.Node(TOPUP(config=_pkg_fname(
        "dmriprep", f"data/flirtsch/b02b0{'_quick' * debug}.cnf")), name="topup")

    # Rough (dilated) mask on topup's corrected series, used to rescale b=0.
    pre_mask = pe.Node(Automask(dilate=1, outputtype="NIFTI_GZ"),
                       name="pre_mask")
    rescale_corrected = pe.Node(RescaleB0(), name="rescale_corrected")
    # Final mask computed on the rescaled reference.
    post_mask = pe.Node(Automask(outputtype="NIFTI_GZ"),
                        name="post_mask")
    wf.connect([
        (inputnode, concat_blips, [("in_data", "in_files")]),
        (inputnode, readout_time, [("in_data", "in_file"),
                                   ("metadata", "in_meta")]),
        (inputnode, topup, [(("metadata", _get_pedir), "encoding_direction")]),
        (readout_time, topup, [("out", "readout_times")]),
        (concat_blips, topup, [("out_file", "in_file")]),
        (topup, pre_mask, [("out_corrected", "in_file")]),
        (pre_mask, rescale_corrected, [("out_file", "mask_file")]),
        (topup, rescale_corrected, [("out_corrected", "in_file")]),
        (topup, outputnode, [("out_field", "fieldmap")]),
        (rescale_corrected, post_mask, [("out_ref", "in_file")]),
        (rescale_corrected, outputnode, [("out_ref", "corrected")]),
        (post_mask, outputnode, [("out_file", "corrected_mask")]),
    ])

    return wf
Esempio n. 6
0
def init_topup_wf(
    grid_reference=0,
    omp_nthreads=1,
    sloppy=False,
    debug=False,
    name="pepolar_estimate_wf",
):
    """
    Create the PEPOLAR field estimation workflow based on FSL's ``topup``.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.pepolar import init_topup_wf
            wf = init_topup_wf()

    Parameters
    ----------
    grid_reference : :obj:`int`
        Index of the volume (after flattening) that will be taken for gridding reference.
    sloppy : :obj:`bool`
        Whether a fast configuration of topup (less accurate) should be applied.
    debug : :obj:`bool`
        Run in debug mode
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    in_data : :obj:`list` of :obj:`str`
        A list of EPI files that will be fed into TOPUP.
    metadata : :obj:`list` of :obj:`dict`
        A list of dictionaries containing the metadata corresponding to each file
        in ``in_data``.

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of files in ``in_data``.
    fmap_mask : :obj:`str`
        The path of mask corresponding to the ``fmap_ref`` output.
    fmap_coeff : :obj:`str` or :obj:`list` of :obj:`str`
        The path(s) of the B-Spline coefficients supporting the fieldmap.
    method: :obj:`str`
        Short description of the estimation method that was run.

    """
    from nipype.interfaces.fsl.epi import TOPUP
    from niworkflows.interfaces.nibabel import MergeSeries
    from niworkflows.interfaces.images import RobustAverage

    from ...utils.misc import front as _front
    from ...interfaces.epi import GetReadoutTime
    from ...interfaces.utils import Flatten, UniformGrid, PadSlices
    from ...interfaces.bspline import TOPUPCoeffReorient
    from ..ancillary import init_brainextraction_wf

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
{_PEPOLAR_DESC} with `topup` (@topup; FSL {TOPUP().version}).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=INPUT_FIELDS),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "fmap",
            "fmap_ref",
            "fmap_coeff",
            "fmap_mask",
            "jacobians",
            "xfms",
            "out_warps",
            "method",
        ]),
        name="outputnode",
    )
    outputnode.inputs.method = "PEB/PEPOLAR (phase-encoding based / PE-POLARity)"

    # Collapse the (possibly nested) lists of files and metadata into flat,
    # index-matched lists.
    flatten = pe.Node(Flatten(), name="flatten")
    # Conform all EPIs onto the grid of the volume at index grid_reference.
    regrid = pe.Node(UniformGrid(reference=grid_reference), name="regrid")
    # Stack the regridded blips into one 4D series for topup.
    concat_blips = pe.Node(MergeSeries(), name="concat_blips")
    # Total readout time and FSL-style PE direction, one per input file.
    readout_time = pe.MapNode(
        GetReadoutTime(),
        name="readout_time",
        iterfield=["metadata", "in_file"],
        run_without_submitting=True,
    )
    # Slice padding before topup — presumably to guarantee an even number of
    # slices as topup's default subsampling requires; TODO confirm.
    pad_blip_slices = pe.Node(PadSlices(), name="pad_blip_slices")
    pad_ref_slices = pe.Node(PadSlices(), name="pad_ref_slices")

    # ``'_quick' * sloppy`` selects the faster (less accurate) configuration
    # file only when sloppy is truthy.
    topup = pe.Node(
        TOPUP(config=_pkg_fname(
            "sdcflows", f"data/flirtsch/b02b0{'_quick' * sloppy}.cnf")),
        name="topup",
    )
    ref_average = pe.Node(RobustAverage(), name="ref_average")

    # Reorient topup's B-Spline coefficients to match the reference image.
    fix_coeff = pe.Node(TOPUPCoeffReorient(),
                        name="fix_coeff",
                        run_without_submitting=True)

    brainextraction_wf = init_brainextraction_wf()

    # fmt: off
    workflow.connect([
        (inputnode, flatten, [("in_data", "in_data"),
                              ("metadata", "in_meta")]),
        (flatten, readout_time, [("out_data", "in_file"),
                                 ("out_meta", "metadata")]),
        (flatten, regrid, [("out_data", "in_data")]),
        (regrid, concat_blips, [("out_data", "in_files")]),
        (readout_time, topup, [("readout_time", "readout_times"),
                               ("pe_dir_fsl", "encoding_direction")]),
        (regrid, pad_ref_slices, [("reference", "in_file")]),
        (pad_ref_slices, fix_coeff, [("out_file", "fmap_ref")]),
        (readout_time, fix_coeff, [(("pe_direction", _front), "pe_dir")]),
        (topup, fix_coeff, [("out_fieldcoef", "in_coeff")]),
        (topup, outputnode, [("out_jacs", "jacobians"), ("out_mats", "xfms")]),
        (ref_average, brainextraction_wf, [("out_file", "inputnode.in_file")]),
        (brainextraction_wf, outputnode, [("outputnode.out_file", "fmap_ref"),
                                          ("outputnode.out_mask", "fmap_mask")
                                          ]),
        (fix_coeff, outputnode, [("out_coeff", "fmap_coeff")]),
    ])
    # fmt: on

    # Fast path: feed topup's own corrected series and field straight to the
    # outputs; no head-motion realignment is performed.
    if not debug:
        # fmt: off
        workflow.connect([
            (concat_blips, pad_blip_slices, [("out_file", "in_file")]),
            (pad_blip_slices, topup, [("out_file", "in_file")]),
            (topup, ref_average, [("out_corrected", "in_file")]),
            (topup, outputnode, [("out_field", "fmap"),
                                 ("out_warps", "out_warps")]),
        ])
        # fmt: on
        return workflow

    # Debug path: realign the blips with AFNI 3dvolreg before topup, then
    # unwarp them with the reoriented coefficients instead of reusing topup's
    # corrected output. NOTE(review): realignment runs *only* in debug mode —
    # confirm this is the intended behavior.
    from nipype.interfaces.afni.preprocess import Volreg
    from niworkflows.interfaces.nibabel import SplitSeries
    from ...interfaces.bspline import ApplyCoeffsField

    realign = pe.Node(
        Volreg(args=f"-base {grid_reference}", outputtype="NIFTI_GZ"),
        name="realign_blips",
    )
    split_blips = pe.Node(SplitSeries(), name="split_blips")
    unwarp = pe.Node(ApplyCoeffsField(), name="unwarp")
    # Force the unwarp node to always re-run (touches a private attribute).
    unwarp.interface._always_run = True
    concat_corrected = pe.Node(MergeSeries(), name="concat_corrected")

    # fmt:off
    workflow.connect([
        (concat_blips, realign, [("out_file", "in_file")]),
        (realign, pad_blip_slices, [("out_file", "in_file")]),
        (pad_blip_slices, topup, [("out_file", "in_file")]),
        (fix_coeff, unwarp, [("out_coeff", "in_coeff")]),
        (realign, split_blips, [("out_file", "in_file")]),
        (split_blips, unwarp, [("out_files", "in_data")]),
        (readout_time, unwarp, [("readout_time", "ro_time"),
                                ("pe_direction", "pe_dir")]),
        (unwarp, outputnode, [("out_warp", "out_warps"),
                              ("out_field", "fmap")]),
        (unwarp, concat_corrected, [("out_corrected", "in_files")]),
        (concat_corrected, ref_average, [("out_file", "in_file")]),
    ])
    # fmt:on

    return workflow
Esempio n. 7
0
def init_topup_wf(omp_nthreads=1, debug=False, name="pepolar_estimate_wf"):
    """
    Create the PEPOLAR field estimation workflow based on FSL's ``topup``.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.pepolar import init_topup_wf
            wf = init_topup_wf()

    Parameters
    ----------
    debug : :obj:`bool`
        Whether a fast configuration of topup (less accurate) should be applied.
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    in_data : :obj:`list` of :obj:`str`
        A list of EPI files that will be fed into TOPUP.
    metadata : :obj:`list` of :obj:`dict`
        A list of dictionaries containing the metadata corresponding to each file
        in ``in_data``.

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of files in ``in_data``.
    fmap_mask : :obj:`str`
        The path of mask corresponding to the ``fmap_ref`` output.
    fmap_coeff : :obj:`str` or :obj:`list` of :obj:`str`
        The path(s) of the B-Spline coefficients supporting the fieldmap.

    """
    from nipype.interfaces.fsl.epi import TOPUP
    from niworkflows.interfaces.nibabel import MergeSeries
    from niworkflows.interfaces.images import IntraModalMerge

    from ...interfaces.epi import GetReadoutTime
    from ...interfaces.utils import Flatten
    from ...interfaces.bspline import TOPUPCoeffReorient
    from ..ancillary import init_brainextraction_wf

    workflow = Workflow(name=name)
    # Boilerplate sentence appended after the workflow description.
    workflow.__postdesc__ = f"""\
{_PEPOLAR_DESC} with `topup` (@topup; FSL {TOPUP().version}).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=INPUT_FIELDS),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "fmap",
            "fmap_ref",
            "fmap_coeff",
            "fmap_mask",
            "jacobians",
            "xfms",
            "out_warps",
        ]),
        name="outputnode",
    )

    # Collapse the (possibly nested) lists of files and metadata into flat,
    # index-matched lists.
    flatten = pe.Node(Flatten(), name="flatten")
    # Stack the flattened blips into one 4D series for topup.
    concat_blips = pe.Node(MergeSeries(), name="concat_blips")
    # Total readout time per input file, derived from its metadata.
    readout_time = pe.MapNode(
        GetReadoutTime(),
        name="readout_time",
        iterfield=["metadata", "in_file"],
        run_without_submitting=True,
    )

    # ``'_quick' * debug`` selects the faster (less accurate) configuration
    # file only when debug is truthy.
    topup = pe.Node(
        TOPUP(config=_pkg_fname("sdcflows",
                                f"data/flirtsch/b02b0{'_quick' * debug}.cnf")),
        name="topup",
    )
    # Average topup's corrected volumes (no HMC, no RAS reorientation) to
    # build the unwarped reference.
    merge_corrected = pe.Node(IntraModalMerge(hmc=False, to_ras=False),
                              name="merge_corrected")

    # Reorient topup's B-Spline coefficients to match the corrected reference.
    fix_coeff = pe.Node(TOPUPCoeffReorient(),
                        name="fix_coeff",
                        run_without_submitting=True)

    brainextraction_wf = init_brainextraction_wf()

    # fmt: off
    workflow.connect([
        (inputnode, flatten, [("in_data", "in_data"),
                              ("metadata", "in_meta")]),
        (flatten, readout_time, [("out_data", "in_file"),
                                 ("out_meta", "metadata")]),
        (flatten, concat_blips, [("out_data", "in_files")]),
        (flatten, topup, [(("out_meta", _pe2fsl), "encoding_direction")]),
        (readout_time, topup, [("readout_time", "readout_times")]),
        (concat_blips, topup, [("out_file", "in_file")]),
        (topup, merge_corrected, [("out_corrected", "in_files")]),
        (topup, fix_coeff, [("out_fieldcoef", "in_coeff"),
                            ("out_corrected", "fmap_ref")]),
        (topup, outputnode, [("out_field", "fmap"), ("out_jacs", "jacobians"),
                             ("out_mats", "xfms"),
                             ("out_warps", "out_warps")]),
        (merge_corrected, brainextraction_wf, [("out_avg", "inputnode.in_file")
                                               ]),
        (merge_corrected, outputnode, [("out_avg", "fmap_ref")]),
        (brainextraction_wf, outputnode, [("outputnode.out_mask", "fmap_mask")
                                          ]),
        (fix_coeff, outputnode, [("out_coeff", "fmap_coeff")]),
    ])
    # fmt: on

    return workflow
Esempio n. 8
0
def test_TOPUP_inputs():
    """Verify the metadata declared on every trait of the TOPUP input spec."""
    expected = {
        'args': {'argstr': '%s'},
        'config': {'argstr': '--config=%s', 'usedefault': True},
        'encoding_direction': {
            'argstr': '--datain=%s',
            'mandatory': True,
            'requires': ['readout_times'],
            'xor': ['encoding_file'],
        },
        'encoding_file': {
            'argstr': '--datain=%s',
            'mandatory': True,
            'xor': ['encoding_direction'],
        },
        'environ': {'nohash': True, 'usedefault': True},
        'estmov': {'argstr': '--estmov=%d'},
        'fwhm': {'argstr': '--fwhm=%f'},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'in_file': {'argstr': '--imain=%s', 'mandatory': True},
        'interp': {'argstr': '--interp=%s'},
        'max_iter': {'argstr': '--miter=%d'},
        'minmet': {'argstr': '--minmet=%d'},
        'numprec': {'argstr': '--numprec=%s'},
        'out_base': {
            'argstr': '--out=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_base',
        },
        'out_corrected': {
            'argstr': '--iout=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_corrected',
        },
        'out_field': {
            'argstr': '--fout=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_field',
        },
        'out_logfile': {
            'argstr': '--logout=%s',
            'hash_files': False,
            'keep_extension': True,
            'name_source': ['in_file'],
            'name_template': '%s_topup.log',
        },
        'output_type': {},
        'readout_times': {
            'mandatory': True,
            'requires': ['encoding_direction'],
            'xor': ['encoding_file'],
        },
        'reg_lambda': {'argstr': '--miter=%0.f'},
        'regmod': {'argstr': '--regmod=%s'},
        'regrid': {'argstr': '--regrid=%d'},
        'scale': {'argstr': '--scale=%d'},
        'splineorder': {'argstr': '--splineorder=%d'},
        'ssqlambda': {'argstr': '--ssqlambda=%d'},
        'subsamp': {'argstr': '--subsamp=%d'},
        'terminal_output': {'mandatory': True, 'nohash': True},
        'warp_res': {'argstr': '--warpres=%f'},
    }
    spec = TOPUP.input_spec()

    # One yielded assertion per (trait, metadata attribute) pair.
    for trait_name, meta in expected.items():
        for attr, want in meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr), want
Esempio n. 9
0
def test_TOPUP_inputs():
    """Check that TOPUP's input spec traits carry the expected metadata."""
    expected_meta = {
        'args': {'argstr': '%s'},
        'config': {'argstr': '--config=%s', 'usedefault': True},
        'encoding_direction': {
            'argstr': '--datain=%s',
            'mandatory': True,
            'requires': ['readout_times'],
            'xor': ['encoding_file'],
        },
        'encoding_file': {
            'argstr': '--datain=%s',
            'mandatory': True,
            'xor': ['encoding_direction'],
        },
        'environ': {'nohash': True, 'usedefault': True},
        'estmov': {'argstr': '--estmov=%d'},
        'fwhm': {'argstr': '--fwhm=%f'},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'in_file': {'argstr': '--imain=%s', 'mandatory': True},
        'interp': {'argstr': '--interp=%s'},
        'max_iter': {'argstr': '--miter=%d'},
        'minmet': {'argstr': '--minmet=%d'},
        'numprec': {'argstr': '--numprec=%s'},
        'out_base': {
            'argstr': '--out=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_base',
        },
        'out_corrected': {
            'argstr': '--iout=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_corrected',
        },
        'out_field': {
            'argstr': '--fout=%s',
            'hash_files': False,
            'name_source': ['in_file'],
            'name_template': '%s_field',
        },
        'out_logfile': {
            'argstr': '--logout=%s',
            'hash_files': False,
            'keep_extension': True,
            'name_source': ['in_file'],
            'name_template': '%s_topup.log',
        },
        'output_type': {},
        'readout_times': {
            'mandatory': True,
            'requires': ['encoding_direction'],
            'xor': ['encoding_file'],
        },
        'reg_lambda': {'argstr': '--miter=%0.f'},
        'regmod': {'argstr': '--regmod=%s'},
        'regrid': {'argstr': '--regrid=%d'},
        'scale': {'argstr': '--scale=%d'},
        'splineorder': {'argstr': '--splineorder=%d'},
        'ssqlambda': {'argstr': '--ssqlambda=%d'},
        'subsamp': {'argstr': '--subsamp=%d'},
        'terminal_output': {'mandatory': True, 'nohash': True},
        'warp_res': {'argstr': '--warpres=%f'},
    }
    trait_spec = TOPUP.input_spec()

    # Compare each declared attribute against the live trait object.
    for name, meta in expected_meta.items():
        for attr, expected_value in meta.items():
            yield assert_equal, getattr(trait_spec.traits()[name], attr), expected_value