Example #1
def create_bedpostx_pipeline(
    name='bedpostx',
    params={
        'n_fibres': 2,
        'fudge': 1,
        'burn_in': 1000,
        'n_jumps': 1250,
        'sample_every': 25,
        'model': 1,
        'cnlinear': True
    }):
    """
    Creates a pipeline that does the same as the ``bedpostx`` script from FSL:
    it estimates diffusion model parameters (distributions, not MLE) voxelwise
    for the whole volume, splitting it slicewise.

    Example
    -------

    >>> from nipype.workflows.dmri.fsl.dti import create_bedpostx_pipeline
    >>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000,
    ...               n_jumps = 1250, sample_every = 25)
    >>> bpwf = create_bedpostx_pipeline('nipype_bedpostx', params)
    >>> bpwf.inputs.inputnode.dwi = 'diffusion.nii'
    >>> bpwf.inputs.inputnode.mask = 'mask.nii'
    >>> bpwf.inputs.inputnode.bvecs = 'bvecs'
    >>> bpwf.inputs.inputnode.bvals = 'bvals'
    >>> bpwf.run() # doctest: +SKIP

    Inputs::

        inputnode.dwi
        inputnode.mask
        inputnode.bvecs
        inputnode.bvals

    Outputs::

        outputnode wraps all XFibres outputs

    """

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['dwi', 'mask', 'bvecs', 'bvals']),
        name='inputnode')

    slice_dwi = pe.Node(fsl.Split(dimension='z'), name='slice_dwi')
    slice_msk = pe.Node(fsl.Split(dimension='z'), name='slice_msk')
    mask_dwi = pe.MapNode(fsl.ImageMaths(op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='mask_dwi')

    xfib_if = fsl.XFibres(**params)
    xfibres = pe.MapNode(xfib_if, name='xfibres', iterfield=['dwi', 'mask'])

    make_dyads = pe.MapNode(fsl.MakeDyadicVectors(),
                            name="make_dyads",
                            iterfield=['theta_vol', 'phi_vol'])
    out_fields = [
        'dyads', 'dyads_disp', 'thsamples', 'phsamples', 'fsamples',
        'mean_thsamples', 'mean_phsamples', 'mean_fsamples'
    ]

    outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
                         name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, slice_dwi, [('dwi', 'in_file')]),
                (inputnode, slice_msk, [('mask', 'in_file')]),
                (slice_dwi, mask_dwi, [('out_files', 'in_file')]),
                (slice_msk, mask_dwi, [('out_files', 'in_file2')]),
                (slice_dwi, xfibres, [('out_files', 'dwi')]),
                (mask_dwi, xfibres, [('out_file', 'mask')]),
                (inputnode, xfibres, [('bvecs', 'bvecs'), ('bvals', 'bvals')]),
                (inputnode, make_dyads, [('mask', 'mask')])])

    mms = {}
    for k in ['thsamples', 'phsamples', 'fsamples']:
        mms[k] = merge_and_mean(k)
        wf.connect([(xfibres, mms[k], [(k, 'inputnode.in_files')]),
                    (mms[k], outputnode, [('outputnode.merged', k),
                                          ('outputnode.mean', 'mean_%s' % k)])
                    ])

    # m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
    #                       name="merge_mean_dsamples")
    wf.connect([
        (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
        (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
        #(xfibres, m_mdsamples,  [('mean_dsamples', 'in_files')]),
        (make_dyads, outputnode, [('dyads', 'dyads'),
                                  ('dispersion', 'dyads_disp')])
    ])
    return wf
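
Note: this snippet is shown out of its module context, so the imports it relies on are not part of the listing. A minimal preamble that should make it importable, assuming the standard nipype package layout, is sketched below; merge_and_mean is a sibling helper defined in nipype.workflows.dmri.fsl.dti (it merges the per-slice samples and computes their mean) and is not reproduced here.

# Assumed imports for Example #1 (sketch, not part of the original snippet):
import nipype.pipeline.engine as pe          # Workflow, Node, MapNode
import nipype.interfaces.utility as niu      # IdentityInterface
import nipype.interfaces.fsl as fsl          # Split, ImageMaths, XFibres, MakeDyadicVectors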
Example #2
def create_bedpostx_pipeline(name="bedpostx"):
    """Creates a pipeline that does the same as bedpostx script from FSL -
    calculates diffusion model parameters (distributions not MLE) voxelwise for
    the whole volume (by splitting it slicewise).

    Example
    -------

    >>> nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    >>> nipype_bedpostx.inputs.inputnode.dwi = 'diffusion.nii'
    >>> nipype_bedpostx.inputs.inputnode.mask = 'mask.nii'
    >>> nipype_bedpostx.inputs.inputnode.bvecs = 'bvecs'
    >>> nipype_bedpostx.inputs.inputnode.bvals = 'bvals'
    >>> nipype_bedpostx.inputs.xfibres.n_fibres = 2
    >>> nipype_bedpostx.inputs.xfibres.fudge = 1
    >>> nipype_bedpostx.inputs.xfibres.burn_in = 1000
    >>> nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    >>> nipype_bedpostx.inputs.xfibres.sample_every = 25
    >>> nipype_bedpostx.run() # doctest: +SKIP

    Inputs::

        inputnode.dwi
        inputnode.mask
        inputnode.bvecs
        inputnode.bvals

    Outputs::

        outputnode.thsamples
        outputnode.phsamples
        outputnode.fsamples
        outputnode.mean_thsamples
        outputnode.mean_phsamples
        outputnode.mean_fsamples
        outputnode.dyads
        outputnode.dyads_dispersion

    """

    inputnode = pe.Node(
        interface=util.IdentityInterface(fields=["dwi", "mask"]),
        name="inputnode")

    mask_dwi = pe.Node(interface=fsl.ImageMaths(op_string="-mas"),
                       name="mask_dwi")
    slice_dwi = pe.Node(interface=fsl.Split(dimension="z"), name="slice_dwi")
    slice_mask = pe.Node(interface=fsl.Split(dimension="z"),
                         name="slice_mask")

    preproc = pe.Workflow(name="preproc")

    preproc.connect([(inputnode, mask_dwi, [('dwi', 'in_file')]),
                     (inputnode, mask_dwi, [('mask', 'in_file2')]),
                     (mask_dwi, slice_dwi, [('out_file', 'in_file')]),
                     (inputnode, slice_mask, [('mask', 'in_file')])
                     ])

    xfibres = pe.MapNode(interface=fsl.XFibres(), name="xfibres",
                         iterfield=['dwi', 'mask'])

    # Normal set of parameters
    xfibres.inputs.n_fibres = 2
    xfibres.inputs.fudge = 1
    xfibres.inputs.burn_in = 1000
    xfibres.inputs.n_jumps = 1250
    xfibres.inputs.sample_every = 25
    xfibres.inputs.model = 1
    xfibres.inputs.non_linear = True
    xfibres.inputs.update_proposal_every = 24

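    # A second identity node (also named 'inputnode') is created here as the
    # entry point of the post-processing sub-workflow; rebinding the Python
    # name is safe because the preproc connections above keep references to
    # the earlier node object.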
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["thsamples",
                                                                 "phsamples",
                                                                 "fsamples",
                                                                 "dyads",
                                                                 "mean_dsamples",
                                                                 "mask"]),
                        name="inputnode")

    merge_thsamples = pe.MapNode(fsl.Merge(dimension="z"),
                                 name="merge_thsamples", iterfield=['in_files'])
    merge_phsamples = pe.MapNode(fsl.Merge(dimension="z"),
                                 name="merge_phsamples", iterfield=['in_files'])
    merge_fsamples = pe.MapNode(fsl.Merge(dimension="z"),
                                name="merge_fsamples", iterfield=['in_files'])

    merge_mean_dsamples = pe.Node(fsl.Merge(dimension="z"),
                                  name="merge_mean_dsamples")

    mean_thsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"),
                                name="mean_thsamples", iterfield=['in_file'])
    mean_phsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"),
                                name="mean_phsamples", iterfield=['in_file'])
    mean_fsamples = pe.MapNode(fsl.ImageMaths(op_string="-Tmean"),
                               name="mean_fsamples", iterfield=['in_file'])
    make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads",
                            iterfield=['theta_vol', 'phi_vol'])

    postproc = pe.Workflow(name="postproc")

    postproc.connect([
        (inputnode, merge_thsamples, [(('thsamples', transpose), 'in_files')]),
        (inputnode, merge_phsamples, [(('phsamples', transpose), 'in_files')]),
        (inputnode, merge_fsamples, [(('fsamples', transpose), 'in_files')]),
        (inputnode, merge_mean_dsamples, [('mean_dsamples', 'in_files')]),
        (merge_thsamples, mean_thsamples, [('merged_file', 'in_file')]),
        (merge_phsamples, mean_phsamples, [('merged_file', 'in_file')]),
        (merge_fsamples, mean_fsamples, [('merged_file', 'in_file')]),
        (merge_thsamples, make_dyads, [('merged_file', 'theta_vol')]),
        (merge_phsamples, make_dyads, [('merged_file', 'phi_vol')]),
        (inputnode, make_dyads, [('mask', 'mask')]),
    ])

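    # Top-level identity node exposing dwi, mask, bvecs and bvals on the
    # assembled bedpostx workflow.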
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi",
                                                                 "mask",
                                                                 "bvecs",
                                                                 "bvals"]),
                        name="inputnode")

    bedpostx = pe.Workflow(name=name)
    bedpostx.connect([(inputnode, preproc, [('mask', 'inputnode.mask')]),
                      (inputnode, preproc, [('dwi', 'inputnode.dwi')]),

                      (preproc, xfibres, [('slice_dwi.out_files', 'dwi'),
                                          ('slice_mask.out_files', 'mask')]),
                      (inputnode, xfibres, [('bvals', 'bvals')]),
                      (inputnode, xfibres, [('bvecs', 'bvecs')]),

                      (inputnode, postproc, [('mask', 'inputnode.mask')]),
                      (xfibres, postproc,
                       [('thsamples', 'inputnode.thsamples'),
                        ('phsamples', 'inputnode.phsamples'),
                        ('fsamples', 'inputnode.fsamples'),
                        ('dyads', 'inputnode.dyads'),
                        ('mean_dsamples', 'inputnode.mean_dsamples')]),
                      ])

    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=["thsamples",
                                                 "phsamples",
                                                 "fsamples",
                                                 "mean_thsamples",
                                                 "mean_phsamples",
                                                 "mean_fsamples",
                                                 "dyads",
                                                 "dyads_dispersion"]),
        name="outputnode")
    bedpostx.connect([
        (postproc, outputnode, [("merge_thsamples.merged_file", "thsamples"),
                                ("merge_phsamples.merged_file", "phsamples"),
                                ("merge_fsamples.merged_file", "fsamples"),
                                ("mean_thsamples.out_file", "mean_thsamples"),
                                ("mean_phsamples.out_file", "mean_phsamples"),
                                ("mean_fsamples.out_file", "mean_fsamples"),
                                ("make_dyads.dyads", "dyads"),
                                ("make_dyads.dispersion", "dyads_dispersion")]),
    ])
    return bedpostx
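
Note: preproc and postproc are nested pe.Workflow objects, so the top-level connect calls address their inner nodes with dotted 'node.field' strings (for example 'inputnode.mask' or 'slice_dwi.out_files'); transpose is a small list-reordering helper defined in the same nipype module and is not shown here. A minimal, self-contained sketch of the nested-workflow connection pattern, using made-up node names, could look like this:

# Sketch of nipype's nested-workflow connection pattern (hypothetical nodes):
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util

# Inner workflow with its own entry node.
inner = pe.Workflow(name='inner')
entry = pe.Node(util.IdentityInterface(fields=['value']), name='entry')
exit_node = pe.Node(util.IdentityInterface(fields=['value']), name='exit')
inner.connect(entry, 'value', exit_node, 'value')

# The outer workflow feeds the inner one by addressing '<nodename>.<field>'.
outer = pe.Workflow(name='outer')
feeder = pe.Node(util.IdentityInterface(fields=['value']), name='feeder')
outer.connect(feeder, 'value', inner, 'entry.value')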
Example #3
def bedpostx_parallel(
    name='bedpostx_parallel',
    compute_all_outputs=True,
    params={
        'n_fibres': 2,
        'fudge': 1,
        'burn_in': 1000,
        'n_jumps': 1250,
        'sample_every': 25,
        'model': 1,
        'cnlinear': True
    }):
    """
    Does the same as :func:`.create_bedpostx_pipeline`, but splits the input
    dMRI into small ROIs that are better suited for parallel processing.

    Example
    -------

    >>> from nipype.workflows.dmri.fsl.dti import bedpostx_parallel
    >>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000,
    ...               n_jumps = 1250, sample_every = 25)
    >>> bpwf = bedpostx_parallel('nipype_bedpostx_parallel', params=params)
    >>> bpwf.inputs.inputnode.dwi = 'diffusion.nii'
    >>> bpwf.inputs.inputnode.mask = 'mask.nii'
    >>> bpwf.inputs.inputnode.bvecs = 'bvecs'
    >>> bpwf.inputs.inputnode.bvals = 'bvals'
    >>> bpwf.run(plugin='CondorDAGMan') # doctest: +SKIP

    Inputs::

        inputnode.dwi
        inputnode.mask
        inputnode.bvecs
        inputnode.bvals

    Outputs::

        outputnode wraps all XFibres outputs

    """

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['dwi', 'mask', 'bvecs', 'bvals']),
        name='inputnode')
    slice_dwi = pe.Node(misc.SplitROIs(roi_size=(5, 5, 1)), name='slice_dwi')
    if params is not None:
        xfib_if = fsl.XFibres5(**params)
    else:
        xfib_if = fsl.XFibres5()
    xfibres = pe.MapNode(xfib_if, name='xfibres', iterfield=['dwi', 'mask'])

    mrg_dyads = pe.MapNode(misc.MergeROIs(),
                           name='Merge_dyads',
                           iterfield=['in_files'])
    mrg_fsamp = pe.MapNode(misc.MergeROIs(),
                           name='Merge_mean_fsamples',
                           iterfield=['in_files'])
    out_fields = ['dyads', 'fsamples']

    if compute_all_outputs:
        out_fields += [
            'dyads_disp', 'thsamples', 'phsamples', 'mean_fsamples',
            'mean_thsamples', 'mean_phsamples', 'merged_fsamples',
            'merged_thsamples', 'merged_phsamples'
        ]

    outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
                         name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, slice_dwi, [('dwi', 'in_file'), ('mask', 'in_mask')]),
        (slice_dwi, xfibres, [('out_files', 'dwi'), ('out_masks', 'mask')]),
        (inputnode, xfibres, [('bvecs', 'bvecs'), ('bvals', 'bvals')]),
        (inputnode, mrg_dyads, [('mask', 'in_reference')]),
        (xfibres, mrg_dyads, [(('dyads', transpose), 'in_files')]),
        (slice_dwi, mrg_dyads, [('out_index', 'in_index')]),
        (inputnode, mrg_fsamp, [('mask', 'in_reference')]),
        (xfibres, mrg_fsamp, [(('mean_fsamples', transpose), 'in_files')]),
        (slice_dwi, mrg_fsamp, [('out_index', 'in_index')]),
        (mrg_dyads, outputnode, [('merged_file', 'dyads')]),
        (mrg_fsamp, outputnode, [('merged_file', 'fsamples')])
    ])

    if compute_all_outputs:
        make_dyads = pe.MapNode(fsl.MakeDyadicVectors(),
                                name="Make_dyads",
                                iterfield=['theta_vol', 'phi_vol'])

        wf.connect([(inputnode, make_dyads, [('mask', 'mask')])])
        mms = {}
        for k in ['thsamples', 'phsamples', 'fsamples']:
            mms[k] = merge_and_mean_parallel(k)
            wf.connect([
                (slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]),
                (inputnode, mms[k], [('mask', 'inputnode.in_reference')]),
                (xfibres, mms[k], [(k, 'inputnode.in_files')]),
                (mms[k], outputnode, [('outputnode.merged', 'merged_%s' % k),
                                      ('outputnode.mean', 'mean_%s' % k)])
            ])

        # m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
        #                       name="merge_mean_dsamples")
        wf.connect([
            (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
            (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
            #(xfibres, m_mdsamples,  [('mean_dsamples', 'in_files')]),
            (make_dyads, outputnode, [('dispersion', 'dyads_disp')])
        ])

    return wf
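
The doctest above submits the graph through the CondorDAGMan plugin; on a single multi-core machine the same workflow can be driven with nipype's MultiProc execution plugin instead. A usage sketch (file names are placeholders):

# Usage sketch (assumes bedpostx_parallel is importable and the data files exist):
params = dict(n_fibres=2, fudge=1, burn_in=1000, n_jumps=1250, sample_every=25)
wf = bedpostx_parallel('bedpostx_par', params=params)
wf.inputs.inputnode.dwi = 'diffusion.nii'    # placeholder paths
wf.inputs.inputnode.mask = 'mask.nii'
wf.inputs.inputnode.bvecs = 'bvecs'
wf.inputs.inputnode.bvals = 'bvals'
# 'MultiProc' is a standard nipype plugin; n_procs caps the number of local workers.
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})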