def create_dwi_pipeline(name="proc_dwi"):
    """Assemble a diffusion-weighted imaging (DWI) processing workflow.

    The raw DWI volume is preprocessed (eddy-current correction and brain
    extraction), then fed into both a DTIFit tensor fit and a bedpostX
    fibre-orientation estimation.

    Parameters
    ----------
    name : str
        Name of the returned Workflow (default ``"proc_dwi"``).

    Returns
    -------
    pe.Workflow
        Workflow exposing ``inputnode.dwi``, ``inputnode.bvecs`` and
        ``inputnode.bvals`` as its external inputs.
    """
    pipeline = pe.Workflow(name=name)

    inputnode = pe.Node(
        interface=util.IdentityInterface(fields=['dwi', "bvecs", "bvals"]),
        name="inputnode")
    preprocess = create_dwi_preprocess_pipeline()
    estimate_bedpost = create_bedpostx_pipeline()
    dtifit = pe.Node(interface=fsl.DTIFit(), name='dtifit')

    # Raw DWI goes into the preprocessing sub-workflow.
    pipeline.connect(inputnode, "dwi", preprocess, "inputnode.dwi")

    # Tensor fit consumes the eddy-corrected series, the BET brain mask,
    # and the gradient table.
    pipeline.connect(preprocess, 'eddycorrect.outputnode.eddy_corrected',
                     dtifit, 'dwi')
    pipeline.connect(preprocess, "bet.mask_file", dtifit, "mask")
    pipeline.connect(inputnode, "bvals", dtifit, "bvals")
    pipeline.connect(inputnode, "bvecs", dtifit, "bvecs")

    # bedpostX receives the same preprocessed data and gradient table.
    pipeline.connect(preprocess, 'eddycorrect.outputnode.eddy_corrected',
                     estimate_bedpost, 'inputnode.dwi')
    pipeline.connect(preprocess, "bet.mask_file",
                     estimate_bedpost, "inputnode.mask")
    pipeline.connect(inputnode, "bvals", estimate_bedpost, "inputnode.bvals")
    pipeline.connect(inputnode, "bvecs", estimate_bedpost, "inputnode.bvecs")

    return pipeline
    
# Esempio n. 2
# 0
"""
Setup for Tracktography
-----------------------

Here we will create a workflow to enable probabilistic tracktography
and hard segmentation of the seed region
"""

tractography = pe.Workflow(name='tractography')
tractography.base_dir = os.path.abspath('fsl_dti_tutorial')

"""
estimate the diffusion parameters: phi, theta, and so on
"""

bedpostx = fsl_wf.create_bedpostx_pipeline()
bedpostx.get_node("xfibres").iterables = ("n_fibres",[1,2])


flirt = pe.Node(interface=fsl.FLIRT(), name='flirt')
flirt.inputs.in_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
flirt.inputs.dof = 12

"""
perform probabilistic tracktography
"""

probtrackx = pe.Node(interface=fsl.ProbTrackX(),name='probtrackx')
probtrackx.inputs.mode='seedmask'
probtrackx.inputs.c_thresh = 0.2
probtrackx.inputs.n_steps=2000
# Esempio n. 3
# 0
def test_create_bedpostx_pipeline():
    """Regression test for the nipype bedpostX workflow.

    Runs the workflow-based ``create_bedpostx_pipeline`` and the monolithic
    ``fsl.BEDPOSTX`` interface on the same FSL course dataset with matching
    MCMC settings, then asserts voxel-wise equality of the mean f-, theta-
    and phi-sample maps for both fibre populations.

    Requires the ``FSL_COURSE_DATA`` environment variable to point at the
    FSL course data directory; raises ``KeyError`` if unset.
    """
    fsl_course_dir = os.environ["FSL_COURSE_DATA"]

    mask_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fsl_course_data/fdt/subj1/data.nii.gz")

    nipype_bedpostx = fsl_wf.create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.dwi = dwi_file
    nipype_bedpostx.inputs.inputnode.mask = mask_file
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    # MCMC settings chosen to match the original interface's parameters below.
    nipype_bedpostx.inputs.xfibres.n_fibres = 2
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    # BEDPOSTX emits a deprecation warning on instantiation; silence it so
    # the test output stays clean.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface = fsl.BEDPOSTX(), name="original_bedpostx")
    original_bedpostx.inputs.dwi = dwi_file
    original_bedpostx.inputs.mask = mask_file
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    # Disable cluster submission so the run stays local and deterministic.
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.fibres = 2
    original_bedpostx.inputs.weight = 1
    original_bedpostx.inputs.burn_period = 1000
    original_bedpostx.inputs.jumps = 1250
    original_bedpostx.inputs.sampling = 25

    # One AssertEqual node per compared volume pair.
    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")
    test_f2 = pe.Node(util.AssertEqual(), name="mean_f2_test")
    test_th1 = pe.Node(util.AssertEqual(), name="mean_th1_test")
    test_th2 = pe.Node(util.AssertEqual(), name="mean_th2_test")
    test_ph1 = pe.Node(util.AssertEqual(), name="mean_ph1_test")
    test_ph2 = pe.Node(util.AssertEqual(), name="mean_ph2_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = test_dir

    # Both interfaces emit one mean-sample file per fibre population;
    # select the first/second element of each output list.
    def pickFirst(l):
        return l[0]

    def pickSecond(l):
        return l[1]

    # BUG FIX: the test_* nodes are plain Nodes wrapping AssertEqual, not
    # Workflows, so their input fields are addressed as "volume1"/"volume2".
    # The previous dotted "inputnode.volume1"/"inputnode.volume2" names are
    # only valid for Workflow destinations and made connect() fail.
    pipeline.connect([(nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_f2, [(("outputnode.mean_fsamples", pickSecond), "volume1")]),
                      (nipype_bedpostx, test_th1, [(("outputnode.mean_thsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_th2, [(("outputnode.mean_thsamples", pickSecond), "volume1")]),
                      (nipype_bedpostx, test_ph1, [(("outputnode.mean_phsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_ph2, [(("outputnode.mean_phsamples", pickSecond), "volume1")]),

                      (original_bedpostx, test_f1, [(("mean_fsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_f2, [(("mean_fsamples", pickSecond), "volume2")]),
                      (original_bedpostx, test_th1, [(("mean_thsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_th2, [(("mean_thsamples", pickSecond), "volume2")]),
                      (original_bedpostx, test_ph1, [(("mean_phsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_ph2, [(("mean_phsamples", pickSecond), "volume2")])
                      ])

    pipeline.run(inseries=True)