Example #1
def create_dwi_pipeline(name="proc_dwi"):
    
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['dwi', "bvecs", "bvals"]), name="inputnode")
    
    preprocess = create_dwi_preprocess_pipeline()
    
    estimate_bedpost = create_bedpostx_pipeline()
    
    dtifit = pe.Node(interface=fsl.DTIFit(),name='dtifit')
    
    pipeline = pe.Workflow(name=name)
    
    pipeline.connect([(inputnode, preprocess, [("dwi", "inputnode.dwi")]),
                      (preprocess, dtifit, [('eddycorrect.outputnode.eddy_corrected','dwi'),
                                            ("bet.mask_file", "mask")]),
                      (inputnode, dtifit, [("bvals","bvals"),
                                           ("bvecs", "bvecs")]),
                      (preprocess, estimate_bedpost, [('eddycorrect.outputnode.eddy_corrected','inputnode.dwi'),
                                                      ("bet.mask_file", "inputnode.mask")]),
                      (inputnode, estimate_bedpost, [("bvals","inputnode.bvals"),
                                                     ("bvecs", "inputnode.bvecs")]),
                                            ])
    return pipeline
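
This factory assumes the usual nipype aliases (pe, util, fsl) and that create_dwi_preprocess_pipeline and create_bedpostx_pipeline are importable; the exact module path for the FSL DTI workflows has moved between nipype releases. A minimal usage sketch, with hypothetical paths and file names:

# Usage sketch; imports, module paths and file names are assumptions.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl
from nipype.workflows.dmri.fsl.dti import create_bedpostx_pipeline  # path varies by version
# create_dwi_preprocess_pipeline is assumed to be defined in the same project

dwi_wf = create_dwi_pipeline(name="proc_dwi")
dwi_wf.base_dir = "/tmp/proc_dwi_work"        # hypothetical working directory
dwi_wf.inputs.inputnode.dwi = "dwi.nii.gz"    # hypothetical input files
dwi_wf.inputs.inputnode.bvecs = "bvecs"
dwi_wf.inputs.inputnode.bvals = "bvals"
dwi_wf.run()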
    
Example #2
def test_create_bedpostx_pipeline():
    fsl_course_dir = os.path.abspath('fsl_course_data')

    mask_file = os.path.join(fsl_course_dir, "fdt/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt/subj1/data.nii.gz")

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.dwi = dwi_file
    nipype_bedpostx.inputs.inputnode.mask = mask_file
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 2
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(), name="original_bedpostx")
    original_bedpostx.inputs.dwi = dwi_file
    original_bedpostx.inputs.mask = mask_file
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.fibres = 2
    original_bedpostx.inputs.weight = 1
    original_bedpostx.inputs.burn_period = 1000
    original_bedpostx.inputs.jumps = 1250
    original_bedpostx.inputs.sampling = 25

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")
    test_f2 = pe.Node(util.AssertEqual(), name="mean_f2_test")
    test_th1 = pe.Node(util.AssertEqual(), name="mean_th1_test")
    test_th2 = pe.Node(util.AssertEqual(), name="mean_th2_test")
    test_ph1 = pe.Node(util.AssertEqual(), name="mean_ph1_test")
    test_ph2 = pe.Node(util.AssertEqual(), name="mean_ph2_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    def pickFirst(l):
        return l[0]

    def pickSecond(l):
        return l[1]

    pipeline.connect([(nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_f2, [(("outputnode.mean_fsamples", pickSecond), "volume1")]),
                      (nipype_bedpostx, test_th1, [(("outputnode.mean_thsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_th2, [(("outputnode.mean_thsamples", pickSecond), "volume1")]),
                      (nipype_bedpostx, test_ph1, [(("outputnode.mean_phsamples", pickFirst), "volume1")]),
                      (nipype_bedpostx, test_ph2, [(("outputnode.mean_phsamples", pickSecond), "volume1")]),

                      (original_bedpostx, test_f1, [(("mean_fsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_f2, [(("mean_fsamples", pickSecond), "volume2")]),
                      (original_bedpostx, test_th1, [(("mean_thsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_th2, [(("mean_thsamples", pickSecond), "volume2")]),
                      (original_bedpostx, test_ph1, [(("mean_phsamples", pickFirst), "volume2")]),
                      (original_bedpostx, test_ph2, [(("mean_phsamples", pickSecond), "volume2")])
                      ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
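
The (("outputnode.mean_fsamples", pickFirst), "volume1") tuples rely on nipype's connection-function syntax: when a source is written as (output_name, function), the function is applied to the output value at runtime before it is handed to the destination input. A self-contained illustration of the same pattern, with hypothetical node and field names:

# Illustration of the (output, function) connection syntax; names are hypothetical.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def pick_first(values):
    return values[0]

src = pe.Node(niu.IdentityInterface(fields=["files"]), name="src")
src.inputs.files = ["first.nii.gz", "second.nii.gz"]
dst = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="dst")

demo = pe.Workflow(name="pick_demo")
# pick_first is applied to src.files before the value reaches dst.in_file
demo.connect([(src, dst, [(("files", pick_first), "in_file")])])
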
Example #3
def test_create_bedpostx_pipeline():
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])

    mask_file = os.path.join(fsl_course_dir, "fdt2/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt2/subj1/data.nii.gz")
    z_min = 62
    z_size = 2

    slice_mask = pe.Node(fsl.ExtractROI(x_min=0,
                                        x_size=-1,
                                        y_min=0,
                                        y_size=-1,
                                        z_min=z_min,
                                        z_size=z_size), name="slice_mask")
    slice_mask.inputs.in_file = mask_file

    slice_dwi = pe.Node(fsl.ExtractROI(x_min=0,
                                       x_size=-1,
                                       y_min=0,
                                       y_size=-1,
                                       z_min=z_min,
                                       z_size=z_size), name="slice_dwi")
    slice_dwi.inputs.in_file = dwi_file

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 1
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 0
    nipype_bedpostx.inputs.xfibres.n_jumps = 1
    nipype_bedpostx.inputs.xfibres.sample_every = 1
    nipype_bedpostx.inputs.xfibres.cnlinear = True
    nipype_bedpostx.inputs.xfibres.seed = 0
    nipype_bedpostx.inputs.xfibres.model = 2

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(), name="original_bedpostx")
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.n_fibres = 1
    original_bedpostx.inputs.fudge = 1
    original_bedpostx.inputs.burn_in = 0
    original_bedpostx.inputs.n_jumps = 1
    original_bedpostx.inputs.sample_every = 1
    original_bedpostx.inputs.seed = 0
    original_bedpostx.inputs.model = 2

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    pipeline.connect([(slice_mask, original_bedpostx, [("roi_file", "mask")]),
                      (slice_mask, nipype_bedpostx, [("roi_file", "inputnode.mask")]),

                      (slice_dwi, original_bedpostx, [("roi_file", "dwi")]),
                      (slice_dwi, nipype_bedpostx, [("roi_file", "inputnode.dwi")]),

                      (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", list_to_filename), "volume1")]),
                      (original_bedpostx, test_f1, [("mean_fsamples", "volume2")]),
                      ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
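
fsl.ExtractROI wraps FSL's fslroi; the two nodes above keep only two axial slices (z_min=62, z_size=2) so the comparison against the stock BEDPOSTX node runs quickly. A quick, hypothetical way to inspect the command such a node would issue (file names are placeholders and the input file must exist):

# Hypothetical inspection snippet; assumes data.nii.gz exists in the working directory.
import nipype.interfaces.fsl as fsl

roi = fsl.ExtractROI(in_file="data.nii.gz", roi_file="data_slice.nii.gz",
                     x_min=0, x_size=-1, y_min=0, y_size=-1,
                     z_min=62, z_size=2)
print(roi.cmdline)  # roughly: fslroi data.nii.gz data_slice.nii.gz 0 -1 0 -1 62 2
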
Example #4
    #PBS -l nodes=1:ppn=1
    #PBS -l pmem=600mb
    #PBS -l walltime=4:00:00
    #PBS -e ''' + base_dir + '''my_job.err
    #PBS -o ''' + base_dir + '''my_job.log
    module load python/2.7.6
    module load nipype/0.8
    module load fsl/5.0.5
    '''

    with open(os.path.join(base_dir, JOB_TEMPLATE_NAME), "w") as temp_file:
        temp_file.write(template)

    # ---------------Creating nipype workflow for bedpostx-------------------

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")

    nipype_bedpostx.inputs.inputnode.dwi = os.path.join(input_dir, 'data.nii')
    nipype_bedpostx.inputs.inputnode.mask = os.path.join(
        input_dir, 'nodif_brain_mask.nii')
    nipype_bedpostx.inputs.inputnode.bvecs = os.path.join(input_dir, 'bvecs')
    nipype_bedpostx.inputs.inputnode.bvals = os.path.join(input_dir, 'bvals')
    nipype_bedpostx.inputs.xfibres.n_fibres = 1
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    workflow = nipype.pipeline.engine.Workflow(name=OUTPUT_DIR)
    workflow.add_nodes([nipype_bedpostx])
    workflow.base_dir = base_dir
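
The excerpt stops after assembling the workflow and never shows the submission step. One way the PBS template written above could be handed to nipype, kept here strictly as a hedged sketch rather than the original code:

# Hedged continuation sketch, not part of the original example: submit through
# nipype's PBS plugin, reusing base_dir and JOB_TEMPLATE_NAME from the snippet.
workflow.run(plugin="PBS",
             plugin_args={"template": os.path.join(base_dir, JOB_TEMPLATE_NAME)})
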
Example #5
def test_create_bedpostx_pipeline():
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])

    mask_file = os.path.join(fsl_course_dir,
                             "fdt2/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt2/subj1/data.nii.gz")
    z_min = 62
    z_size = 2

    slice_mask = pe.Node(fsl.ExtractROI(x_min=0,
                                        x_size=-1,
                                        y_min=0,
                                        y_size=-1,
                                        z_min=z_min,
                                        z_size=z_size),
                         name="slice_mask")
    slice_mask.inputs.in_file = mask_file

    slice_dwi = pe.Node(fsl.ExtractROI(x_min=0,
                                       x_size=-1,
                                       y_min=0,
                                       y_size=-1,
                                       z_min=z_min,
                                       z_size=z_size),
                        name="slice_dwi")
    slice_dwi.inputs.in_file = dwi_file

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 1
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 0
    nipype_bedpostx.inputs.xfibres.n_jumps = 1
    nipype_bedpostx.inputs.xfibres.sample_every = 1
    nipype_bedpostx.inputs.xfibres.cnlinear = True
    nipype_bedpostx.inputs.xfibres.seed = 0

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(),
                                    name="original_bedpostx")
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.n_fibres = 1
    original_bedpostx.inputs.fudge = 1
    original_bedpostx.inputs.burn_in = 0
    original_bedpostx.inputs.n_jumps = 1
    original_bedpostx.inputs.sample_every = 1
    original_bedpostx.inputs.seed = 0

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    pipeline.connect([
        (slice_mask, original_bedpostx, [("roi_file", "mask")]),
        (slice_mask, nipype_bedpostx, [("roi_file", "inputnode.mask")]),
        (slice_dwi, original_bedpostx, [("roi_file", "dwi")]),
        (slice_dwi, nipype_bedpostx, [("roi_file", "inputnode.dwi")]),
        (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples",
                                      list_to_filename), "volume1")]),
        (original_bedpostx, test_f1, [("mean_fsamples", "volume2")]),
    ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
Example #6
def test_create_bedpostx_pipeline():
    fsl_course_dir = os.path.abspath('fsl_course_data')

    mask_file = os.path.join(fsl_course_dir,
                             "fdt/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt/subj1/data.nii.gz")

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.dwi = dwi_file
    nipype_bedpostx.inputs.inputnode.mask = mask_file
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 2
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(),
                                    name="original_bedpostx")
    original_bedpostx.inputs.dwi = dwi_file
    original_bedpostx.inputs.mask = mask_file
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.fibres = 2
    original_bedpostx.inputs.weight = 1
    original_bedpostx.inputs.burn_period = 1000
    original_bedpostx.inputs.jumps = 1250
    original_bedpostx.inputs.sampling = 25

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")
    test_f2 = pe.Node(util.AssertEqual(), name="mean_f2_test")
    test_th1 = pe.Node(util.AssertEqual(), name="mean_th1_test")
    test_th2 = pe.Node(util.AssertEqual(), name="mean_th2_test")
    test_ph1 = pe.Node(util.AssertEqual(), name="mean_ph1_test")
    test_ph2 = pe.Node(util.AssertEqual(), name="mean_ph2_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    def pickFirst(l):
        return l[0]

    def pickSecond(l):
        return l[1]

    pipeline.connect([
        (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", pickFirst),
                                     "volume1")]),
        (nipype_bedpostx, test_f2, [(("outputnode.mean_fsamples", pickSecond),
                                     "volume1")]),
        (nipype_bedpostx, test_th1, [(("outputnode.mean_thsamples", pickFirst),
                                      "volume1")]),
        (nipype_bedpostx, test_th2, [(("outputnode.mean_thsamples",
                                       pickSecond), "volume1")]),
        (nipype_bedpostx, test_ph1, [(("outputnode.mean_phsamples", pickFirst),
                                      "volume1")]),
        (nipype_bedpostx, test_ph2, [(("outputnode.mean_phsamples",
                                       pickSecond), "volume1")]),
        (original_bedpostx, test_f1, [(("mean_fsamples", pickFirst), "volume2")
                                      ]),
        (original_bedpostx, test_f2, [(("mean_fsamples", pickSecond),
                                       "volume2")]),
        (original_bedpostx, test_th1, [(("mean_thsamples", pickFirst),
                                        "volume2")]),
        (original_bedpostx, test_th2, [(("mean_thsamples", pickSecond),
                                        "volume2")]),
        (original_bedpostx, test_ph1, [(("mean_phsamples", pickFirst),
                                        "volume2")]),
        (original_bedpostx, test_ph2, [(("mean_phsamples", pickSecond),
                                        "volume2")])
    ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
Example #7
      [['subject_id', 'base_name']]), (bet, dtifit, [('mask_file', 'mask')])])
"""
Setup for Tractography
----------------------

Here we create a workflow that performs probabilistic tractography
and hard segmentation of the seed region.
"""

tractography = pe.Workflow(name='tractography')
tractography.base_dir = os.path.abspath('fsl_dti_tutorial')
"""
estimate the diffusion parameters: phi, theta, and so on
"""

bedpostx = create_bedpostx_pipeline()
bedpostx.get_node("xfibres").iterables = ("n_fibres", [1, 2])

flirt = pe.Node(interface=fsl.FLIRT(), name='flirt')
flirt.inputs.in_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
flirt.inputs.dof = 12
"""
perform probabilistic tractography (a connection sketch follows this example)
"""

probtrackx = pe.Node(interface=fsl.ProbTrackX(), name='probtrackx')
probtrackx.inputs.mode = 'seedmask'
probtrackx.inputs.c_thresh = 0.2
probtrackx.inputs.n_steps = 2000
probtrackx.inputs.step_length = 0.5
probtrackx.inputs.n_samples = 5000
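
To finish the setup, the bedpostx outputs would still have to be connected to probtrackx. The sketch below follows the field names used by the nipype FSL workflows (outputnode.thsamples and friends), but treat them as assumptions that may differ between nipype versions:

# Assumed connections; output field names may vary across nipype versions.
tractography.connect([(bedpostx, probtrackx,
                       [("outputnode.thsamples", "thsamples"),
                        ("outputnode.phsamples", "phsamples"),
                        ("outputnode.fsamples", "fsamples")])])
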
Example #8
    #PBS -l pmem=600mb
    #PBS -l walltime=4:00:00
    #PBS -e ''' + base_dir + '''my_job.err
    #PBS -o ''' + base_dir + '''my_job.log
    module load python/2.7.6
    module load nipype/0.8
    module load fsl/5.0.5
    '''

    with open(os.path.join(base_dir, JOB_TEMPLATE_NAME), "w") as temp_file:
        temp_file.write(template)


    # ---------------Creating nipype workflow for bedpostx-------------------

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")

    nipype_bedpostx.inputs.inputnode.dwi = os.path.join(input_dir, 'data.nii')
    nipype_bedpostx.inputs.inputnode.mask = os.path.join(
        input_dir, 'nodif_brain_mask.nii')
    nipype_bedpostx.inputs.inputnode.bvecs = os.path.join(input_dir, 'bvecs')
    nipype_bedpostx.inputs.inputnode.bvals = os.path.join(input_dir, 'bvals')
    nipype_bedpostx.inputs.xfibres.n_fibres = 1
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    workflow = nipype.pipeline.engine.Workflow(name=OUTPUT_DIR)
    workflow.add_nodes([nipype_bedpostx])
    workflow.base_dir = base_dir
Example #9
"""
anat2b0 = pe.Node(interface=fsl.FLIRT(dof=6), name="anat2b0")
anat2b0.inputs.cost_func = "mutualinfo"  # between-modality
mni2anat = pe.Node(interface=fsl.FLIRT(dof=12), name="mni2anat")
mni2anat.inputs.in_file = fsl.Info.standard_image("MNI152_T1_2mm_brain.nii.gz")
mni2anat.inputs.cost_func = "corratio"

"""
register MNI152 --> b0 using the transformation matrices obtained above
"""
xfmconcat = pe.Node(interface=fsl.ConvertXFM(concat_xfm=True), name="xfmconcat")

"""
estimate the diffusion parameters: phi, theta, and so on
"""
bedpostx = create_bedpostx_pipeline(name="bedpostx")
bedpostx.inputs.xfibres.n_fibres = 2


preTract = pe.Workflow(name="preTract")
preTract.base_dir = workingDir
preTract.connect(
    [
        (infosource, datasource, [("subject_id", "subject_id"), (("subject_id", subjrlf), "template_args")]),
        (datasource, fslroi, [("dwi", "in_file")]),
        (datasource, eddycorrect, [("dwi", "inputnode.in_file")]),
        (fslroi, bet, [("roi_file", "in_file")]),
        (fslroi, anat2b0, [("roi_file", "reference")]),
        (datasource, anat2b0, [("mri", "in_file")]),
        (datasource, mni2anat, [("mri", "reference")]),
        (mni2anat, xfmconcat, [("out_matrix_file", "in_file")]),
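
The excerpt ends mid connection list, so the second matrix never reaches xfmconcat here. As a standalone, hypothetical reference, fsl.ConvertXFM concatenates two FLIRT matrices like this (the .mat file names are placeholders and must exist; in_file is the transform applied first):

# Hypothetical standalone sketch; matrix file names are placeholders.
import nipype.interfaces.fsl as fsl

concat = fsl.ConvertXFM(concat_xfm=True,
                        in_file="mni2anat.mat",    # applied first  (MNI  -> anat)
                        in_file2="anat2b0.mat",    # applied second (anat -> b0)
                        out_file="mni2b0.mat")
print(concat.cmdline)  # roughly: convert_xfm -omat mni2b0.mat -concat anat2b0.mat mni2anat.mat
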
Example #10
def dti_preprocessing(args):
    '''
    Preprocess DTI data for each subject: convert DICOMs to NIfTI, run eddy
    correction, rotate the bvecs, extract the b0, create a brain mask, fit
    the tensor with dtifit, and run bedpostx via create_bedpostx_pipeline.
    '''
    dataLoc = '/Volumes/CCNC_3T/KMA'
    subject_list = args.subjects

    # Make exclusion mask in order to exclude tracks
    # going towards posterior paths from the thalamus
    for subject in subject_list:
        dtiDir = os.path.join(dataLoc, subject, 'DTI')

        # Check dicom
        dicoms = glob.glob(dtiDir+'/*dcm') + \
                glob.glob(dtiDir+'/*IMA') + \
                glob.glob(dtiDir+'/dicom/*dcm') + \
                glob.glob(dtiDir+'/dicom/*IMA')

        if dicoms:
            dicomDir = os.path.join(dtiDir, 'dicom')
            if not os.path.isdir(dicomDir):
                os.mkdir(dicomDir)

            try:
                for dicomFile in dicoms:
                    shutil.move(dicomFile, dicomDir)
            except shutil.Error:
                # the file already exists in dicomDir; skip it
                pass
            dicoms = glob.glob(dicomDir+'/*dcm') + glob.glob(dicomDir+'/*IMA')

            nifti = glob.glob(dtiDir+'/*nii.gz')
            # if no nifti
            if not nifti:
                converter = Dcm2nii(
                        source_names = dicoms,
                        gzip_output = True,
                        output_dir = dtiDir)
                converter.run()

        # raw data
        data = glob.glob(dtiDir+'/2*DTI*.nii.gz')[0]
        bvec = glob.glob(dtiDir+'/2*.bvec')[0]
        bval = glob.glob(dtiDir+'/2*.bval')[0]

        # preprocessed data
        eddy_out = os.path.join(dtiDir, 'data_eddy.nii.gz')
        newBvec = os.path.join(dtiDir, 'bvecs_new')
        nodif = os.path.join(dtiDir, 'nodif.nii.gz')
        nodif_brain = os.path.join(dtiDir, 'nodif_brain.nii.gz')
        nodif_brain_mask = os.path.join(dtiDir, 'nodif_brain_mask.nii.gz')
        fa_map = os.path.join(dtiDir, 'dti_FA.nii.gz')
        bedpostxdir = os.path.join(subject, 'DTI.bedpostX')

        if not os.path.isfile(eddy_out):
            eddy = fsl.EddyCorrect(
                    in_file = data,
                    out_file = eddy_out)
            eddy.run()

        if not os.path.isfile(newBvec):
            bvecCorrectCommand = 'bash /Volumes/CCNC_3T/KMA/kmaproject/fdt_rotate_bvecs.sh \
                    {origBvec} {newBvec} {ecclog}'.format(
                    origBvec = bvec,
                    newBvec = newBvec,
                    ecclog = eddy_out.split('nii.gz')[0]+'ecclog'
                    )
            print(bvecCorrectCommand)
            os.popen(bvecCorrectCommand).read()


        if not os.path.isfile(nodif):
            extractROI = fsl.ExtractROI(
                            in_file = eddy_out,
                            t_min = 0,
                            t_size = 1,
                            roi_file = nodif,)
            extractROI.run()

        if not os.path.isfile(nodif_brain_mask):
            bet = fsl.BET(
                    in_file = eddy_out,
                    frac = .35,
                    mask = True,
                    out_file = nodif_brain
                    )
            bet.run()

        if not os.path.isfile(fa_map):
            dtifit = fsl.DTIFit(
                    bvals = bval,
                    bvecs = newBvec,
                    dwi = eddy_out,
                    mask = nodif_brain_mask,
                    base_name = dtiDir+'/DTI')
            dtifit.run()


        params = dict(n_fibres = 2,
                fudge = 1,
                burn_in = 1000,
                n_jumps = 1250,
                sample_every = 25)

        bpwf = create_bedpostx_pipeline('nipype_bedpostx', params)
        bpwf.inputs.inputnode.dwi = eddy_out
        bpwf.inputs.inputnode.mask = nodif_brain_mask
        bpwf.inputs.inputnode.bvecs = newBvec
        bpwf.inputs.inputnode.bvals = bval
        bpwf.run()
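
Here create_bedpostx_pipeline receives a params dict instead of setting inputs.xfibres.* afterwards, which matches the params-style signature of newer nipype releases; both conventions appear in these examples. If a dedicated working directory and local parallelism were wanted, the final call could be varied as follows (a hedged sketch, with a placeholder process count):

# Hedged variant of the final call; n_procs is a placeholder value.
bpwf.base_dir = dtiDir                                   # keep run files next to the data
bpwf.run(plugin="MultiProc", plugin_args={"n_procs": 4})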