Example #1
0
def test_create_eddy_correct_pipeline():
    """Compare the nipype eddy-correct workflow against FSL's EddyCorrect.

    Runs both implementations on the same FSL-course DWI volume and lets
    an ``AssertEqual`` node verify that the corrected volumes are
    identical.  Expects an ``fsl_course_data`` directory relative to the
    current working directory.
    """
    fsl_course_dir = os.path.abspath('fsl_course_data')

    dwi_file = os.path.join(fsl_course_dir, "fdt/subj1/data.nii.gz")

    nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect")
    nipype_eddycorrect.inputs.inputnode.in_file = dwi_file
    nipype_eddycorrect.inputs.inputnode.ref_num = 0

    # EddyCorrect emits a deprecation warning on construction; silence it
    # for the reference node only.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_eddycorrect = pe.Node(interface=fsl.EddyCorrect(),
                                       name="original_eddycorrect")
    original_eddycorrect.inputs.in_file = dwi_file
    original_eddycorrect.inputs.ref_num = 0

    test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test")

    pipeline = pe.Workflow(name="test_eddycorrect")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_eddycorrect_")

    pipeline.connect([
        (nipype_eddycorrect, test, [("outputnode.eddy_corrected", "volume1")]),
        (original_eddycorrect, test, [("eddy_corrected", "volume2")]),
    ])

    try:
        pipeline.run(plugin='Linear')
    finally:
        # Remove the scratch directory even when the pipeline run fails,
        # so repeated test runs do not leak temp dirs.
        shutil.rmtree(pipeline.base_dir)
Example #2
0
def test_create_eddy_correct_pipeline():
    """Compare the nipype eddy-correct workflow against FSL's EddyCorrect.

    The DWI series is first trimmed to two volumes (``ExtractROI``) to
    keep runtime short; both implementations then receive the trimmed
    file and an ``AssertEqual`` node verifies the outputs match.
    Requires the ``FSL_COURSE_DATA`` environment variable.
    """
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])

    dwi_file = os.path.join(fsl_course_dir, "fdt1/subj1/data.nii.gz")

    # Only the first two volumes are needed for an equivalence check.
    trim_dwi = pe.Node(fsl.ExtractROI(t_min=0, t_size=2), name="trim_dwi")
    trim_dwi.inputs.in_file = dwi_file

    nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect")
    nipype_eddycorrect.inputs.inputnode.ref_num = 0

    # EddyCorrect emits a deprecation warning on construction; silence it
    # for the reference node only.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_eddycorrect = pe.Node(interface=fsl.EddyCorrect(),
                                       name="original_eddycorrect")
    original_eddycorrect.inputs.ref_num = 0

    test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test")

    pipeline = pe.Workflow(name="test_eddycorrect")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_eddycorrect_")

    pipeline.connect([
        (trim_dwi, original_eddycorrect, [("roi_file", "in_file")]),
        (trim_dwi, nipype_eddycorrect, [("roi_file", "inputnode.in_file")]),
        (nipype_eddycorrect, test, [("outputnode.eddy_corrected", "volume1")]),
        (original_eddycorrect, test, [("eddy_corrected", "volume2")]),
    ])

    try:
        pipeline.run(plugin='Linear')
    finally:
        # Remove the scratch directory even when the pipeline run fails.
        shutil.rmtree(pipeline.base_dir)
Example #3
0
def test_create_bedpostx_pipeline():
    """Compare the nipype bedpostx workflow against FSL's BEDPOSTX.

    Both are run on a two-slice crop of the FSL-course diffusion data
    (to keep the MCMC sampling fast) with identical, deterministic
    parameters (``seed=0``), and the mean f1 samples are asserted equal.
    Requires the ``FSL_COURSE_DATA`` environment variable.
    """
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])

    mask_file = os.path.join(fsl_course_dir,
                             "fdt2/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt2/subj1/data.nii.gz")
    # Restrict processing to two axial slices for speed.
    z_min = 62
    z_size = 2

    slice_mask = pe.Node(fsl.ExtractROI(x_min=0,
                                        x_size=-1,
                                        y_min=0,
                                        y_size=-1,
                                        z_min=z_min,
                                        z_size=z_size),
                         name="slice_mask")
    slice_mask.inputs.in_file = mask_file

    slice_dwi = pe.Node(fsl.ExtractROI(x_min=0,
                                       x_size=-1,
                                       y_min=0,
                                       y_size=-1,
                                       z_min=z_min,
                                       z_size=z_size),
                        name="slice_dwi")
    slice_dwi.inputs.in_file = dwi_file

    # Minimal, deterministic sampling so both runs produce identical output.
    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 1
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 0
    nipype_bedpostx.inputs.xfibres.n_jumps = 1
    nipype_bedpostx.inputs.xfibres.sample_every = 1
    nipype_bedpostx.inputs.xfibres.cnlinear = True
    nipype_bedpostx.inputs.xfibres.seed = 0

    # BEDPOSTX emits a deprecation warning on construction; silence it
    # for the reference node only.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(),
                                    name="original_bedpostx")
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    # Disable cluster submission so the reference run stays local.
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    original_bedpostx.inputs.n_fibres = 1
    original_bedpostx.inputs.fudge = 1
    original_bedpostx.inputs.burn_in = 0
    original_bedpostx.inputs.n_jumps = 1
    original_bedpostx.inputs.sample_every = 1
    original_bedpostx.inputs.seed = 0

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    pipeline.connect([
        (slice_mask, original_bedpostx, [("roi_file", "mask")]),
        (slice_mask, nipype_bedpostx, [("roi_file", "inputnode.mask")]),
        (slice_dwi, original_bedpostx, [("roi_file", "dwi")]),
        (slice_dwi, nipype_bedpostx, [("roi_file", "inputnode.dwi")]),
        (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples",
                                      list_to_filename), "volume1")]),
        (original_bedpostx, test_f1, [("mean_fsamples", "volume2")]),
    ])

    try:
        pipeline.run(plugin='Linear')
    finally:
        # Remove the scratch directory even when the pipeline run fails.
        shutil.rmtree(pipeline.base_dir)
Example #4
0
def test_create_bedpostx_pipeline():
    """Compare the nipype bedpostx workflow against FSL's BEDPOSTX.

    Both are run on the full FSL-course diffusion data with two fibres,
    and the mean f/theta/phi sample volumes for each fibre are compared
    pairwise with ``AssertEqual`` nodes.  Expects an ``fsl_course_data``
    directory relative to the current working directory.
    """
    fsl_course_dir = os.path.abspath('fsl_course_data')

    mask_file = os.path.join(fsl_course_dir,
                             "fdt/subj1.bedpostX/nodif_brain_mask.nii.gz")
    bvecs_file = os.path.join(fsl_course_dir, "fdt/subj1/bvecs")
    bvals_file = os.path.join(fsl_course_dir, "fdt/subj1/bvals")
    dwi_file = os.path.join(fsl_course_dir, "fdt/subj1/data.nii.gz")

    nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx")
    nipype_bedpostx.inputs.inputnode.dwi = dwi_file
    nipype_bedpostx.inputs.inputnode.mask = mask_file
    nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file
    nipype_bedpostx.inputs.inputnode.bvals = bvals_file
    nipype_bedpostx.inputs.xfibres.n_fibres = 2
    nipype_bedpostx.inputs.xfibres.fudge = 1
    nipype_bedpostx.inputs.xfibres.burn_in = 1000
    nipype_bedpostx.inputs.xfibres.n_jumps = 1250
    nipype_bedpostx.inputs.xfibres.sample_every = 25

    # BEDPOSTX emits a deprecation warning on construction; silence it
    # for the reference node only.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_bedpostx = pe.Node(interface=fsl.BEDPOSTX(),
                                    name="original_bedpostx")
    original_bedpostx.inputs.dwi = dwi_file
    original_bedpostx.inputs.mask = mask_file
    original_bedpostx.inputs.bvecs = bvecs_file
    original_bedpostx.inputs.bvals = bvals_file
    # Disable cluster submission so the reference run stays local.
    original_bedpostx.inputs.environ['FSLPARALLEL'] = ""
    # NOTE(review): these input names (fibres/weight/burn_period/jumps/
    # sampling) belong to an older BEDPOSTX interface than the
    # n_fibres/fudge/burn_in names used elsewhere in this file — confirm
    # against the installed nipype version.
    original_bedpostx.inputs.fibres = 2
    original_bedpostx.inputs.weight = 1
    original_bedpostx.inputs.burn_period = 1000
    original_bedpostx.inputs.jumps = 1250
    original_bedpostx.inputs.sampling = 25

    test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test")
    test_f2 = pe.Node(util.AssertEqual(), name="mean_f2_test")
    test_th1 = pe.Node(util.AssertEqual(), name="mean_th1_test")
    test_th2 = pe.Node(util.AssertEqual(), name="mean_th2_test")
    test_ph1 = pe.Node(util.AssertEqual(), name="mean_ph1_test")
    test_ph2 = pe.Node(util.AssertEqual(), name="mean_ph2_test")

    pipeline = pe.Workflow(name="test_bedpostx")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_bedpostx_")

    # Plain named functions (not lambdas/itemgetter) so nipype can
    # serialize them as connection modifiers.
    def pickFirst(l):
        return l[0]

    def pickSecond(l):
        return l[1]

    pipeline.connect([
        (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", pickFirst),
                                     "volume1")]),
        (nipype_bedpostx, test_f2, [(("outputnode.mean_fsamples", pickSecond),
                                     "volume1")]),
        (nipype_bedpostx, test_th1, [(("outputnode.mean_thsamples", pickFirst),
                                      "volume1")]),
        (nipype_bedpostx, test_th2, [(("outputnode.mean_thsamples",
                                       pickSecond), "volume1")]),
        (nipype_bedpostx, test_ph1, [(("outputnode.mean_phsamples", pickFirst),
                                      "volume1")]),
        (nipype_bedpostx, test_ph2, [(("outputnode.mean_phsamples",
                                       pickSecond), "volume1")]),
        (original_bedpostx, test_f1, [(("mean_fsamples", pickFirst),
                                       "volume2")]),
        (original_bedpostx, test_f2, [(("mean_fsamples", pickSecond),
                                       "volume2")]),
        (original_bedpostx, test_th1, [(("mean_thsamples", pickFirst),
                                        "volume2")]),
        (original_bedpostx, test_th2, [(("mean_thsamples", pickSecond),
                                        "volume2")]),
        (original_bedpostx, test_ph1, [(("mean_phsamples", pickFirst),
                                        "volume2")]),
        (original_bedpostx, test_ph2, [(("mean_phsamples", pickSecond),
                                        "volume2")]),
    ])

    try:
        pipeline.run(plugin='Linear')
    finally:
        # Remove the scratch directory even when the pipeline run fails.
        shutil.rmtree(pipeline.base_dir)
Example #5
0
def _tbss_test_helper(estimate_skeleton):
    """Compare the nipype TBSS workflow against the FSL tbss_* scripts.

    Runs tbss_1..tbss_4 via the command-line scripts, snapshotting the
    working tree after each stage, then runs ``create_tbss_all`` and
    compares every stage's outputs with AssertEqual nodes.

    Parameters
    ----------
    estimate_skeleton : bool
        Passed through to ``create_tbss_all``; selects ``tbss_3_postreg -S``
        (estimate the skeleton) vs ``-T`` (use the FMRIB58 template).

    Requires the ``FSL_COURSE_DATA`` environment variable.
    """
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # Make the FSL scripts produce .nii.gz as well.  The original code
    # passed env=os.environ.update(...), but dict.update() returns None,
    # so env was always None and the scripts only worked because
    # os.environ had been mutated as a side effect.  Set it explicitly.
    os.environ['FSLOUTPUTTYPE'] = 'NIFTI_GZ'
    test_dir = tempfile.mkdtemp(prefix="nipype_test_tbss_")
    tbss_orig_dir = os.path.join(test_dir, "tbss_all_original")
    os.mkdir(tbss_orig_dir)
    old_dir = os.getcwd()
    os.chdir(tbss_orig_dir)

    subjects = ['1260', '1549']
    FA_list = [
        os.path.join(fsl_course_dir, 'tbss', subject_id + '.nii.gz')
        for subject_id in subjects
    ]
    for f in FA_list:
        shutil.copy(f, os.getcwd())

    # Run the reference FSL scripts stage by stage, copying the working
    # tree after each stage so later stages do not overwrite the
    # intermediate results we compare against.
    call(['tbss_1_preproc'] +
         [subject_id + '.nii.gz' for subject_id in subjects])
    tbss1_orig_dir = os.path.join(test_dir, "tbss1_original")
    shutil.copytree(tbss_orig_dir, tbss1_orig_dir)

    call(['tbss_2_reg', '-T'])
    tbss2_orig_dir = os.path.join(test_dir, "tbss2_original")
    shutil.copytree(tbss_orig_dir, tbss2_orig_dir)

    if estimate_skeleton:
        call(['tbss_3_postreg', '-S'])
    else:
        call(['tbss_3_postreg', '-T'])
    tbss3_orig_dir = os.path.join(test_dir, "tbss3_original")
    shutil.copytree(tbss_orig_dir, tbss3_orig_dir)

    call(['tbss_4_prestats', '0.2'])
    tbss4_orig_dir = os.path.join(test_dir, "tbss4_original")
    shutil.copytree(tbss_orig_dir, tbss4_orig_dir)

    pipeline = pe.Workflow(name="test_tbss")
    pipeline.base_dir = os.path.join(test_dir, "tbss_nipype")

    tbss = create_tbss_all(estimate_skeleton=estimate_skeleton)
    tbss.inputs.inputnode.fa_list = FA_list
    tbss.inputs.inputnode.skeleton_thresh = 0.2

    # --- stage 1: preprocessed FA images and masks -----------------------
    tbss1_original_datasource = pe.Node(nio.DataGrabber(
        outfields=['fa_list', 'mask_list'], sort_filelist=False),
                                        name='tbss1_original_datasource')
    tbss1_original_datasource.inputs.base_directory = tbss1_orig_dir
    tbss1_original_datasource.inputs.template = 'FA/%s_FA%s.nii.gz'
    tbss1_original_datasource.inputs.template_args = dict(
        fa_list=[[subjects, '']], mask_list=[[subjects, '_mask']])

    tbss1_test_fa = pe.MapNode(util.AssertEqual(),
                               name="tbss1_fa_test",
                               iterfield=['volume1', 'volume2'])
    tbss1_test_mask = pe.MapNode(util.AssertEqual(),
                                 name="tbss1_mask_test",
                                 iterfield=['volume1', 'volume2'])

    pipeline.connect(tbss, 'tbss1.outputnode.fa_list', tbss1_test_fa,
                     'volume1')
    pipeline.connect(tbss, 'tbss1.outputnode.mask_list', tbss1_test_mask,
                     'volume1')
    pipeline.connect(tbss1_original_datasource, 'fa_list', tbss1_test_fa,
                     'volume2')
    pipeline.connect(tbss1_original_datasource, 'mask_list', tbss1_test_mask,
                     'volume2')

    # --- stage 2: nonlinear registration warp fields ---------------------
    tbss2_original_datasource = pe.Node(nio.DataGrabber(
        outfields=['field_list'], sort_filelist=False),
                                        name='tbss2_original_datasource')
    tbss2_original_datasource.inputs.base_directory = tbss2_orig_dir
    tbss2_original_datasource.inputs.template = 'FA/%s_FA%s.nii.gz'
    tbss2_original_datasource.inputs.template_args = dict(
        field_list=[[subjects, '_to_target_warp']])
    tbss2_test_field = pe.MapNode(util.AssertEqual(),
                                  name="tbss2_test_field",
                                  iterfield=['volume1', 'volume2'])

    pipeline.connect(tbss, 'tbss2.outputnode.field_list', tbss2_test_field,
                     'volume1')
    pipeline.connect(tbss2_original_datasource, 'field_list', tbss2_test_field,
                     'volume2')

    # --- stage 3: group mask, skeleton, mean/merged FA -------------------
    tbss3_original_datasource = pe.Node(nio.DataGrabber(outfields=[
        'groupmask', 'skeleton_file', 'meanfa_file', 'mergefa_file'
    ],
                                                        sort_filelist=False),
                                        name='tbss3_original_datasource')
    tbss3_original_datasource.inputs.base_directory = tbss3_orig_dir
    tbss3_original_datasource.inputs.template = 'stats/%s.nii.gz'
    tbss3_original_datasource.inputs.template_args = dict(
        groupmask=[['mean_FA_mask']],
        skeleton_file=[['mean_FA_skeleton']],
        meanfa_file=[['mean_FA']],
        mergefa_file=[['all_FA']])

    tbss3_test_groupmask = pe.Node(util.AssertEqual(),
                                   name="tbss3_test_groupmask")
    tbss3_test_skeleton_file = pe.Node(util.AssertEqual(),
                                       name="tbss3_test_skeleton_file")
    tbss3_test_meanfa_file = pe.Node(util.AssertEqual(),
                                     name="tbss3_test_meanfa_file")
    tbss3_test_mergefa_file = pe.Node(util.AssertEqual(),
                                      name="tbss3_test_mergefa_file")

    pipeline.connect(tbss, 'tbss3.outputnode.groupmask', tbss3_test_groupmask,
                     'volume1')
    pipeline.connect(tbss3_original_datasource, 'groupmask',
                     tbss3_test_groupmask, 'volume2')
    pipeline.connect(tbss, 'tbss3.outputnode.skeleton_file',
                     tbss3_test_skeleton_file, 'volume1')
    pipeline.connect(tbss3_original_datasource, 'skeleton_file',
                     tbss3_test_skeleton_file, 'volume2')
    pipeline.connect(tbss, 'tbss3.outputnode.meanfa_file',
                     tbss3_test_meanfa_file, 'volume1')
    pipeline.connect(tbss3_original_datasource, 'meanfa_file',
                     tbss3_test_meanfa_file, 'volume2')
    pipeline.connect(tbss, 'tbss3.outputnode.mergefa_file',
                     tbss3_test_mergefa_file, 'volume1')
    pipeline.connect(tbss3_original_datasource, 'mergefa_file',
                     tbss3_test_mergefa_file, 'volume2')

    # --- stage 4: skeletonised FA and skeleton mask ----------------------
    tbss4_original_datasource = pe.Node(nio.DataGrabber(
        outfields=['all_FA_skeletonised', 'mean_FA_skeleton_mask'],
        sort_filelist=False),
                                        name='tbss4_original_datasource')
    tbss4_original_datasource.inputs.base_directory = tbss4_orig_dir
    tbss4_original_datasource.inputs.template = 'stats/%s.nii.gz'
    tbss4_original_datasource.inputs.template_args = dict(
        all_FA_skeletonised=[['all_FA_skeletonised']],
        mean_FA_skeleton_mask=[['mean_FA_skeleton_mask']])
    tbss4_test_all_FA_skeletonised = pe.Node(
        util.AssertEqual(), name="tbss4_test_all_FA_skeletonised")
    tbss4_test_mean_FA_skeleton_mask = pe.Node(
        util.AssertEqual(), name="tbss4_test_mean_FA_skeleton_mask")

    pipeline.connect(tbss, 'tbss4.outputnode.projectedfa_file',
                     tbss4_test_all_FA_skeletonised, 'volume1')
    pipeline.connect(tbss4_original_datasource, 'all_FA_skeletonised',
                     tbss4_test_all_FA_skeletonised, 'volume2')
    pipeline.connect(tbss, 'tbss4.outputnode.skeleton_mask',
                     tbss4_test_mean_FA_skeleton_mask, 'volume1')
    pipeline.connect(tbss4_original_datasource, 'mean_FA_skeleton_mask',
                     tbss4_test_mean_FA_skeleton_mask, 'volume2')

    try:
        pipeline.run(plugin='Linear')
    finally:
        # Restore the working directory and remove the scratch tree even
        # when the pipeline run fails.
        os.chdir(old_dir)
        shutil.rmtree(test_dir)
Example #6
0
def test_tbss_all_pipeline():
    """Compare the nipype TBSS workflow against pre-computed FSL TBSS results.

    Merges the per-subject outputs of the nipype ``create_tbss_all``
    workflow and asserts them equal to results previously produced by
    the FSL command-line tools, stored under hard-coded NFS paths.
    """
    data_dir = '/nfs/s2/dticenter/data4test/tbss/mydata'
    # fsl_course_dir = os.getenv('FSL_COURSE_DATA')
    # data_dir = os.path.join(fsl_course_dir,'fsl_course_data/tbss')
    # subject_list = ['1260','1549','1636','1651','2078','2378']
    subject_list = [
        'S0001', 'S0005', 'S0036', 'S0038', 'S0085', 'S0099', 'S0004', 'S0032',
        'S0037', 'S0057', 'S0098'
    ]
    subject_list.sort()
    fsl_tbss_dir = '/nfs/s2/dticenter/data4test/tbss/tbss_fsl/tbss_mydata'
    workingdir = '/nfs/s2/dticenter/data4test/tbss/tbss_test_workingdir'

    def getFAList(subject_list):
        """Return the per-subject FA image paths for the nipype workflow."""
        fa_list = []
        for subject_id in subject_list:
            fa_list.append(os.path.join(data_dir, subject_id + '_FA.nii.gz'))
        return fa_list

    # The nipype TBSS workflow under test.
    tbss_all = tbss.create_tbss_all(name='tbss_all')
    tbss_all.inputs.inputnode.target = fsl.Info.standard_image(
        "FMRIB58_FA_1mm.nii.gz")
    tbss_all.inputs.inputnode.skeleton_thresh = 0.2
    tbss_all.inputs.inputnode.fa_list = getFAList(subject_list)

    # Reference FSL_TBSS results.  (The parameters below were originally
    # misspelled 'subjct_list' and silently shadowed by the enclosing
    # 'subject_list'; fixed to actually use the argument.)
    def getFA_prep_list(subject_list):
        """Return the FSL-preprocessed FA image paths."""
        fa_prep_list = []
        for subject_id in subject_list:
            fa_prep_list.append(
                os.path.join(fsl_tbss_dir, 'FA', subject_id + '_FA_FA.nii.gz'))
        return fa_prep_list

    def getmask_prep_list(subject_list):
        """Return the FSL-preprocessed mask image paths."""
        mask_prep_list = []
        for subject_id in subject_list:
            mask_prep_list.append(
                os.path.join(fsl_tbss_dir, 'FA',
                             subject_id + '_FA_FA_mask.nii.gz'))
        return mask_prep_list

    def getfield_list(subject_list):
        """Return registration field paths (used only by the commented-out
        'field' comparison below)."""
        field_list = []
        for subject_id in subject_list:
            field_list.append(
                os.path.join(fsl_tbss_dir, 'FA',
                             subject_id + '_FA_FA_to_target.mat'))
        return field_list

    t3_all_FA = os.path.join(fsl_tbss_dir, 'stats/all_FA.nii.gz')
    t3_mean_FA = os.path.join(fsl_tbss_dir, 'stats/mean_FA.nii.gz')
    t3_groupmask = os.path.join(fsl_tbss_dir, 'stats/mean_FA_mask.nii.gz')
    t3_skeleton_file = os.path.join(fsl_tbss_dir,
                                    'stats/mean_FA_skeleton.nii.gz')
    t4_all_FA_skeletonised = os.path.join(fsl_tbss_dir,
                                          'stats/all_FA_skeletonised.nii.gz')
    t4_mean_FA_skeleton_mask = os.path.join(
        fsl_tbss_dir, 'stats/mean_FA_skeleton_mask.nii.gz')
    t4_mean_FA_skeleton_mask_dst = os.path.join(
        fsl_tbss_dir, 'stats/mean_FA_skeleton_mask_dst.nii.gz')

    # Merge per-subject lists into single 4D volumes so AssertEqual can
    # compare them as a whole.
    merge_fa_list = pe.Node(fsl.Merge(dimension="t",
                                      merged_file="all_fa.nii.gz"),
                            name="merge_fa_list")
    merge_mask_list = pe.Node(fsl.Merge(dimension="t",
                                        merged_file="all_mask.nii.gz"),
                              name="merge_mask_list")

    # --- tbss1 output checks ---------------------------------------------
    FA_prep = pe.Node(util.AssertEqual(ignore_exception=False), name="FA_prep")
    merge_FA_prep = pe.Node(fsl.Merge(dimension="t",
                                      merged_file="all_FA_prep.nii.gz"),
                            name="merge_FA_prep")
    merge_FA_prep.inputs.in_files = getFA_prep_list(subject_list)
    #FA_prep = pe.MapNode(util.AssertEqual(ignore_exception = True), name = "FA_prep", iterfield=['volume1','volume2'])
    #FA_prep.inputs.volume2 = getFA_prep_list(subject_list)
    mask_prep = pe.Node(util.AssertEqual(), name="mask_prep")
    merge_mask_prep = pe.Node(fsl.Merge(dimension="t",
                                        merged_file="all_mask_prep.nii.gz"),
                              name="merge_mask_prep")
    merge_mask_prep.inputs.in_files = getmask_prep_list(subject_list)
    #mask_prep = pe.MapNode(util.AssertEqual(), name = "mask_prep", iterfield=['volume1','volume2'])
    #mask_prep.inputs.volume2 = getmask_prep_list(subject_list)

    # --- tbss2 output checks (currently disabled) ------------------------
    #field = pe.MapNode(util.AssertEqual(), name = "field", iterfield=['volume1','volume2'])
    #field.inputs.volume2 = getfield_list(subject_list)

    # --- tbss3 output checks ---------------------------------------------
    all_FA = pe.Node(util.AssertEqual(ignore_exception=False), name="all_FA")
    all_FA.inputs.volume2 = t3_all_FA
    mean_FA = pe.Node(util.AssertEqual(ignore_exception=False),
                      name="mean_FA")  # OK
    mean_FA.inputs.volume2 = t3_mean_FA
    groupmask = pe.Node(util.AssertEqual(ignore_exception=False),
                        name="groupmask")
    groupmask.inputs.volume2 = t3_groupmask
    skeleton_file = pe.Node(util.AssertEqual(ignore_exception=False),
                            name="skeleton_file")
    skeleton_file.inputs.volume2 = t3_skeleton_file

    # --- tbss4 output checks ---------------------------------------------
    all_FA_skeletonised = pe.Node(util.AssertEqual(ignore_exception=False),
                                  name="all_FA_skeletonised")
    all_FA_skeletonised.inputs.volume2 = t4_all_FA_skeletonised
    mean_FA_skeleton_mask = pe.Node(util.AssertEqual(ignore_exception=False),
                                    name="mean_FA_skeleton_mask")
    mean_FA_skeleton_mask.inputs.volume2 = t4_mean_FA_skeleton_mask
    mean_FA_skeleton_mask_dst = pe.Node(
        util.AssertEqual(ignore_exception=False),
        name="mean_FA_skeleton_mask_dst")
    mean_FA_skeleton_mask_dst.inputs.volume2 = t4_mean_FA_skeleton_mask_dst

    cmp_nipy2fsl = pe.Workflow(name="cmp_nipy2fsl")
    cmp_nipy2fsl.base_dir = workingdir
    cmp_nipy2fsl.connect([
        (tbss_all, merge_fa_list, [('outputall_node.fa_list1', 'in_files')
                                   ]),  #OK
        (merge_fa_list, FA_prep, [('merged_file', 'volume1')]),
        (merge_FA_prep, FA_prep, [('merged_file', 'volume2')]),
        #    (tbss_all, FA_prep, [('outputall_node.fa_list1','volume1')]),
        (tbss_all, merge_mask_list, [('outputall_node.mask_list1', 'in_files')]
         ),  #OK
        (merge_mask_list, mask_prep, [('merged_file', 'volume1')]),
        (merge_mask_prep, mask_prep, [('merged_file', 'volume2')]),
        #    (tbss_all, mask_prep, [('outputall_node.mask_list1','volume1')]),

        #    (tbss_all, field, [('outputall_node.field_list2','volume1')]),
        (tbss_all, all_FA, [('outputall_node.mergefa_file3', 'volume1')]),  #OK
        (tbss_all, mean_FA, [('outputall_node.meanfa_file3', 'volume1')]),  #OK
        (tbss_all, groupmask, [('outputall_node.groupmask3', 'volume1')]),  #OK
        (tbss_all, skeleton_file, [('outputall_node.skeleton_file3', 'volume1')
                                   ]),  #OK
        (tbss_all, all_FA_skeletonised, [('outputall_node.projectedfa_file4',
                                          'volume1')]),  #OK
        (tbss_all, mean_FA_skeleton_mask, [('outputall_node.skeleton_mask4',
                                            'volume1')]),  #OK
        (tbss_all, mean_FA_skeleton_mask_dst, [('outputall_node.distance_map4',
                                                'volume1')]),  #OK
    ])

    #cmp_nipy2fsl.run(plugin=pluginName)
    cmp_nipy2fsl.run()