Example #1
def test_surfsmooth(create_surf_file_in_directory):

    smooth = fs.SurfaceSmooth()

    # Test underlying command
    assert smooth.cmd == "mri_surf2surf"

    # Test mandatory args exception
    with pytest.raises(ValueError):
        smooth.run()

    # Create testing files
    surf, cwd = create_surf_file_in_directory

    # Test input settings
    smooth.inputs.in_file = surf
    smooth.inputs.subject_id = "fsaverage"
    fwhm = 5
    smooth.inputs.fwhm = fwhm
    smooth.inputs.hemi = "lh"

    # Test the command line
    assert smooth.cmdline == \
        ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" %
         (surf, cwd, fwhm))

    # Test identity
    shmooth = fs.SurfaceSmooth(subject_id="fsaverage",
                               fwhm=6,
                               in_file=surf,
                               hemi="lh",
                               out_file="lh.a_smooth.nii")
    assert smooth != shmooth
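
For reference, the interface under test can be driven the same way outside of pytest. A minimal sketch, assuming FreeSurfer is installed, SUBJECTS_DIR points at a subjects directory containing fsaverage, and the input file name is hypothetical:

import os
import nipype.interfaces.freesurfer as fs

os.environ['SUBJECTS_DIR'] = '/opt/freesurfer/subjects'  # assumption: default FreeSurfer layout

smooth = fs.SurfaceSmooth()
smooth.inputs.in_file = 'lh.func.mgz'  # hypothetical surface-sampled file
smooth.inputs.subject_id = 'fsaverage'
smooth.inputs.hemi = 'lh'
smooth.inputs.fwhm = 5

print(smooth.cmdline)  # shows the mri_surf2surf call without executing it
# result = smooth.run()  # actually runs mri_surf2surf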
Example #2
File: test_utils.py Project: amoliu/nipype
def test_surfsmooth():

    smooth = fs.SurfaceSmooth()

    # Test underlying command
    yield assert_equal, smooth.cmd, "mri_surf2surf"

    # Test mandatory args exception
    yield assert_raises, ValueError, smooth.run

    # Create testing files
    surf, cwd, oldwd = create_surf_file()

    # Test input settings
    smooth.inputs.in_file = surf
    smooth.inputs.subject_id = "fsaverage"
    fwhm = 5
    smooth.inputs.fwhm = fwhm
    smooth.inputs.hemi = "lh"

    # Test the command line
    yield assert_equal, smooth.cmdline, \
        ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" %
         (surf, cwd, fwhm))

    # Test identity
    shmooth = fs.SurfaceSmooth(subject_id="fsaverage",
                               fwhm=6,
                               in_file=surf,
                               hemi="lh",
                               out_file="lh.a_smooth.nii")
    yield assert_not_equal, smooth, shmooth

    # Clean up
    clean_directory(cwd, oldwd)
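
Example #2 is the same test as Example #1 in nipype's older nose yield-test style; here create_surf_file() additionally returns the previous working directory so the trailing clean_directory() call can restore it.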
Example #3
def main(subject, sourcedata):
    derivatives = op.join(sourcedata, 'derivatives')

    derivatives_fmriprep = op.join(derivatives, 'fmriprep')
    derivatives_freesurfer = op.join(derivatives, 'freesurfer')
    # os.environ['SUBJECTS_DIR'] = derivatives_freesurfer

    fn_template = op.join(
        derivatives_fmriprep, f'sub-{subject}', 'func',
        f'sub-{subject}_task-numrisk_acq-*_run-*_space-fsaverage6_hemi-*.func.gii'
    )

    fns = glob.glob(fn_template)

    workflow = pe.Workflow(name=f'smooth_sub-{subject}', base_dir='/scratch')

    input_node = pe.Node(niu.IdentityInterface(fields=['surface_files']),
                         name='input_node')
    input_node.inputs.surface_files = fns

    def get_hemis(in_files):
        import re

        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_task.*_hemi-(?P<hemi>L|R)\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]
        hemis = ['lh' if hemi == 'L' else 'rh' for hemi in hemis]

        return hemis

    smoother = pe.MapNode(freesurfer.SurfaceSmooth(fwhm=5,
                                                   subject_id='fsaverage6'),
                          iterfield=['in_file', 'hemi'],
                          name='smoother')

    workflow.connect(input_node, 'surface_files', smoother, 'in_file')
    workflow.connect(input_node, ('surface_files', get_hemis), smoother,
                     'hemi')

    def get_suffix(in_files):
        import re
        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_task.*_hemi-(?P<hemi>L|R)\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]

        return ['_hemi-{}'.format(hemi) for hemi in hemis]

    ds = pe.MapNode(DerivativesDataSink(out_path_base='smoothed',
                                        keep_dtype=True),
                    iterfield=['source_file', 'in_file', 'suffix'],
                    name='datasink')
    ds.inputs.base_directory = derivatives
    ds.inputs.desc = 'smoothed'

    workflow.connect(input_node, 'surface_files', ds, 'source_file')
    workflow.connect(smoother, 'out_file', ds, 'in_file')
    workflow.connect(input_node, ('surface_files', get_suffix), ds, 'suffix')

    workflow.run(plugin='MultiProc', plugin_args={'n_procs': 15})
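
Examples #3 and #5 rely on a Nipype connection feature worth spelling out: passing an ('output_name', callable) tuple to workflow.connect routes the source output through the callable before it reaches the destination input. A minimal self-contained sketch of the pattern (all node and field names here are made up for illustration):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def to_upper(values):
    # runs at execution time on the value flowing through the connection
    return [v.upper() for v in values]

wf = pe.Workflow(name='connect_with_function')
src = pe.Node(niu.IdentityInterface(fields=['strings']), name='src')
src.inputs.strings = ['lh', 'rh']
dst = pe.Node(niu.IdentityInterface(fields=['strings']), name='dst')

# the (output, callable) tuple applies to_upper to src.strings before dst sees it
wf.connect(src, ('strings', to_upper), dst, 'strings')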
Example #4
def main(derivatives, subject, session):

    for hemi in ['lh', 'rh']:
        template = op.join(
            derivatives, 'sampled_giis', f'sub-{subject}', f'ses-{session}',
            'func',
            f'sub-{subject}_ses-{session}_left_over_right_desc-zmap-depth-*_hemi-{hemi}.gii'
        )
        print(template)
        zmaps = glob.glob(template)

        print(zmaps)

        mean_zmap = np.mean([nb.load(fn).darrays[0].data for fn in zmaps], 0)
        zmap_im = nb.load(zmaps[0])
        mean_zmap_im = gifti.GiftiImage(header=zmap_im.header,
                                        extra=zmap_im.extra)
        mean_zmap_im.add_gifti_data_array(gifti.GiftiDataArray(mean_zmap))
        mean_zmap_im.to_filename(
            op.join(
                derivatives, 'sampled_giis', f'sub-{subject}',
                f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_left_over_right_desc-zmap-depth-all_hemi-{hemi}.gii'
            ))

        mean_zmap_abs = np.abs(mean_zmap)
        mean_zmap_abs_im = gifti.GiftiImage(header=zmap_im.header,
                                            extra=zmap_im.extra)
        mean_zmap_abs_im.add_gifti_data_array(
            gifti.GiftiDataArray(mean_zmap_abs))
        mean_zmap_abs_im.to_filename(
            op.join(
                derivatives, 'sampled_giis', f'sub-{subject}',
                f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_left_over_right_desc-abszmap-depth-all_hemi-{hemi}.gii'
            ))

        os.environ['SUBJECTS_DIR'] = op.join(derivatives, 'freesurfer')
        smoother = freesurfer.SurfaceSmooth()
        smoother.inputs.in_file = mean_zmap_abs_im.get_filename()
        smoother.inputs.fwhm = 2.0
        smoother.inputs.subject_id = f'sub-{subject}'
        smoother.inputs.hemi = hemi
        smoother.inputs.out_file = op.join(
            derivatives, 'sampled_giis', f'sub-{subject}', f'ses-{session}',
            'func',
            f'sub-{subject}_ses-{session}_left_over_right_desc-abszmap-depth-all_hemi-{hemi}_smoothed.gii'
        )

        r = smoother.run()
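
Example #4 points SUBJECTS_DIR at the derivatives tree by mutating os.environ. The same thing can be stated explicitly through the interface's subjects_dir input, as Examples #6 and #7 do; a minimal sketch with hypothetical paths:

from nipype.interfaces import freesurfer

smoother = freesurfer.SurfaceSmooth()
smoother.inputs.subjects_dir = '/data/derivatives/freesurfer'  # hypothetical path
smoother.inputs.in_file = 'mean_zmap_abs.gii'  # hypothetical input file
smoother.inputs.subject_id = 'sub-01'
smoother.inputs.hemi = 'lh'
smoother.inputs.fwhm = 2.0
# r = smoother.run()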
Example #5
def smooth_surf_wf(name,
                   in_files,
                   hemispheres,
                   derivatives_dir,
                   smooth_fwhm=5.0,
                   base_dir='/tmp/workflow_folders'):

    wf = pe.Workflow(name=name,
                     base_dir=base_dir)

    input_node = pe.Node(niu.IdentityInterface(fields=['in_files',
                                                       'hemispheres']),
                         name='input_node')

    freesurfer_dir = op.join(derivatives_dir, 'freesurfer')

    input_node.inputs.in_files = in_files
    input_node.inputs.hemispheres = hemispheres

    smoother = pe.MapNode(freesurfer.SurfaceSmooth(subjects_dir=freesurfer_dir),
                          iterfield=['in_file',
                                     'hemi'],
                          name='smoother')
    smoother.inputs.subject_id = 'fsaverage'
    smoother.inputs.fwhm = smooth_fwhm

    wf.connect(input_node, 'in_files', smoother, 'in_file')
    wf.connect(input_node, 'hemispheres', smoother, 'hemi')

    ds = pe.MapNode(bids.DerivativesDataSink(desc='smoothed',
                                             out_path_base='smoothed_surfaces',
                                             base_directory=derivatives_dir),
                    iterfield=['source_file',
                               'in_file',
                               'extra_values'],
                    name='datasink')
    wf.connect(input_node, 'in_files', ds, 'source_file')
    wf.connect(smoother, 'out_file', ds, 'in_file')
    wf.connect(input_node, ('hemispheres', format_hemi), ds, 'extra_values')

    return wf
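
format_hemi is not defined in this snippet; it comes from the surrounding project. Given how it feeds DerivativesDataSink's extra_values, it presumably maps FreeSurfer hemisphere codes to BIDS-style entities. A hypothetical stand-in:

def format_hemi(hemispheres):
    # hypothetical helper: map FreeSurfer 'lh'/'rh' to BIDS '_hemi-L'/'_hemi-R'
    mapping = {'lh': '_hemi-L', 'rh': '_hemi-R'}
    return [mapping[hemi] for hemi in hemispheres]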
Example #6
def nipype_mri_surf2surf(t1path, hemi):
    splitpath = t1path.split(os.sep)
    fsdir = os.path.join(BIDS_DATA_DIR, "derivatives",
                         "freesurfer_{0}".format(splitpath[-3]))
    outdir = os.path.join(BIDS_DATA_DIR, "derivatives",
                          "freesurfer_projection_{0}".format(splitpath[-3]),
                          splitpath[-4])
    funcpath = os.path.join(
        BIDS_DATA_DIR, "derivatives", "spmpreproc_{0}".format(splitpath[-3]),
        splitpath[-4],
        "wrr{0}_{1}_task-localizer_bold.nii.gz".format(splitpath[-4],
                                                       splitpath[-3]))
    assert os.path.isfile(funcpath), funcpath
    basename = os.path.basename(funcpath).replace(".nii.gz", "")
    texturefile = os.path.join(outdir, basename + ".{0}.gii".format(hemi))
    smoothfile = os.path.join(outdir, basename + ".s5.{0}.gii".format(hemi))
    if not os.path.isfile(texturefile):
        raise ValueError("Oops!: {0}".format(texturefile))
    if not os.path.isfile(smoothfile):
        open(smoothfile, "wt").close()
    surf2surf = freesurfer.SurfaceSmooth(in_file=texturefile,
                                         hemi=hemi,
                                         subject_id=splitpath[-4],
                                         fwhm=5,
                                         out_file=smoothfile,
                                         subjects_dir=fsdir)
    print(surf2surf.cmdline)
    if PROCESS:
        surf2surf.run()
    surf2surf = freesurfer.SurfaceTransform(
        hemi=hemi,
        source_subject=splitpath[-4],
        source_file=smoothfile,
        target_subject="ico",
        target_ico_order=7,
        out_file=os.path.join(outdir,
                              basename + ".ico7.s5.{0}.gii".format(hemi)),
        subjects_dir=fsdir)
    print(surf2surf.cmdline)
    if PROCESS:
        surf2surf.run()
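
A note on the second stage: target_subject="ico" with target_ico_order=7 resamples the smoothed texture onto the order-7 icosahedral mesh (163,842 vertices per hemisphere, the same resolution as fsaverage), which gives every subject vertex-wise correspondence for group analysis.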
Example #7
def main(subject, session, bids_folder, space='fsnative', n_procs=12):

    base_dir = '/scratch/gdehol/workflow_folders'

    if not op.exists(base_dir):
        base_dir = '/tmp'

    wf = pe.Workflow(name=f'smooth_{subject}_{session}_{space}',
                     base_dir=base_dir)

    runs = get_runs(subject, session)
    fns_l = [
        get_surf_file(subject, session, run, bids_folder, 'lh') for run in runs
    ]
    fns_r = [
        get_surf_file(subject, session, run, bids_folder, 'rh') for run in runs
    ]
    fns = fns_l + fns_r

    hemis = ['lh'] * len(runs) + ['rh'] * len(runs)

    input_node = pe.Node(niu.IdentityInterface(
        fields=['freesurfer_subject', 'surface_files', 'hemis']),
                         name='input_node')
    input_node.inputs.freesurfer_subject = f'sub-{subject}'
    input_node.inputs.surface_files = fns
    input_node.inputs.hemis = hemis

    freesurfer_dir = op.join(bids_folder, 'derivatives', 'freesurfer')
    smoother = pe.MapNode(freesurfer.SurfaceSmooth(
        fwhm=5, subjects_dir=freesurfer_dir),
                          iterfield=['in_file', 'hemi'],
                          name='smoother')

    wf.connect(input_node, 'freesurfer_subject', smoother, 'subject_id')
    wf.connect(input_node, 'surface_files', smoother, 'in_file')
    wf.connect(input_node, 'hemis', smoother, 'hemi')

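    # NOTE: get_suffix below is defined but never wired into the datasink
    # (its iterfield lists only 'source_file' and 'in_file'); it looks like
    # a leftover from an earlier version of this workflow.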
    def get_suffix(in_files):
        import re
        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_.*_hemi-(?P<hemi>L|R)_bold\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]

        return ['_hemi-{}'.format(hemi) for hemi in hemis]

    ds = pe.MapNode(DerivativesDataSink(
        out_path_base='smoothed',
        dismiss_entities=['suffix', 'extension'],
        extension=".func.gii",
        suffix='bold'),
                    iterfield=['source_file', 'in_file'],
                    name='datasink')
    ds.inputs.base_directory = op.join(bids_folder, 'derivatives')
    ds.inputs.desc = 'smoothed'

    wf.connect(input_node, 'surface_files', ds, 'source_file')
    wf.connect(smoother, 'out_file', ds, 'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_procs})
Example #8
def create_surface_projection_workflow(name="surface_projection"):

    # Define the workflow inputs
    inputnode = pe.Node(util.IdentityInterface(
        fields=["subject_id", "timeseries", "tkreg_affine", "smooth_fwhm"]),
                        name="inputs")

    # Set up a hemisphere iterable
    hemisource = pe.Node(util.IdentityInterface(fields=["hemi"]),
                         iterables=("hemi", ["lh", "rh"]),
                         name="hemisource")

    # Project data onto the surface mesh
    surfproject = pe.MapNode(fs.SampleToSurface(sampling_range=(0, 1, .1),
                                                sampling_units="frac",
                                                cortex_mask=True),
                             iterfield=["source_file", "reg_file"],
                             name="surfproject")
    surfproject.inputs.sampling_method = "average"

    # Apply the spherical warp to the data to bring into fsaverage space
    surftransform = pe.MapNode(fs.SurfaceTransform(target_subject="fsaverage",
                                                   reshape=True),
                               iterfield=["source_file"],
                               name="surftransform")

    # Smooth the data along the surface
    smoothnormsurf = pe.MapNode(fs.SurfaceSmooth(subject_id="fsaverage",
                                                 reshape=True),
                                iterfield=["in_file"],
                                name="smoothnormsurf")

    # Convert the fsaverage surface to nifti
    cvtnormsurf = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                             iterfield=["in_file"],
                             name="convertnormsurf")

    # Rename the timeseries
    rename = pe.MapNode(util.Rename(
        format_string="%(hemi)s.timeseries.fsaverage", keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    # Define the outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries"]),
                         name="outputs")

    # Define and connect the workflow
    tosurf = pe.Workflow(name=name)
    tosurf.connect([
        (inputnode, surfproject, [("timeseries", "source_file"),
                                  ("subject_id", "subject_id"),
                                  ("tkreg_affine", "reg_file")]),
        (hemisource, surfproject, [("hemi", "hemi")]),
        (surfproject, surftransform, [("out_file", "source_file")]),
        (inputnode, surftransform, [("subject_id", "source_subject")]),
        (hemisource, surftransform, [("hemi", "hemi")]),
        (surftransform, smoothnormsurf, [("out_file", "in_file")]),
        (hemisource, smoothnormsurf, [("hemi", "hemi")]),
        (inputnode, smoothnormsurf, [("smooth_fwhm", "fwhm")]),
        (smoothnormsurf, cvtnormsurf, [("out_file", "in_file")]),
        (cvtnormsurf, rename, [("out_file", "in_file")]),
        (hemisource, rename, [("hemi", "hemi")]),
        (rename, outputnode, [("out_file", "timeseries")]),
    ])

    return tosurf
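
The factory returns an unconnected workflow, so callers usually embed it in a larger pipeline or set the input node's fields directly. A minimal sketch with hypothetical inputs (the inputnode is named "inputs", so its fields are reachable as tosurf.inputs.inputs.<field>):

tosurf = create_surface_projection_workflow()
tosurf.inputs.inputs.subject_id = 'sub-01'                 # hypothetical subject
tosurf.inputs.inputs.timeseries = ['run1.nii.gz']          # hypothetical preprocessed runs
tosurf.inputs.inputs.tkreg_affine = ['run1_register.dat']  # hypothetical tkregister files
tosurf.inputs.inputs.smooth_fwhm = 6.0
# tosurf.run()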
Example #9
def create_ml_preprocess_workflow(
        name,
        project_dir,
        work_dir,
        sessions_file,
        session_template,
        fs_dir,
        annot_template='{subject_id}/label/{hemi}.aparc.a2009s.annot',
        fwhm_vals=[2],
        ico_order_vals=[4],
        do_save_vol_ds=False,
        do_save_smooth_vol_ds=False,
        do_save_surface_smooth_vol_ds=False,
        do_save_surface_ds=False,
        do_save_smooth_surface_ds=False,
        do_save_sphere_nifti=False,
        do_save_sphere_ds=True,
        do_save_join_sessions_ds=True,
        do_save_join_subjects_ds=True):

    # initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [
        (sid, [s for i, s, r in zip(*sessions_info.values()) if i == sid])
        for sid in subject_ids
    ]

    # for each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    # for each session
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    # get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    # save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    template = {
        'nifti_file': 'mri/f.nii.gz',
        'attributes_file': 'attributes.txt',
        'reg_file': 'mri/transforms/functional_to_anatomy.dat'
    }
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'

    workflow.connect(get_files, 'nifti_file', vol_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', vol_to_ds,
                     'attributes_file')
    workflow.connect(subjects, 'subject_id', vol_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', vol_to_ds, 'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds, 'ds_file', datasink, 'ml.@vol')

    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm', fwhm_vals)]

    if do_save_smooth_vol_ds:
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files, 'nifti_file', smooth_vol, 'in_file')
        workflow.connect(fwhm, 'fwhm', smooth_vol, 'fwhm')

        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                   name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'

        workflow.connect(smooth_vol, 'out_file', smooth_vol_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_vol_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_vol_to_ds,
                         'session_id')

        workflow.connect(smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@smooth_vol')

    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(),
                                     name='surface_smooth_vol')
        workflow.connect(get_files, 'reg_file', surface_smooth_vol, 'reg_file')
        workflow.connect(get_files, 'nifti_file', surface_smooth_vol,
                         'in_file')
        workflow.connect(fwhm, 'fwhm', surface_smooth_vol, 'surface_fwhm')

        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                           name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'

        workflow.connect(surface_smooth_vol, 'out_file',
                         surface_smooth_vol_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         surface_smooth_vol_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', surface_smooth_vol_to_ds,
                         'session_id')

        workflow.connect(surface_smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@surface_smooth_vol')

    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi', ['lh', 'rh'])]

    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    to_surface.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_surface, 'hemi')
    workflow.connect(get_files, 'nifti_file', to_surface, 'source_file')
    workflow.connect(get_files, 'reg_file', to_surface, 'reg_file')

    if do_save_surface_ds:
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_to_ds')
        workflow.connect(to_surface, 'out_file', surface_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file', surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_to_ds, 'subject_id')
        workflow.connect(sessions, 'session_dir', surface_to_ds, 'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds, 'ds_file', join_surfaces,
                         'input_datasets')

        workflow.connect(join_surfaces, 'joined_dataset', datasink,
                         'ml.@surface')

    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    smooth_surface.inputs.subjects_dir = fs_dir
    workflow.connect(to_surface, 'out_file', smooth_surface, 'in_file')
    workflow.connect(sessions, 'subject_id', smooth_surface, 'subject_id')
    workflow.connect(hemi, 'hemi', smooth_surface, 'hemi')
    workflow.connect(fwhm, 'fwhm', smooth_surface, 'fwhm')

    if do_save_smooth_surface_ds:
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                       name='smooth_surface_to_ds')
        workflow.connect(smooth_surface, 'out_file', smooth_surface_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_surface_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_surface_to_ds,
                         'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds, 'ds_file', join_smooth_surfaces,
                         'input_datasets')

        workflow.connect(join_smooth_surfaces, 'joined_dataset', datasink,
                         'ml.@smooth_surface')

    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']),
                        name='ico_order')
    ico_order.iterables = [('ico_order', ico_order_vals)]

    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    to_sphere.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_sphere, 'hemi')
    workflow.connect(smooth_surface, 'out_file', to_sphere, 'source_file')
    workflow.connect(subjects, 'subject_id', to_sphere, 'source_subject')
    workflow.connect(ico_order, 'ico_order', to_sphere, 'target_ico_order')

    if do_save_sphere_nifti:
        workflow.connect(to_sphere, 'out_file', datasink, 'surf.@sphere')

    template = {'annot_file': annot_template}
    get_annot_file = pe.Node(nio.SelectFiles(template), name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi, 'hemi', get_annot_file, 'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    transform_annot.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', transform_annot, 'hemi')
    workflow.connect(get_annot_file, 'annot_file', transform_annot,
                     'source_annot_file')
    workflow.connect(ico_order, 'ico_order', transform_annot,
                     'target_ico_order')

    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere, 'out_file', sphere_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', sphere_to_ds,
                     'attributes_file')
    workflow.connect(transform_annot, 'out_file', sphere_to_ds, 'annot_file')
    workflow.connect(subjects, 'subject_id', sphere_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', sphere_to_ds, 'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(),
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True

    workflow.connect(sphere_to_ds, 'ds_file', join_hemispheres,
                     'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres, 'joined_dataset', datasink,
                         'ml.@sphere')

    join_sessions = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres, 'joined_dataset', join_sessions,
                     'input_datasets')

    if do_save_join_sessions_ds:
        join_sessions_sink = pe.Node(nio.DataSink(), name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(subjects, 'subject_id', join_sessions_sink,
                         'container')
        workflow.connect(join_sessions, 'joined_dataset', join_sessions_sink,
                         '@join_sessions')

    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions, 'joined_dataset', join_subjects,
                     'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(), name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(join_subjects, 'joined_dataset', join_subjects_sink,
                         '@join_subjects')

    return workflow
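
To make the expected arguments concrete, a hypothetical invocation (all paths and the SelectFiles template are made up; sessions_file must be a columnar table whose fields unpack as subject_id plus two more columns for the zip above to work):

wf = create_ml_preprocess_workflow(
    name='ml_preprocess',
    project_dir='/data/project',                  # hypothetical
    work_dir='/scratch/work',                     # hypothetical
    sessions_file='/data/project/sessions.txt',   # hypothetical
    session_template={'session_dir': '/data/project/{session_dir}'},
    fs_dir='/data/project/freesurfer',
    fwhm_vals=[2, 4],
    ico_order_vals=[4])
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})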