def mod_smooth(in_file, brightness_threshold, usans, fwhm, smooth_type, reg_file,
               surface_fwhm, subjects_dir=None):
    """Smooth ``in_file`` with one of three strategies and return the result path.

    Parameters
    ----------
    in_file : str
        Path to the image to smooth.
    brightness_threshold : float
        SUSAN brightness threshold (used only when ``smooth_type == 'susan'``).
    usans : list
        SUSAN ``usans`` specification (used only for 'susan').
    fwhm : float
        Volumetric smoothing kernel FWHM in mm.
    smooth_type : str
        One of 'susan', 'isotropic' or 'freesurfer'.
    reg_file : str
        FreeSurfer registration file (used only for 'freesurfer').
    surface_fwhm : float
        Surface smoothing FWHM (used only for 'freesurfer').
    subjects_dir : str, optional
        FreeSurfer SUBJECTS_DIR (used only for 'freesurfer').

    Returns
    -------
    str
        Path to the smoothed image.

    Raises
    ------
    ValueError
        If ``smooth_type`` is not one of the supported strategies.
    """
    # Imports are function-local so the function can be pickled and shipped
    # to nipype execution workers.
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs

    if smooth_type == 'susan':
        smooth = fsl.SUSAN()
        smooth.inputs.fwhm = fwhm
        smooth.inputs.brightness_threshold = brightness_threshold
        smooth.inputs.usans = usans
        smooth.inputs.in_file = in_file
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    elif smooth_type == 'isotropic':
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        # Average along the full cortical column (0%..100% depth, 10% steps).
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    else:
        # Previously an unrecognized smooth_type fell through every branch and
        # the final return raised an opaque NameError; fail fast instead.
        raise ValueError("unknown smooth_type: %r" % (smooth_type,))
    return smoothed_file
def mod_smooth(in_file, mask_file, fwhm, smooth_type, reg_file, surface_fwhm,
               subjects_dir=None):
    """Smooth ``in_file`` with one of three strategies and return the result path.

    A ``fwhm`` of 0 (and, for 'freesurfer', a ``surface_fwhm`` of 0 as well)
    means "no smoothing requested": the input path is returned unchanged.

    Parameters
    ----------
    in_file : str
        Path to the image to smooth.
    mask_file : str
        Brain mask, consumed by the SUSAN sub-workflow only.
    fwhm : float
        Volumetric smoothing kernel FWHM in mm.
    smooth_type : str
        One of 'susan', 'isotropic' or 'freesurfer'.
    reg_file : str
        FreeSurfer registration file (used only for 'freesurfer').
    surface_fwhm : float
        Surface smoothing FWHM (used only for 'freesurfer').
    subjects_dir : str, optional
        FreeSurfer SUBJECTS_DIR (used only for 'freesurfer').

    Returns
    -------
    str
        Path to the smoothed image, or ``in_file`` when no smoothing is done.

    Raises
    ------
    ValueError
        If ``smooth_type`` is not one of the supported strategies.
    """
    # Imports are function-local so the function can be pickled and shipped
    # to nipype execution workers.
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import os

    if smooth_type == 'susan':
        if fwhm == 0:
            return in_file
        # NOTE(review): create_susan_smooth is not defined in this function;
        # it is expected to be imported at module level (typically from
        # nipype.workflows.fmri.fsl) -- verify in the enclosing file.
        smooth = create_susan_smooth()
        smooth.base_dir = os.getcwd()
        smooth.inputs.inputnode.fwhm = fwhm
        smooth.inputs.inputnode.mask_file = mask_file
        smooth.inputs.inputnode.in_file = in_file
        res = smooth.run()
        smoothed_file = res.outputs.outputnode.smoothed_files
    elif smooth_type == 'isotropic':
        if fwhm == 0:
            return in_file
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        if fwhm == 0 and surface_fwhm == 0:
            return in_file
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        # Average along the full cortical column (0%..100% depth, 10% steps).
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    else:
        # Previously an unrecognized smooth_type fell through every branch and
        # the final return raised an opaque NameError; fail fast instead.
        raise ValueError("unknown smooth_type: %r" % (smooth_type,))
    return smoothed_file
def test_smooth():
    """Yield per-metadata assertions checking freesurfer.Smooth input traits."""
    # Expected trait metadata, keyed by input name.
    expected = {
        'args': {'argstr': '%s'},
        'environ': {},
        'in_file': {'argstr': '--i %s', 'mandatory': True},
        'num_iters': {'xor': ['surface_fwhm'], 'mandatory': True},
        'proj_frac': {'argstr': '--projfrac %s', 'xor': ['proj_frac_avg']},
        'proj_frac_avg': {'xor': ['proj_frac'],
                          'argstr': '--projfrac-avg %.2f %.2f %.2f'},
        'reg_file': {'argstr': '--reg %s', 'mandatory': True},
        'smoothed_file': {'argstr': '--o %s'},
        'subjects_dir': {},
        'surface_fwhm': {'xor': ['num_iters'], 'mandatory': True,
                         'argstr': '--fwhm %d', 'requires': ['reg_file']},
        'vol_fwhm': {'argstr': '--vol-fwhm %d'},
    }
    interface = freesurfer.Smooth()
    traits = interface.inputs.traits()
    # One yielded assertion per (input, metadata key) pair.
    for trait_name, trait_meta in expected.items():
        for meta_name, expected_value in trait_meta.items():
            yield assert_equal, getattr(traits[trait_name], meta_name), expected_value
surface and fit it back into the volume forming the cortical ribbon. The
projection uses the average value along a "cortical column". In addition to
the surface smoothing, the rest of the volume is smoothed with a 3d gaussian
kernel.

.. note::

   It is very important to note that the projection to the surface takes a 3d
   manifold to a 2d manifold. Hence the reverse projection, simply fills the
   thickness of cortex with the smoothed data. The smoothing is not performed
   in a depth specific manner. The output of this branch should only be used
   for surface-based analysis and visualization.
"""

# Volume branch: plain SPM gaussian smoothing.
volsmooth = pe.Node(interface=spm.Smooth(), name="volsmooth")
# Surface branch: FreeSurfer smoothing, one node instance per input file.
# proj_frac_avg averages samples along the cortical column from 0% to 100%
# depth in 10% steps.
surfsmooth = pe.MapNode(
    interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)),
    name="surfsmooth",
    iterfield=['in_file'])
"""
We connect up the different nodes to implement the preprocessing workflow.
"""

# NOTE(review): this connect list is truncated at the end of the visible
# chunk; the remainder of the call lies outside this view.
preproc.connect([
    (realign, surfregister, [('mean_image', 'source_file')]),
    (FreeSurferSource, ApplyVolTransform, [('brainmask', 'target_file')]),
    (surfregister, ApplyVolTransform, [('out_reg_file', 'reg_file')]),
    (realign, ApplyVolTransform, [('mean_image', 'source_file')]),
    (ApplyVolTransform, Threshold, [('transformed_file', 'in_file')]),
    (realign, art, [('realignment_parameters', 'realignment_parameters'),
                    ('realigned_files', 'realigned_files')]),
    (Threshold, art, [('binary_file', 'mask_file')]),
# Feed the iterated FWHM value and the computed brain mask into the
# anisotropic (voxel-wise) smoothing sub-workflow.
preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth,
                      "inputnode.fwhm")
preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth,
                      'inputnode.mask_file')

# FreeSurfer cortical reconstruction; its subject_id/subjects_dir outputs
# drive the boundary-based registration below.
recon_all = pe.Node(interface=fs.ReconAll(), name="recon_all")

# Register the mean functional image to the FreeSurfer anatomy (bbregister,
# FSL initialization, T2 contrast).
surfregister = pe.Node(interface=fs.BBRegister(), name='surfregister')
surfregister.inputs.init = 'fsl'
surfregister.inputs.contrast_type = 't2'
preprocessing.connect(realign, 'mean_image', surfregister, 'source_file')
preprocessing.connect(recon_all, 'subject_id', surfregister, 'subject_id')
preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir')

# Surface-based smoothing of the realigned files, one node per input file.
# proj_frac_avg averages along the cortical column (0%..100%, 10% steps);
# the same iterated fwhm drives both the surface and the volume kernel.
isotropic_surface_smooth = pe.MapNode(
    interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)),
    iterfield=['in_file'],
    name="isotropic_surface_smooth")
preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth,
                      'reg_file')
preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth,
                      "in_file")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth,
                      "surface_fwhm")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth,
                      "vol_fwhm")
preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth,
                      'subjects_dir')

# Collect the three smoothing branches into a single list of outputs.
merge_smoothed_files = pe.Node(
    interface=util.Merge(3), name='merge_smoothed_files')
# NOTE(review): this call is truncated at the end of the visible chunk.
preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files',
#!/usr/bin/env python
# Auto-generated Nipype pipeline. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.freesurfer as freesurfer

# White-matter segmentation node (wraps the ``mri_segment`` executable).
segment_wm = pe.Node(interface=freesurfer.SegmentWM(),
                     name='freesurfer_SegmentWM')
segment_wm.inputs.in_file = 'brain.nii'

# Smoothing node (wraps the ``mris_volsmooth`` executable).
smooth = pe.Node(interface=freesurfer.Smooth(), name='freesurfer_Smooth')

# Wire the nodes together: segmentation output feeds the smoother.
wf = nipype.Workflow('MyWorkflow')
wf.connect(segment_wm, "out_file", smooth, "in_file")

# Execute with the MultiProc plugin on a single core, writing a flat
# PNG graph of the workflow first.
run_plugin = 'MultiProc'  # adjust your desired plugin here
run_plugin_args = {'n_procs': 1}  # adjust to your number of cores
wf.write_graph(graph2use='flat', format='png', simple_form=False)
wf.run(plugin=run_plugin, plugin_args=run_plugin_args)
def create_ml_preprocess_workflow(
        name,
        project_dir,
        work_dir,
        sessions_file,
        session_template,
        fs_dir,
        annot_template='{subject_id}/label/{hemi}.aparc.a2009s.annot',
        fwhm_vals=[2],
        ico_order_vals=[4],
        do_save_vol_ds=False,
        do_save_smooth_vol_ds=False,
        do_save_surface_smooth_vol_ds=False,
        do_save_surface_ds=False,
        do_save_smooth_surface_ds=False,
        do_save_sphere_nifti=False,
        do_save_sphere_ds=True,
        do_save_join_sessions_ds=True,
        do_save_join_subjects_ds=True):
    """Build an ML preprocessing workflow over all subjects and sessions.

    The workflow iterates over subjects, their sessions, hemispheres,
    smoothing FWHMs and icosahedron orders; converts functional volumes to
    datasets, samples them onto the cortical surface, smooths them,
    resamples to a common icosahedral sphere, and joins the resulting
    datasets across hemispheres, sessions and subjects.  The ``do_save_*``
    flags control which intermediate datasets are persisted.

    Parameters
    ----------
    name : str
        Name of the top-level workflow.
    project_dir : str
        Project root; joined datasets are written under ``<project_dir>/ml``.
    work_dir : str
        Base directory for workflow execution.
    sessions_file : str
        Columnar file (read via ``ColumnData``) with a ``subject_id`` column.
    session_template : SelectFiles template for locating session directories.
    fs_dir : str
        FreeSurfer SUBJECTS_DIR.
    annot_template : str
        Template locating the fsaverage annotation file per hemisphere.
    fwhm_vals, ico_order_vals : list
        Smoothing FWHMs / icosahedron orders to iterate over.
        NOTE(review): mutable default arguments -- harmless only while never
        mutated; consider ``None`` defaults resolved inside the function.
    do_save_* : bool
        Toggles for persisting the corresponding intermediate dataset.

    Returns
    -------
    pe.Workflow
        The fully wired (but not yet run) workflow.
    """
    # --- workflow scaffolding -------------------------------------------
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    # Map each subject to the list of its session directories.
    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    # Assumes rows of sessions_file unpack as (subject_id, session, <third
    # column>) -- TODO confirm against the actual file schema.
    session_map = [
        (sid, [s for i, s, r in zip(*sessions_info.values()) if i == sid])
        for sid in subject_ids
    ]

    # Iterate over each subject...
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    # ...and, per subject, over each of that subject's sessions.
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    # Resolve the session directory on disk.
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    # Datasink rooted at the session directory; per-session outputs go here.
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    # Locate the functional image, attributes file and the
    # functional-to-anatomy registration inside the session directory.
    template = {
        'nifti_file': 'mri/f.nii.gz',
        'attributes_file': 'attributes.txt',
        'reg_file': 'mri/transforms/functional_to_anatomy.dat'
    }
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    # --- unsmoothed volume dataset --------------------------------------
    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'
    workflow.connect(get_files, 'nifti_file', vol_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', vol_to_ds,
                     'attributes_file')
    workflow.connect(subjects, 'subject_id', vol_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', vol_to_ds, 'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds, 'ds_file', datasink, 'ml.@vol')

    # Iterate over the requested smoothing kernels.
    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm', fwhm_vals)]

    # --- volumetrically smoothed dataset --------------------------------
    if do_save_smooth_vol_ds:
        # mri_convert with an fwhm input performs the volume smoothing.
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files, 'nifti_file', smooth_vol, 'in_file')
        workflow.connect(fwhm, 'fwhm', smooth_vol, 'fwhm')

        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                   name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'
        workflow.connect(smooth_vol, 'out_file', smooth_vol_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_vol_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_vol_to_ds,
                         'session_id')
        workflow.connect(smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@smooth_vol')

    # --- surface-smoothed volume dataset --------------------------------
    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(),
                                     name='surface_smooth_vol')
        workflow.connect(get_files, 'reg_file', surface_smooth_vol,
                         'reg_file')
        workflow.connect(get_files, 'nifti_file', surface_smooth_vol,
                         'in_file')
        workflow.connect(fwhm, 'fwhm', surface_smooth_vol, 'surface_fwhm')

        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                           name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'
        # NOTE(review): fs.Smooth elsewhere in this file exposes its result
        # as 'smoothed_file'; confirm 'out_file' is a valid output name for
        # the nipype version in use.
        workflow.connect(surface_smooth_vol, 'out_file',
                         surface_smooth_vol_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         surface_smooth_vol_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', surface_smooth_vol_to_ds,
                         'session_id')
        workflow.connect(surface_smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@surface_smooth_vol')

    # Iterate over both hemispheres for all surface-based branches.
    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi', ['lh', 'rh'])]

    # Sample the functional volume onto the cortical surface, averaging
    # across cortical depth (fractions 0..1 in steps of 0.1).
    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    to_surface.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_surface, 'hemi')
    workflow.connect(get_files, 'nifti_file', to_surface, 'source_file')
    workflow.connect(get_files, 'reg_file', to_surface, 'reg_file')

    # --- unsmoothed surface dataset (joined across hemispheres) ---------
    if do_save_surface_ds:
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                name='surface_to_ds')
        workflow.connect(to_surface, 'out_file', surface_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file', surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_to_ds, 'subject_id')
        workflow.connect(sessions, 'session_dir', surface_to_ds,
                         'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds, 'ds_file', join_surfaces,
                         'input_datasets')
        workflow.connect(join_surfaces, 'joined_dataset', datasink,
                         'ml.@surface')

    # Smooth the sampled data on the surface, per hemisphere.
    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    smooth_surface.inputs.subjects_dir = fs_dir
    workflow.connect(to_surface, 'out_file', smooth_surface, 'in_file')
    workflow.connect(sessions, 'subject_id', smooth_surface, 'subject_id')
    workflow.connect(hemi, 'hemi', smooth_surface, 'hemi')
    workflow.connect(fwhm, 'fwhm', smooth_surface, 'fwhm')

    # --- smoothed surface dataset (joined across hemispheres) -----------
    if do_save_smooth_surface_ds:
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                       name='smooth_surface_to_ds')
        workflow.connect(smooth_surface, 'out_file', smooth_surface_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         smooth_surface_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_surface_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_surface_to_ds,
                         'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds, 'ds_file',
                         join_smooth_surfaces, 'input_datasets')
        workflow.connect(join_smooth_surfaces, 'joined_dataset', datasink,
                         'ml.@smooth_surface')

    # Iterate over the requested icosahedron orders.
    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']),
                        name='ico_order')
    ico_order.iterables = [('ico_order', ico_order_vals)]

    # Resample the smoothed surface data onto the common 'ico' sphere.
    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    to_sphere.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_sphere, 'hemi')
    workflow.connect(smooth_surface, 'out_file', to_sphere, 'source_file')
    workflow.connect(subjects, 'subject_id', to_sphere, 'source_subject')
    workflow.connect(ico_order, 'ico_order', to_sphere, 'target_ico_order')

    if do_save_sphere_nifti:
        workflow.connect(to_sphere, 'out_file', datasink, 'surf.@sphere')

    # Bring the fsaverage annotation onto the same ico sphere so labels can
    # accompany the sphere dataset.
    template = {'annot_file': annot_template}
    get_annot_file = pe.Node(nio.SelectFiles(template),
                             name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi, 'hemi', get_annot_file, 'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    transform_annot.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', transform_annot, 'hemi')
    workflow.connect(get_annot_file, 'annot_file', transform_annot,
                     'source_annot_file')
    workflow.connect(ico_order, 'ico_order', transform_annot,
                     'target_ico_order')

    # --- annotated sphere dataset, joined across hemispheres ------------
    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere, 'out_file', sphere_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', sphere_to_ds,
                     'attributes_file')
    workflow.connect(transform_annot, 'out_file', sphere_to_ds,
                     'annot_file')
    workflow.connect(subjects, 'subject_id', sphere_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', sphere_to_ds, 'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(),
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True
    workflow.connect(sphere_to_ds, 'ds_file', join_hemispheres,
                     'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres, 'joined_dataset', datasink,
                         'ml.@sphere')

    # --- join across sessions (per subject) -----------------------------
    join_sessions = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres, 'joined_dataset', join_sessions,
                     'input_datasets')

    if do_save_join_sessions_ds:
        # Separate sink: joined-session data lives under <project_dir>/ml
        # in a per-subject container.
        join_sessions_sink = pe.Node(nio.DataSink(),
                                     name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(subjects, 'subject_id', join_sessions_sink,
                         'container')
        workflow.connect(join_sessions, 'joined_dataset', join_sessions_sink,
                         '@join_sessions')

    # --- join across subjects (whole project) ---------------------------
    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions, 'joined_dataset', join_subjects,
                     'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(),
                                     name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(join_subjects, 'joined_dataset', join_subjects_sink,
                         '@join_subjects')

    return workflow