def test_MakeAverageSubject_inputs():
    """Yield nose-style checks that every input trait of MakeAverageSubject
    carries exactly the metadata recorded below."""
    expected_metadata = {
        'args': {'argstr': '%s'},
        'environ': {'nohash': True, 'usedefault': True},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'out_name': {'argstr': '--out %s', 'usedefault': True},
        'subjects_dir': {},
        'subjects_ids': {'argstr': '--subjects %s', 'mandatory': True,
                         'sep': ' '},
        'terminal_output': {'mandatory': True, 'nohash': True},
    }
    traits = MakeAverageSubject.input_spec().traits()
    for trait_name, meta in expected_metadata.items():
        for meta_key, expected_value in meta.items():
            # One yielded tuple per (trait, metadata key) pair, nose-style.
            yield assert_equal, getattr(traits[trait_name], meta_key), expected_value
def test_MakeAverageSubject_outputs():
    """Yield nose-style checks for the output traits of MakeAverageSubject.

    The interface exposes a single output, ``average_subject_name``, with no
    extra metadata to verify.
    """
    expected_metadata = {'average_subject_name': {}}
    traits = MakeAverageSubject.output_spec().traits()
    for trait_name, meta in expected_metadata.items():
        for meta_key, expected_value in meta.items():
            yield assert_equal, getattr(traits[trait_name], meta_key), expected_value
# NOTE(review): this re-defines test_MakeAverageSubject_inputs with the same
# body as the earlier definition; at import time the later def shadows the
# earlier one. Consider deleting one copy.
def test_MakeAverageSubject_inputs():
    """Verify the metadata on each input trait of MakeAverageSubject
    (nose-style generator test)."""
    input_map = dict(
        args=dict(argstr='%s'),
        environ=dict(nohash=True, usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        out_name=dict(argstr='--out %s', usedefault=True),
        subjects_dir=dict(),
        subjects_ids=dict(argstr='--subjects %s', mandatory=True, sep=' '),
        terminal_output=dict(mandatory=True, nohash=True),
    )
    spec = MakeAverageSubject.input_spec()
    for key, metadata in input_map.items():
        trait = spec.traits()[key]
        for metakey, value in metadata.items():
            yield assert_equal, getattr(trait, metakey), value
def processT1(t1_filename):
    """Run FreeSurfer ``recon-all`` on a fixed pair of subjects and build an
    average subject from the results, as one nipype workflow.

    Parameters
    ----------
    t1_filename : str
        Currently unused; kept for call-site compatibility.
        TODO(review): wire this into the DataGrabber template or drop it.

    Side effects
    ------------
    Creates ``amri_freesurfer_tutorial/subjects_dir`` (and workdir) under the
    current directory and runs the workflow with the MultiProc plugin.
    """
    import os
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.freesurfer.preprocess import ReconAll
    from nipype.interfaces.freesurfer.utils import MakeAverageSubject

    subject_list = ['s1', 's3']
    data_dir = os.path.abspath('data')
    subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')

    wf = pe.Workflow(name="l1workflow")
    wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')

    # Grab each subject's structural image: data/<subject_id>/struct.nii
    datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
                                                      outfields=['struct']),
                            name='datasource',
                            iterfield=['subject_id'])
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '%s/%s.nii'
    datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
    datasource.inputs.subject_id = subject_list
    # DataGrabber requires an explicit sort_filelist choice in current nipype;
    # sorting keeps the grabbed files in a deterministic order (matches the
    # other workflow variants in this file).
    datasource.inputs.sort_filelist = True

    recon_all = pe.MapNode(interface=ReconAll(), name='recon_all',
                           iterfield=['subject_id', 'T1_files'])
    recon_all.inputs.subject_id = subject_list
    # makedirs (not mkdir): the parent 'amri_freesurfer_tutorial' may not
    # exist yet, and mkdir would raise in that case.
    if not os.path.exists(subjects_dir):
        os.makedirs(subjects_dir)
    recon_all.inputs.subjects_dir = subjects_dir
    wf.connect(datasource, 'struct', recon_all, 'T1_files')

    # Average the reconstructed subjects into a single template subject.
    average = pe.Node(interface=MakeAverageSubject(), name="average")
    average.inputs.subjects_dir = subjects_dir
    wf.connect(recon_all, 'subject_id', average, 'subjects_ids')

    wf.run("MultiProc", plugin_args={'n_procs': 4})
datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) datasource.inputs.subject_id = subject_list datasource.inputs.sort_filelist = True wf.connect(inputspec, 'subject_id', datasource, 'subject_id') """ Run recon-all """ recon_all = create_reconall_workflow() recon_all.inputs.inputspec.subjects_dir = subjects_dir wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files') wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id') """ Make average subject """ average = pe.JoinNode(interface=MakeAverageSubject(), joinsource="inputspec", joinfield="subjects_ids", name="average") average.inputs.subjects_dir = subjects_dir wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids') wf.run("MultiProc", plugin_args={'n_procs': 4})
datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) datasource.inputs.subject_id = subject_list datasource.inputs.sort_filelist = True wf.connect(inputspec, 'subject_id', datasource, 'subject_id') """ Run recon-all """ recon_all = create_reconall_workflow() recon_all.inputs.inputspec.subjects_dir = subjects_dir wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files') wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id') """ Make average subject """ average = pe.JoinNode( interface=MakeAverageSubject(), joinsource="inputspec", joinfield="subjects_ids", name="average") average.inputs.subjects_dir = subjects_dir wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids') wf.run("MultiProc", plugin_args={'n_procs': 4})
outfields=['struct']), name='datasource', iterfield=['subject_id']) datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) datasource.inputs.subject_id = subject_list """ Run recon-all """ recon_all = pe.MapNode(interface=ReconAll(), name='recon_all', iterfield=['subject_id', 'T1_files']) recon_all.inputs.subject_id = subject_list if not os.path.exists(subjects_dir): os.mkdir(subjects_dir) recon_all.inputs.subjects_dir = subjects_dir wf.connect(datasource, 'struct', recon_all, 'T1_files') """ Make average subject """ average = pe.Node(interface=MakeAverageSubject(), name="average") average.inputs.subjects_dir = subjects_dir wf.connect(recon_all, 'subject_id', average, 'subjects_ids') wf.run("MultiProc", plugin_args={'n_procs': 4})