def create_ants_registration_pipeline(name='ants_registration'):
    """Warp (lists of) correlation maps into reference space with ANTs.

    An affine and a nonlinear warp are merged into one transform chain and
    applied to every image in ``corr_Z`` via a MapNode.
    """
    # keep FSL outputs compressed
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    ants_registration = Workflow(name='ants_registration')

    inputnode = Node(util.IdentityInterface(
        fields=['corr_Z', 'ants_affine', 'ants_warp', 'ref']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_corr_Z']),
                      name='outputnode')

    # stack affine + warp so they are applied as a single transform chain
    collect_transforms = Node(interface=util.Merge(2),
                              name='collect_transforms')

    # one ApplyTransforms per input image
    ants_reg = MapNode(ants.ApplyTransforms(input_image_type=3,
                                            dimension=3,
                                            interpolation='Linear'),
                       name='ants_reg', iterfield='input_image')

    ants_registration.connect(inputnode, 'corr_Z', ants_reg, 'input_image')
    ants_registration.connect(inputnode, 'ref', ants_reg, 'reference_image')
    ants_registration.connect(inputnode, 'ants_affine', collect_transforms, 'in1')
    ants_registration.connect(inputnode, 'ants_warp', collect_transforms, 'in2')
    ants_registration.connect(collect_transforms, 'out', ants_reg, 'transforms')
    ants_registration.connect(ants_reg, 'output_image', outputnode, 'ants_reg_corr_Z')

    return ants_registration
def ica_aroma_workflow():
    """Wrap the ``ica_aroma_denoise`` function in a small nipype workflow.

    All inputs are passed straight through an IdentityInterface to the
    function node; the denoised file comes back out on ``denoised``.
    """
    flow = Workflow('denoise_ica_aroma')

    # single source of truth for the parameter list
    fields = ['fslDir', 'inFile', 'mask', 'dim', 'TR', 'mc', 'denType',
              'affmat', 'warp']

    inputnode = Node(util.IdentityInterface(fields=fields), name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['denoised']),
                      name='outputnode')

    aroma = Node(util.Function(input_names=fields,
                               output_names=['denoised'],
                               function=ica_aroma_denoise),
                 name='ICA_AROMA')

    # fan every input field into the AROMA node
    for field in fields:
        flow.connect(inputnode, field, aroma, field)
    flow.connect(aroma, 'denoised', outputnode, 'denoised')

    return flow
def create_reconall_pipeline(name='reconall'):
    """Run FreeSurfer recon-all (with hippocampal subfields) on one subject."""
    reconall = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    # for RSV152 took out s because of preprocessing with version 6.0
    recon_all = Node(
        fs.ReconAll(args='-all -hippo-subfields -no-isrunning'),
        name="recon_all")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    def sub_id(sub_id):
        # '/' is not allowed in FreeSurfer subject ids
        return sub_id.replace('/', '_')

    reconall.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    reconall.connect(inputnode, 'anat', recon_all, 'T1_files')
    reconall.connect(inputnode, ('fs_subject_id', sub_id), recon_all, 'subject_id')
    reconall.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    reconall.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')

    return reconall
def freesurfer_nifti():
    '''
    Simple method to convert freesurfer mgz files to nifti format.

    Converts the ``mgz_image`` (aparc+aseg) and ``anatomical`` inputs with
    MRIConvert and exposes the converted aparc+aseg on
    ``outputnode.aparc_aseg_nifti``.
    '''
    # define workflow
    flow = Workflow(name='freesurfer_nifti')

    inputnode = Node(
        util.IdentityInterface(fields=['mgz_image', 'anatomical']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['aparc_aseg_nifti']),
                      name='outputnode')

    # define nodes
    convert_aparc_aseg = Node(interface=freesurfer.MRIConvert(),
                              name='aparc_aseg_nifti')
    convert_aparc_aseg.inputs.out_type = 'nii'

    anatomical = Node(interface=freesurfer.MRIConvert(),
                      name='anatomical_ready')
    anatomical.inputs.out_type = 'nii'

    # connect nodes (bug fix: the original returned a workflow with NO edges,
    # leaving both converters dangling and the output field never populated)
    flow.connect(inputnode, 'mgz_image', convert_aparc_aseg, 'in_file')
    flow.connect(inputnode, 'anatomical', anatomical, 'in_file')
    flow.connect(convert_aparc_aseg, 'out_file', outputnode, 'aparc_aseg_nifti')

    return flow
def func2mni_wf():
    """Warp functional images into MNI152 2 mm space, then resample to 4 mm.

    Applies the func->anat premat and the anat->MNI warp field in one
    ApplyWarp step, then uses FLIRT to produce a 4 mm isotropic copy.
    """
    mni_skull_2mm = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm.nii.gz'
    mni_brain_2mm = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz'

    flow = Workflow('func2mni_nonlinear')

    inputnode = Node(util.IdentityInterface(fields=['func_image',
                                                    'reference_image',
                                                    'func2anat_affine',
                                                    'anat2mni_warp']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['func2mni_2mm',
                                                     'func2mni_4mm']),
                      name='outputnode')

    # one-step resampling: premat (func->anat) combined with warp (anat->MNI)
    applywarp = Node(fsl.ApplyWarp(), name='apply_warp')
    applywarp.inputs.ref_file = mni_brain_2mm

    # downsample the warped series to 4 mm isotropic
    flirt4mm = Node(fsl.FLIRT(reference=mni_brain_2mm, apply_isoxfm=4.0),
                    name='resample_4mm')

    flow.connect(inputnode, 'func_image', applywarp, 'in_file')
    flow.connect(inputnode, 'anat2mni_warp', applywarp, 'field_file')
    flow.connect(inputnode, 'func2anat_affine', applywarp, 'premat')
    flow.connect(applywarp, 'out_file', flirt4mm, 'in_file')
    flow.connect(applywarp, 'out_file', outputnode, 'func2mni_2mm')
    flow.connect(flirt4mm, 'out_file', outputnode, 'func2mni_4mm')

    return flow
def create_reconall_pipeline(name='reconall'):
    """Run FreeSurfer recon-all with 7 NU-correction iterations."""
    reconall = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    def sub_id(sub_id):
        # '/' would break FreeSurfer's subject-directory layout
        return sub_id.replace('/', '_')

    reconall.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    reconall.connect(inputnode, 'anat', recon_all, 'T1_files')
    reconall.connect(inputnode, ('fs_subject_id', sub_id), recon_all, 'subject_id')
    reconall.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    reconall.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')

    return reconall
def create_dcmconvert_pipeline(name='dcmconvert'):
    """Convert a DICOM series to a single NIfTI file via DcmStack."""
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    from nipype.interfaces.dcmstack import DcmStack

    dcmconvert = Workflow(name='dcmconvert')

    inputnode = Node(util.IdentityInterface(fields=['dicoms', 'filename']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['nifti']),
                      name='outputnode')

    # stacking conversion; embed_meta keeps the DICOM header metadata
    converter = Node(DcmStack(embed_meta=True), name='converter')

    dcmconvert.connect(inputnode, 'dicoms', converter, 'dicom_files')
    dcmconvert.connect(inputnode, 'filename', converter, 'out_format')
    dcmconvert.connect(converter, 'out_file', outputnode, 'nifti')

    return dcmconvert
def make_workflow():
    """Build the white-matter-hyperintensity segmentation workflow.

    Relies on module-level names: ``args`` (CLI namespace with ``flair`` and
    ``weights`` globs), ``wmh_dir`` (sink base directory), ``wf_temp``
    (workflow working directory) and ``ibbmTum_wf``.

    Fix: the regexp substitution pattern is now a raw string — the original
    non-raw literal ``'\\.\\..*\\.\\.'`` relies on invalid escape sequences
    (a DeprecationWarning, and a SyntaxError in future Python versions).
    The pattern value itself is unchanged.
    """
    flairs = [os.path.abspath(i) for i in glob.glob(args.flair)]
    weights = [os.path.abspath(i) for i in glob.glob(args.weights)]

    # model weights are constant across iterations
    weights_source = Node(interface=IdentityInterface(fields=['weights']),
                          name='weights_source')
    weights_source.inputs.weights = weights

    # iterate the workflow over every FLAIR image found
    data_source = Node(IdentityInterface(fields=['flairs']),
                       name='data_source')
    data_source.iterables = ('flairs', flairs)

    sink = Node(interface=DataSink(), name='sink')
    sink.inputs.base_directory = wmh_dir
    # clean up nipype's iterable-derived folder names
    sink.inputs.substitutions = [
        ('_flairs_', ''),
        ('_FLAIR.nii.gz/', '/'),
    ]
    sink.inputs.regexp_substitutions = [
        (r'\.\..*\.\.', ''),
    ]

    test_wf = ibbmTum_wf.get_test_wf(row_st=192, cols_st=192, thres_mask=10)

    wmh = Workflow(name='wmh', base_dir=wf_temp)
    wmh.connect(weights_source, 'weights', test_wf, 'inputspec.weights')
    wmh.connect(data_source, 'flairs', test_wf, 'inputspec.flair')
    wmh.connect(test_wf, 'outputspec.wmh_mask', sink, '@pred')

    return wmh
def create_workflow_hrfpattern_spm():
    """Build an SPM GLM workflow (design -> estimate -> contrast) for the HRF pattern task.

    NOTE(review): ``input_node``, ``model`` and ``output_node`` are referenced
    in the connections below but are never defined in this function — as
    written this raises NameError at call time unless they exist at module
    level. Confirm where they are meant to come from (``model`` is presumably
    a SpecifySPMModel node — TODO confirm) and define them here.
    """
    # GLM design specification
    design = Node(interface=spm_design(), name='design_glm')
    design.inputs.timing_units = 'secs'
    design.inputs.interscan_interval = .85
    design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
    estimate = Node(interface=EstimateModel(), name="estimate")
    estimate.inputs.estimation_method = {'Classical': 1}
    contrastestimate = Node(interface=EstimateContrast(), name="contrast")
    # NOTE(review): the condition is named with the string '1' and weighted 1
    # — verify this matches the condition naming produced by the model node.
    contrastestimate.inputs.contrasts = [('Visual', 'T', [
        '1',
    ], [
        1,
    ])]
    w = Workflow(name='hrfpattern_spm')
    w.connect(input_node, 'bold', model, 'functional_runs')
    w.connect(input_node, 'events', model, 'bids_event_file')
    w.connect(model, 'session_info', design, 'session_info')
    w.connect(design, 'spm_mat_file', estimate, 'spm_mat_file')
    w.connect(estimate, 'spm_mat_file', contrastestimate, 'spm_mat_file')
    w.connect(estimate, 'beta_images', contrastestimate, 'beta_images')
    w.connect(estimate, 'residual_image', contrastestimate, 'residual_image')
    w.connect(contrastestimate, 'spmT_images', output_node, 'T_image')
    return w
def preprocess(input_file, output_dir, conform=True, bias_correct=True, skullstrip=True):
    """Run a conform -> N4 bias-correction -> ROBEX skull-strip pipeline.

    Bug fix: the ``conform``/``bias_correct``/``skullstrip`` flags were
    previously ignored (all three stages always ran), and the ``conform``
    parameter was shadowed by the node variable of the same name. Each stage
    is now only built when its flag is True; defaults reproduce the original
    behavior exactly.

    Parameters
    ----------
    input_file : path to the input image fed into the first enabled stage
    output_dir : base directory for the workflow's working files
    conform, bias_correct, skullstrip : enable/disable the three stages

    Returns
    -------
    The executed nipype Workflow.

    Raises
    ------
    ValueError if all three stages are disabled.
    """
    preprocess_flow = Workflow(name='preprocess', base_dir=output_dir)

    # (node, input-port, output-port) for each enabled stage, in order
    stages = []
    if conform:
        conform_node = Node(MRIConvert(conform=True,
                                       out_type='niigz',
                                       out_file='conformed.nii.gz'),
                            name='conform')
        stages.append((conform_node, 'in_file', 'out_file'))
    if bias_correct:
        n4 = Node(N4BiasFieldCorrection(dimension=3,
                                        bspline_fitting_distance=300,
                                        shrink_factor=3,
                                        n_iterations=[50, 50, 30, 20],
                                        output_image='n4.nii.gz'),
                  name='n4')
        stages.append((n4, 'input_image', 'output_image'))
    if skullstrip:
        robex = Node(ROBEX(seed=1729, stripped_image='brain.nii.gz'),
                     name='robex')
        stages.append((robex, 'input_image', 'stripped_image'))
    if not stages:
        raise ValueError('at least one preprocessing step must be enabled')

    # make sure single-stage pipelines still contain their node
    preprocess_flow.add_nodes([node for node, _, _ in stages])

    # chain consecutive stages: each stage's output feeds the next one's input
    for (src, _, out_port), (dst, in_port, _) in zip(stages, stages[1:]):
        preprocess_flow.connect(src, out_port, dst, in_port)

    # feed the raw input file into the first enabled stage
    first_node, first_port, _ = stages[0]
    setattr(first_node.inputs, first_port, input_file)

    preprocess_flow.write_graph(graph2use='orig')
    preprocess_flow.run('MultiProc', plugin_args={'n_procs': 5})
    return preprocess_flow
def create_reconall_pipeline(name='reconall'):
    """recon-all for follow-up scans; the subject id gets an ``_fu`` suffix."""
    reconall = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')

    # append "_fu" so follow-up data can be saved in the same folder as the
    # other LIFE freesurfer data
    def rename_subject_for_fu(input_id):
        return input_id + "_fu"

    rename = Node(util.Function(input_names=['input_id'],
                                output_names=['output_id'],
                                function=rename_subject_for_fu),
                  name="rename")

    # FS version 6.0: -hippocampal-subfields-T1, version 5.3: -hippo-subfields
    recon_all = Node(fs.ReconAll(args='-all -hippo-subfields -no-isrunning',
                                 openmp=24),
                     name="recon_all")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    reconall.connect(inputnode, 'fs_subject_id', rename, 'input_id')
    reconall.connect(rename, 'output_id', recon_all, 'subject_id')
    reconall.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    reconall.connect(inputnode, 'anat', recon_all, 'T1_files')
    reconall.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    reconall.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')

    return reconall
def create_reconall_pipeline(name='reconall_wf'):
    """Run FreeSurfer recon-all (full ``-all`` stream, 8 OpenMP threads).

    Fix: ``plugin_args`` was set on the Workflow object, where it has no
    effect; the sibling reconall pipelines set it on the ReconAll node so the
    cluster memory request is actually submitted with the job. It now goes on
    the node.
    """
    reconall_wf = Workflow(name='reconall_wf')

    # inputnode / outputnode
    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    # subfield segmentation after recon-all
    reconall = Node(fs.ReconAll(args='-all -no-isrunning', openmp=8),
                    name="reconall")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    # request cluster memory on the node (was mistakenly set on the workflow)
    reconall.plugin_args = {'submit_specs': 'request_memory = 9000'}

    reconall_wf.connect([
        (inputnode, reconall, [('fs_subject_id', 'subject_id')]),
        (inputnode, reconall, [('fs_subjects_dir', 'subjects_dir'),
                               ('anat', 'T1_files')]),
        (reconall, outputnode, [('subject_id', 'fs_subject_id'),
                                ('subjects_dir', 'fs_subjects_dir')])
    ])
    return reconall_wf
def create_bet_mask_from_dwi(name, do_realignment=True):
    """Build a brain mask from a DWI: extract b0s, average, (optionally)
    realign with MCFLIRT, then BET with mask output."""
    wf = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=["dwi", "bvec", "bval"]),
                     name="inputnode")
    outputnode = Node(interface=IdentityInterface(fields=["mask_file"]),
                      name="outputnode")

    # pull the b0 volumes out of the diffusion series
    b0s = Node(DwiextractB0(), "b0s")
    wf.connect(inputnode, "dwi", b0s, "in_file")
    wf.connect(inputnode, "bvec", b0s, "bvec")
    wf.connect(inputnode, "bval", b0s, "bval")

    # temporal mean of the extracted b0s
    meanb0 = Node(fsl.ImageMaths(op_string='-Tmean', suffix='_mean'),
                  name="meanb0")
    wf.connect(b0s, "out_file", meanb0, "in_file")

    bet = Node(fsl.BET(frac=0.3, robust=True, mask=True), "bet")

    if do_realignment:
        mcflirt = Node(fsl.MCFLIRT(), "mcflirt")
        wf.connect(meanb0, "out_file", mcflirt, "in_file")
        wf.connect(mcflirt, "out_file", bet, "in_file")
    else:
        wf.connect(meanb0, "out_file", bet, "in_file")

    wf.connect(bet, "mask_file", outputnode, "mask_file")
    return wf
def create_normalize_pipeline(name='normalize'):
    """Temporally normalize a coregistered EPI series via ``time_normalizer``."""
    normalize = Workflow(name='normalize')

    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg', 'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=['normalized_file']),
                      name='outputnode')

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}

    normalize.connect(inputnode, 'tr', normalize_time, 'tr')
    normalize.connect(inputnode, 'epi_coreg', normalize_time, 'in_file')
    normalize.connect(normalize_time, 'out_file', outputnode, 'normalized_file')

    return normalize
def create_ants_registration_pipeline(name='ants_registration'):
    """Apply a precomputed ANTs composite transform to a denoised time series."""
    # keep FSL outputs compressed
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    ants_registration = Workflow(name='ants_registration')

    inputnode = Node(util.IdentityInterface(
        fields=['denoised_ts', 'composite_transform', 'ref']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_ts']),
                      name='outputnode')

    # input_image_type=3: treat the input as a time series
    ants_reg = Node(ants.ApplyTransforms(input_image_type=3,
                                         dimension=3,
                                         interpolation='Linear'),
                    name='ants_reg')

    ants_registration.connect(inputnode, 'denoised_ts', ants_reg, 'input_image')
    ants_registration.connect(inputnode, 'ref', ants_reg, 'reference_image')
    ants_registration.connect(inputnode, 'composite_transform', ants_reg, 'transforms')
    ants_registration.connect(ants_reg, 'output_image', outputnode, 'ants_reg_ts')

    return ants_registration
def create_reconall_pipeline(name='reconall'):
    """Wrap the skull-stripped recon-all sub-workflow with IO identity nodes."""
    reconall = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    # recon-all flow for already skull-stripped inputs
    recon_all = create_skullstripped_recon_flow()

    def sub_id(sub_id):
        # '/' would break FreeSurfer's subject-directory layout
        return sub_id.replace('/', '_')

    reconall.connect(inputnode, 'fs_subjects_dir',
                     recon_all, 'inputspec.subjects_dir')
    reconall.connect(inputnode, 'anat', recon_all, 'inputspec.T1_files')
    reconall.connect(inputnode, ('fs_subject_id', sub_id),
                     recon_all, 'inputspec.subject_id')
    reconall.connect(recon_all, 'outputspec.subject_id',
                     outputnode, 'fs_subject_id')
    reconall.connect(recon_all, 'outputspec.subjects_dir',
                     outputnode, 'fs_subjects_dir')

    return reconall
def anatomical_preprocessing():
    '''
    Inputs:
        MP2RAGE Skull stripped image using Spectre-2010

    Workflow:
        1. reorient to RPI
        2. create a brain mask

    Returns:
        brain
        brain_mask
    '''
    flow = Workflow('anat_preprocess')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'anat_gm', 'anat_wm', 'anat_csf', 'anat_first']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=[
        'brain', 'brain_gm', 'brain_wm', 'brain_csf', 'brain_first',
        'brain_mask',
    ]), name='outputnode')

    # reorientation template node; cloned below for each tissue map
    reorient = Node(interface=preprocess.Resample(), name='anat_reorient')
    reorient.inputs.orientation = 'RPI'
    reorient.inputs.outputtype = 'NIFTI'

    erode = Node(interface=fsl.ErodeImage(), name='anat_preproc')

    make_mask = Node(interface=fsl.UnaryMaths(), name='anat_preproc_mask')
    make_mask.inputs.operation = 'bin'

    # main anatomical: reorient -> erode -> binarize into a brain mask
    flow.connect(inputnode, 'anat', reorient, 'in_file')
    flow.connect(reorient, 'out_file', erode, 'in_file')
    flow.connect(erode, 'out_file', make_mask, 'in_file')
    flow.connect(erode, 'out_file', outputnode, 'brain')
    flow.connect(make_mask, 'out_file', outputnode, 'brain_mask')

    # tissue maps and FIRST segmentation only need reorientation
    for in_field, clone_name, out_field in [
            ('anat_gm', 'anat_preproc_gm', 'brain_gm'),
            ('anat_wm', 'anat_preproc_wm', 'brain_wm'),
            ('anat_csf', 'anat_preproc_csf', 'brain_csf'),
            ('anat_first', 'anat_preproc_first', 'brain_first')]:
        tissue_reorient = reorient.clone(clone_name)
        flow.connect(inputnode, in_field, tissue_reorient, 'in_file')
        flow.connect(tissue_reorient, 'out_file', outputnode, out_field)

    return flow
def make_segment(self):
    """Build the segmentation workflow: reorient -> gunzip -> SPM NewSegment,
    then split the native-space tissue class images into gm/wm/csf."""
    # Ref: http://nipype.readthedocs.io/en/0.12.1/interfaces/generated/nipype.interfaces.fsl.utils.html#reorient2std
    ro = Node(interface=fsl.Reorient2Std(), name='ro')
    # Ref: http://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.spm/preprocess.html#segment
    seg = Node(interface=spm.NewSegment(channel_info=(0.0001, 60, (True, True))), name="seg")
    spm_tissues_split = Node(Function(['in_list'], ['gm', 'wm', 'csf'], self.spm_tissues), name='spm_tissues_split')
    # NOTE(review): `gzip` is created but never connected into the workflow —
    # confirm whether re-compressing the split tissue maps was intended here.
    gzip = Node(Function(['in_list'], ['out_list'], self.gzip_spm), name='gzip')
    segment = Workflow(name='Segment', base_dir=self.temp_dir)
    gunzip = Node(interface=Gunzip(), name='gunzip')
    # for new segment: SPM needs an uncompressed channel file
    segment.connect(ro, 'out_file', gunzip, 'in_file')
    segment.connect(gunzip, 'out_file', seg, 'channel_files')
    segment.connect(seg, 'native_class_images', spm_tissues_split, 'in_list')
    return segment
def create_workflow():
    """Transform a manually drawn mask's reference func into each run's space.

    Registers each run's temporal median to the reference functional, inverts
    the resulting matrix, and resamples the reference into every run's grid.
    """
    workflow = Workflow(name='transform_manual_mask')

    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'refsubject_id',
        'ref_funcmask',
        'ref_func',
        'funcs',
    ]), name='in')

    # take the median over time first (FLIRT no longer automatically uses the
    # first volume when handed 4D files)
    median_func = MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )

    # transform: func -> the manual mask's reference func
    findtrans = MapNode(fsl.FLIRT(), iterfield=['in_file'], name='findtrans')

    # invert it so we can bring the reference into each func's space
    invert = MapNode(fsl.ConvertXFM(invert_xfm=True),
                     iterfield=['in_file'], name='invert')

    # resample ref_func onto each func grid with the inverted matrix
    funcreg = MapNode(ApplyXFMRefName(),
                      iterfield=['in_matrix_file', 'reference'],
                      name='funcreg')

    workflow.connect(inputs, 'funcs', median_func, 'in_file')
    workflow.connect(median_func, 'out_file', findtrans, 'in_file')
    workflow.connect(inputs, 'ref_func', findtrans, 'reference')
    workflow.connect(findtrans, 'out_matrix_file', invert, 'in_file')
    workflow.connect(invert, 'out_file', funcreg, 'in_matrix_file')
    workflow.connect(inputs, 'ref_func', funcreg, 'in_file')
    workflow.connect(inputs, 'funcs', funcreg, 'reference')

    return workflow
def create(self):
    """Create the nodes and connections for the cross-validation workflow.

    Reads the subject CSV once, splits its rows into train/test index sets,
    and wires an iterating IdentityInterface into the label-fusion
    sub-workflow.

    Fix: ``eval("csvOut.outputs.{0}".format(label))`` is replaced by
    ``getattr`` — same attribute lookup without executing a constructed
    string.
    """
    # Preamble: run the CSV reader up front so iterables can be built from it
    csvReader = CSVReader()
    csvReader.inputs.in_file = self.csv_file.default_value
    csvReader.inputs.header = self.hasHeader.default_value
    csvOut = csvReader.run()

    print("=" * 80)
    print(csvOut.outputs.__dict__)
    print("=" * 80)

    iters = {}
    label = list(csvOut.outputs.__dict__.keys())[0]
    # getattr instead of eval(): same lookup, no string execution
    result = getattr(csvOut.outputs, label)
    iters['tests'], iters['trains'] = subsample_crossValidationSet(
        result, self.sample_size.default_value)

    # Main event
    out_fields = ['T1', 'T2', 'Label', 'trainindex', 'testindex']
    inputsND = Node(interface=IdentityInterface(fields=out_fields),
                    run_without_submitting=True,
                    name='inputs')
    inputsND.iterables = [('trainindex', iters['trains']),
                          ('testindex', iters['tests'])]
    if not self.hasHeader.default_value:
        # headerless CSVs expose generic column_N outputs
        inputsND.inputs.T1 = csvOut.outputs.column_0
        inputsND.inputs.Label = csvOut.outputs.column_1
        inputsND.inputs.T2 = csvOut.outputs.column_2
    else:
        inputsND.inputs.T1 = csvOut.outputs.__dict__['t1']
        inputsND.inputs.Label = csvOut.outputs.__dict__['label']
        inputsND.inputs.T2 = csvOut.outputs.__dict__['t2']

    metaflow = Workflow(name='metaflow')
    metaflow.config['execution'] = {
        'plugin': 'Linear',
        'stop_on_first_crash': 'false',
        'stop_on_first_rerun': 'false',
        # This stops at first attempt to rerun, before running, and before
        # deleting previous results.
        'hash_method': 'timestamp',
        'single_thread_matlab': 'true',
        # Multi-core 2011a multi-core for matrix multiplication.
        'remove_unnecessary_outputs': 'true',
        'use_relative_paths': 'false',
        # relative paths should be on, require hash update when changed.
        'remove_node_directories': 'false',  # Experimental
        'local_hash_check': 'false'
    }

    metaflow.add_nodes([inputsND])
    fusionflow = FusionLabelWorkflow()
    self.connect([
        (metaflow, fusionflow, [('inputs.trainindex', 'trainT1s.index'),
                                ('inputs.T1', 'trainT1s.inlist')]),
        (metaflow, fusionflow, [('inputs.trainindex', 'trainLabels.index'),
                                ('inputs.Label', 'trainLabels.inlist')]),
        (metaflow, fusionflow, [('inputs.testindex', 'testT1s.index'),
                                ('inputs.T1', 'testT1s.inlist')])
    ])
def make_w_coreg_3T_afni():
    """Rigid-body (shift+rotate) coregistration of the mean functional to T1w
    with AFNI 3dAllineate; outputs the func->struct affine matrix."""
    n_in = Node(IdentityInterface(fields=[
        'mean',
        'T1w',
    ]), name='input')
    n_out = Node(IdentityInterface(fields=[
        'mat_func2struct',
    ]), name='output')

    # all options set up front via the interface constructor
    n_allineate = Node(interface=Allineate(
        one_pass=True,
        args='-master BASE',
        warp_type='shift_rotate',
        cost='nmi',
        outputtype='NIFTI',
        out_file='afni_realigned.nii.gz',
        out_matrix='afni_realigned.aff12.1D'),
        name='allineate')

    w = Workflow('coreg_afni')
    w.connect(n_in, 'mean', n_allineate, 'in_file')
    w.connect(n_in, 'T1w', n_allineate, 'reference')
    w.connect(n_allineate, 'out_matrix', n_out, 'mat_func2struct')
    return w
def create_mgzconvert_pipeline(name='mgzconvert'):
    """Convert FreeSurfer results to nifti: head, brain, brain mask, white
    matter segmentation and a WM edge image for coregistration QC."""
    mgzconvert = Workflow(name='mgzconvert')

    inputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=[
        'anat_head', 'anat_brain', 'anat_brain_mask', 'wmseg', 'wmedge'
    ]), name='outputnode')

    # grab files from the freesurfer subject directory
    fs_import = Node(interface=nio.FreeSurferSource(), name='fs_import')

    # Freesurfer T1 (head) to nifti
    head_convert = Node(fs.MRIConvert(out_type='niigz', out_file='T1.nii.gz'),
                        name='head_convert')

    # pick the aparc+aseg volume out of the FreeSurferSource file list
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    # brainmask volume to nifti, then binarize (+ fill holes) into a mask
    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='brain.nii.gz'),
                         name='brain_convert')
    brain_binarize = Node(fsl.ImageMaths(op_string='-bin -fillh',
                                         out_file='T1_brain_mask.nii.gz'),
                          name='brain_binarize')

    # cortical and cerebellar white matter volumes to construct wm edge
    # [lh cerebral wm, lh cerebellar wm, rh cerebral wm, rh cerebellar wm, brain stem]
    wmseg = Node(fs.Binarize(out_type='nii.gz',
                             match=[2, 7, 41, 46, 16],
                             binary_file='T1_brain_wmseg.nii.gz'),
                 name='wmseg')

    # make edge from wmseg to visualize coregistration quality
    edge = Node(fsl.ApplyMask(args='-edge -bin',
                              out_file='T1_brain_wmedge.nii.gz'),
                name='edge')

    mgzconvert.connect(inputnode, 'fs_subjects_dir', fs_import, 'subjects_dir')
    mgzconvert.connect(inputnode, 'fs_subject_id', fs_import, 'subject_id')
    mgzconvert.connect(fs_import, 'T1', head_convert, 'in_file')
    mgzconvert.connect(fs_import, ('aparc_aseg', get_aparc_aseg),
                       wmseg, 'in_file')
    mgzconvert.connect(fs_import, 'brainmask', brain_convert, 'in_file')
    mgzconvert.connect(wmseg, 'binary_file', edge, 'in_file')
    mgzconvert.connect(wmseg, 'binary_file', edge, 'mask_file')
    mgzconvert.connect(head_convert, 'out_file', outputnode, 'anat_head')
    mgzconvert.connect(brain_convert, 'out_file', outputnode, 'anat_brain')
    mgzconvert.connect(brain_convert, 'out_file', brain_binarize, 'in_file')
    mgzconvert.connect(brain_binarize, 'out_file', outputnode, 'anat_brain_mask')
    mgzconvert.connect(wmseg, 'binary_file', outputnode, 'wmseg')
    mgzconvert.connect(edge, 'out_file', outputnode, 'wmedge')

    return mgzconvert
def create_smoothing_pipeline(name='smoothing'):
    """Smooth a list of time series with a caller-supplied FWHM."""
    # set fsl output type (uncompressed nifti)
    fsl.FSLCommand.set_default_output_type('NIFTI')

    smoothing = Workflow(name='smoothing')

    inputnode = Node(util.IdentityInterface(fields=['ts_transformed', 'fwhm']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['ts_smoothed']),
                      name='outputnode')

    # apply smoothing to each file in the input list
    smooth = MapNode(fsl.Smooth(), name='smooth', iterfield='in_file')

    smoothing.connect(inputnode, 'ts_transformed', smooth, 'in_file')
    smoothing.connect(inputnode, 'fwhm', smooth, 'fwhm')
    smoothing.connect(smooth, 'smoothed_file', outputnode, 'ts_smoothed')

    return smoothing
def test_nipype_srtm_zhou2003_roi(self):
    """Run the SRTM Zhou2003 ROI kinetic model through a minimal nipype workflow."""
    from temporalimage import Quantity
    from temporalimage.t4d import _csvwrite_frameTiming
    import pandas as pd
    from .generate_test_data import generate_fakeTAC_SRTM

    # synthetic target/reference TACs with known binding potential and R1
    self.t, self.dt, self.TAC, self.refTAC = generate_fakeTAC_SRTM(BP=0.5, R1=0.7)

    # write the frame timing file (start/end of each frame, in minutes)
    frameStart = self.t - self.dt/2
    frameEnd = self.t + self.dt/2
    csvfilename = os.path.join(self.tmpdirname,'srtm_roi_timing.csv')
    _csvwrite_frameTiming(frameStart, frameEnd, csvfilename, time_unit='min')

    # ROI TAC table: one row per ROI ('target' and 'ref'), transposed on save
    roiTACcsvfile = os.path.join(self.tmpdirname,'roi_tacs.csv')
    roiTACs = pd.DataFrame({'target': self.TAC, 'ref': self.refTAC})
    roiTACs.T.to_csv(roiTACcsvfile, index_label='ROI')

    # feed the csv in through an iterable source node rather than setting
    # the input directly, to exercise the workflow plumbing
    infosource = Node(IdentityInterface(fields=['in_file']), name="infosource")
    infosource.iterables = ('in_file', [roiTACcsvfile])

    km = Node(KineticModelROI(model='SRTM_Zhou2003',
                              #roiTACcsvFile=roiTACcsvfile,
                              frameTimingFile=csvfilename,
                              refRegion='ref',
                              startActivity=self.startActivity,
                              weights=self.weights),
              name="km")

    km_workflow = Workflow(name="km_workflow", base_dir=self.tmpdirname)
    km_workflow.connect([
        (infosource, km, [('in_file', 'roiTACcsvFile')])
    ])
    km_workflow.run()
def create_workflow_allin_slices(name='motion_correction', iterfield=None):
    """Slice-wise AFNI motion correction (3dAllinSlices) over a list of runs.

    Fix: the ``iterfield`` parameter previously used a mutable default
    argument (``['in_file']``), which is shared across calls; it now defaults
    to ``None`` and is resolved inside the function. Call-site behavior is
    unchanged.
    """
    if iterfield is None:
        iterfield = ['in_file']

    workflow = Workflow(name=name)

    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'ref_func',
        'ref_func_weights',
        'funcs',
        'funcs_masks',
        'mc_method',
    ]), name='in')
    inputs.iterables = [
        ('mc_method', ['afni:3dAllinSlices'])
    ]

    # one motion-correction run per functional file
    mc = MapNode(
        AFNIAllinSlices(),
        iterfield=iterfield,
        name='mc')

    workflow.connect(
        [(inputs, mc, [('funcs', 'in_file'),
                       ('ref_func_weights', 'in_weight_file'),
                       ('ref_func', 'ref_file'),
                       ])])
    return workflow
def make_full_workflow(session='7TGE', n_fmap=10):
    """Assemble the full functional preprocessing workflow for one session:
    merge runs, preprocess with fieldmaps, smooth twice, and coregister the
    mean functional to the T1w (7T vs 3T pipelines chosen by session name)."""
    n_in = Node(IdentityInterface(fields=[
        'T1w',
        'func',
        'fmap',
        'subject',
    ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'func1',
        'func2',
        'filtered1',
        'filtered2',
        'mat_func2struct',
    ]), name='output')

    # concatenate all functional runs in time before preprocessing
    n_merge = Node(interface=Merge(), name='merge')
    n_merge.inputs.dimension = 't'

    w_preproc = make_workflow(n_fmap)

    w_smooth1 = make_w_smooth('1')
    w_smooth2 = make_w_smooth('2')

    w = Workflow(session)

    w.connect(n_in, 'func', n_merge, 'in_files')
    w.connect(n_merge, 'merged_file', w_preproc, 'input.func')
    w.connect(n_in, 'fmap', w_preproc, 'input.fmap')
    w.connect(w_preproc, 'output.func1', n_out, 'func1')
    w.connect(w_preproc, 'output.func2', n_out, 'func2')
    w.connect(w_preproc, 'output.func1', w_smooth1, 'input.func')
    w.connect(w_preproc, 'output.func2', w_smooth2, 'input.func')
    w.connect(w_smooth1, 'output.func', n_out, 'filtered1')
    w.connect(w_smooth2, 'output.func', n_out, 'filtered2')

    if session.startswith('7T'):
        # 7T sessions use the dedicated 7T coregistration pipeline
        w_coreg_7T = make_w_coreg_7T()
        w.connect(n_in, 'T1w', w_coreg_7T, 'input.T1w')
        w.connect(w_preproc, 'output.mean', w_coreg_7T, 'input.mean')
        w.connect(w_coreg_7T, 'output.mat_ants', n_out, 'mat_func2struct')
    else:
        # 3T sessions: ANTs-based coregistration. The freesurfer-based
        # variant is kept (disconnected) in the string literal below.
        w_coreg = make_w_freesurfer2func()
        w_coreg_3T = make_w_coreg_3T_ants()
        """
        w.connect(n_in, 'T1w', w_coreg, 'input.T1w')
        w.connect(n_in, 'subject', w_coreg, 'input.subject')
        w.connect(w_preproc, 'output.mean', w_coreg, 'input.mean')
        """
        w.connect(n_in, 'T1w', w_coreg_3T, 'input.T1w')
        w.connect(w_preproc, 'output.mean', w_coreg_3T, 'input.mean')
        w.connect(w_coreg_3T, 'output.mat_func2struct', n_out,
                  'mat_func2struct')

    return w
def create_moco_pipeline(name='motion_correction'):
    """MCFLIRT-based motion correction with parameter plots, temporal mean
    and tSNR computation."""
    # initiate workflow
    moco = Workflow(name='motion_correction')
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['epi']), name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi_moco', 'par_moco', 'mat_moco', 'rms_moco', 'epi_mean', 'rotplot',
        'transplot', 'dispplots', 'tsnr_file'
    ]), name='outputnode')
    # mcflirt motion correction to 1st volume
    mcflirt = Node(fsl.MCFLIRT(save_mats=True,
                               save_plots=True,
                               save_rms=True,
                               ref_vol=1,
                               out_file='rest_realigned.nii.gz'),
                   name='mcflirt')
    # plot motion parameters
    rotplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                           plot_type='rotations',
                                           out_file='rotation_plot.png'),
                      name='rotplotter')
    transplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                             plot_type='translations',
                                             out_file='translation_plot.png'),
                        name='transplotter')
    dispplotter = MapNode(interface=fsl.PlotMotionParams(
        in_source='fsl',
        plot_type='displacement',
    ),
        name='dispplotter',
        iterfield=['in_file'])
    # NOTE(review): plot_type is both set on the interface above and iterated
    # below with a single value — redundant; confirm which was intended.
    dispplotter.iterables = ('plot_type', ['displacement'])
    # calculate tmean
    tmean = Node(fsl.maths.MeanImage(dimension='T',
                                     out_file='rest_realigned_mean.nii.gz'),
                 name='tmean')
    # calculate tsnr
    tsnr = Node(confounds.TSNR(), name='tsnr')
    # create connections
    moco.connect([(inputnode, mcflirt, [('epi', 'in_file')]),
                  (mcflirt, tmean, [('out_file', 'in_file')]),
                  (mcflirt, rotplotter, [('par_file', 'in_file')]),
                  (mcflirt, transplotter, [('par_file', 'in_file')]),
                  (mcflirt, dispplotter, [('rms_files', 'in_file')]),
                  (tmean, outputnode, [('out_file', 'epi_mean')]),
                  (mcflirt, outputnode, [('out_file', 'epi_moco'),
                                         ('par_file', 'par_moco'),
                                         ('mat_file', 'mat_moco'),
                                         ('rms_files', 'rms_moco')]),
                  (rotplotter, outputnode, [('out_file', 'rotplot')]),
                  (transplotter, outputnode, [('out_file', 'transplot')]),
                  (dispplotter, outputnode, [('out_file', 'dispplots')]),
                  (mcflirt, tsnr, [('out_file', 'in_file')]),
                  (tsnr, outputnode, [('tsnr_file', 'tsnr_file')])])
    return moco
def stub_wf(*args, **kwargs):
    """No-op realignment workflow: forwards 'func' unchanged as 'realigned_file'.

    All positional/keyword arguments are accepted and ignored so the stub can
    stand in for a real realigner factory.
    """
    source = Node(IdentityInterface(fields=['func']), name='inputspec')
    sink = Node(interface=IdentityInterface(fields=['realigned_file']),
                name='outputspec')
    wf = Workflow(name='realigner')
    wf.connect(source, 'func', sink, 'realigned_file')
    return wf
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                      standard_brain):
    """Run the structural preprocessing for one subject: mgz conversion,
    normalization to the standard brain, and sinking of the results.

    NOTE(review): the reconall sub-workflow is configured but its connections
    are commented out ("for second round of structural don't redo
    FREESURFER") — FreeSurfer output is assumed to already exist in
    ``freesurfer_dir``.
    """
    # main workflow
    struct_preproc = Workflow(name='anat_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution'][
        'crashdump_dir'] = struct_preproc.base_dir + "/crash_files"
    # select files
    #templates={'anat': '3T/nifti/MPRAGEADNI32Ch.nii.gz'}
    #selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir), name="selectfiles")
    # workflow to run freesurfer reconall
    reconall = create_reconall_pipeline()
    reconall.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    reconall.inputs.inputnode.fs_subject_id = subject
    # workflow to get brain, head and wmseg from freesurfer and convert to nifti
    mgzconvert = create_mgzconvert_pipeline()
    mgzconvert.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    mgzconvert.inputs.inputnode.fs_subject_id = subject
    normalize = create_normalize_pipeline()
    normalize.inputs.inputnode.standard = standard_brain
    # sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('transform_Warped',
                                             'T1_brain2mni')]),
                name='sink')
    # connections
    struct_preproc.connect([
        #(selectfiles, sink, [('anat', 'outputnode.test')]),
        #(selectfiles, reconall, [('anat', 'inputnode.anat')]),
        #(reconall, mgzconvert, [('outputnode.fs_subject_id', 'inputnode.fs_subject_id'),
        #                        ('outputnode.fs_subjects_dir', 'inputnode.fs_subjects_dir')]),
        #for second round of structural don't redo FREESURFER
        (mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
        (mgzconvert, sink, [('outputnode.anat_head', '@head')]),
        (mgzconvert, sink, [('outputnode.anat_brain', '@brain')]),
        (mgzconvert, sink, [('outputnode.anat_brain_mask', '@mask')]),
        (mgzconvert, sink, [('outputnode.wmedge', '@wmedge')]),
        (normalize, sink, [('outputnode.anat2std', '@anat2std'),
                           ('outputnode.anat2std_transforms',
                            'transforms2mni.@anat2std_transforms'),
                           ('outputnode.std2anat_transforms',
                            'transforms2mni.@std2anat_transforms')])
    ])
    struct_preproc.write_graph(dotfilename='struct_preproc.dot',
                               graph2use='colored', format='pdf',
                               simple_form=True)
    # struct_preproc.run()
    struct_preproc.run(
    )  #, plugin_args = {'initial_specs': 'request_memory = 1500'}plugin='CondorDAGMan'
def create_converter_diffusion_pipeline(working_dir, ds_dir, name='converter_diffusion'):
    """Convert diffusion DICOMs to nifti, give them fixed names, reorient to
    standard orientation and sink dwi/bvals/bvecs."""
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # keep FSL outputs compressed
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    inputnode = Node(util.IdentityInterface(fields=['dMRI_dicom']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['dMRI']),
                      name='outputnode')

    niftisink = Node(nio.DataSink(), name='niftisink')
    niftisink.inputs.base_directory = os.path.join(ds_dir, 'raw_niftis')

    #######
    # dicom -> nifti conversion
    converter_dMRI = Node(Dcm2nii(gzip_output=True,
                                  nii_output=True,
                                  anonymize=False),
                          name="converter_dMRI")
    converter_dMRI.plugin_args = {'submit_specs': 'request_memory = 2000'}
    converter_wf.connect(inputnode, 'dMRI_dicom', converter_dMRI, 'source_names')

    # fixed file names for dwi / bvecs / bvals
    dMRI_rename = Node(util.Rename(format_string='DTI_mx_137.nii.gz'),
                       name='dMRI_rename')
    bvecs_rename = Node(util.Rename(format_string='DTI_mx_137.bvecs'),
                        name='bvecs_rename')
    bvals_rename = Node(util.Rename(format_string='DTI_mx_137.bvals'),
                        name='bvals_rename')
    converter_wf.connect(converter_dMRI, 'converted_files', dMRI_rename, 'in_file')
    converter_wf.connect(converter_dMRI, 'bvecs', bvecs_rename, 'in_file')
    converter_wf.connect(converter_dMRI, 'bvals', bvals_rename, 'in_file')

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name='reor_2_std')
    converter_wf.connect(dMRI_rename, 'out_file', reor_2_std, 'in_file')
    converter_wf.connect(reor_2_std, 'out_file', outputnode, 'dMRI')

    # save original niftis
    converter_wf.connect(reor_2_std, 'out_file', niftisink, 'dMRI.@dwi')
    converter_wf.connect(bvals_rename, 'out_file', niftisink, 'dMRI.@bvals')
    converter_wf.connect(bvecs_rename, 'out_file', niftisink, 'dMRI.@bvecs')

    converter_wf.write_graph(dotfilename='converter_struct',
                             graph2use='flat', format='pdf')
    return converter_wf