def create_workflow():
    """Build a workflow aligning the reference functional to each run.

    For every functional run, registers the run's temporal median to the
    reference functional, inverts the resulting transform, and applies the
    inverse to resample the reference functional into the run's space.
    """
    wf = Workflow(name='transform_manual_mask')

    input_node = Node(
        IdentityInterface(fields=[
            'subject_id',
            'session_id',
            'refsubject_id',
            'ref_funcmask',
            'ref_func',
            'funcs',
        ]),
        name='in')

    # FLIRT's behaviour changed and it no longer automatically takes the
    # first volume of a 4D file, so collapse each run to its temporal
    # median before registration.
    temporal_median = MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )

    # Per-run transform: run median -> reference functional.
    estimate_xfm = MapNode(
        fsl.FLIRT(),
        iterfield=['in_file'],
        name='findtrans')

    # Invert it to obtain reference functional -> run.
    invert_xfm = MapNode(
        fsl.ConvertXFM(invert_xfm=True),
        name='invert',
        iterfield=['in_file'],
    )

    # Resample the reference functional into each run's native space.
    apply_xfm = MapNode(
        ApplyXFMRefName(),
        name='funcreg',
        iterfield=['in_matrix_file', 'reference'],
    )

    wf.connect([
        (input_node, temporal_median, [('funcs', 'in_file')]),
        (temporal_median, estimate_xfm, [('out_file', 'in_file')]),
        (input_node, estimate_xfm, [('ref_func', 'reference')]),
        (estimate_xfm, invert_xfm, [('out_matrix_file', 'in_file')]),
        (invert_xfm, apply_xfm, [('out_file', 'in_matrix_file')]),
        (input_node, apply_xfm, [('ref_func', 'in_file')]),
        (input_node, apply_xfm, [('funcs', 'reference')]),
    ])
    return wf
def create_moco_pipeline(name='motion_correction'):
    """Build a motion-correction workflow for an EPI time series.

    Realigns every volume to volume 1 with MCFLIRT, plots the motion
    parameters, and computes the temporal mean and tSNR of the realigned
    series.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        ``inputnode`` takes ``epi``; ``outputnode`` exposes the realigned
        series, motion parameters/matrices/RMS files, mean EPI, motion
        plots and the tSNR map.
    """
    # initiate workflow -- honour the caller-supplied name (it was
    # previously hard-coded to 'motion_correction', silently ignoring
    # the `name` argument)
    moco = Workflow(name=name)
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['epi']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi_moco', 'par_moco', 'mat_moco', 'rms_moco', 'epi_mean',
        'rotplot', 'transplot', 'dispplots', 'tsnr_file'
    ]), name='outputnode')
    # mcflirt motion correction to 1st volume
    mcflirt = Node(fsl.MCFLIRT(save_mats=True,
                               save_plots=True,
                               save_rms=True,
                               ref_vol=1,
                               out_file='rest_realigned.nii.gz'),
                   name='mcflirt')
    # plot motion parameters
    rotplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                           plot_type='rotations',
                                           out_file='rotation_plot.png'),
                      name='rotplotter')
    transplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                             plot_type='translations',
                                             out_file='translation_plot.png'),
                        name='transplotter')
    # one displacement plot per RMS file produced by MCFLIRT
    dispplotter = MapNode(interface=fsl.PlotMotionParams(
        in_source='fsl',
        plot_type='displacement',
    ), name='dispplotter', iterfield=['in_file'])
    dispplotter.iterables = ('plot_type', ['displacement'])
    # calculate tmean
    tmean = Node(fsl.maths.MeanImage(dimension='T',
                                     out_file='rest_realigned_mean.nii.gz'),
                 name='tmean')
    # calculate tsnr
    tsnr = Node(confounds.TSNR(), name='tsnr')
    # create connections
    moco.connect([(inputnode, mcflirt, [('epi', 'in_file')]),
                  (mcflirt, tmean, [('out_file', 'in_file')]),
                  (mcflirt, rotplotter, [('par_file', 'in_file')]),
                  (mcflirt, transplotter, [('par_file', 'in_file')]),
                  (mcflirt, dispplotter, [('rms_files', 'in_file')]),
                  (tmean, outputnode, [('out_file', 'epi_mean')]),
                  (mcflirt, outputnode, [('out_file', 'epi_moco'),
                                         ('par_file', 'par_moco'),
                                         ('mat_file', 'mat_moco'),
                                         ('rms_files', 'rms_moco')]),
                  (rotplotter, outputnode, [('out_file', 'rotplot')]),
                  (transplotter, outputnode, [('out_file', 'transplot')]),
                  (dispplotter, outputnode, [('out_file', 'dispplots')]),
                  (mcflirt, tsnr, [('out_file', 'in_file')]),
                  (tsnr, outputnode, [('tsnr_file', 'tsnr_file')])])
    return moco
def create_corr_ts(name='corr_ts'):
    """Build a seed-based correlation workflow.

    Extracts the mean time series within each seed mask, correlates every
    voxel's time series against it with AFNI's ``Fim``, and Fisher
    z-transforms the resulting correlation map.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        Inputs: ``ts`` (4D time series), ``hc_mask`` (seed mask(s)).
        Outputs: ``corrmap``, ``corrmap_z``, ``hc_ts``.
    """
    # honour the caller-supplied name (previously hard-coded to
    # 'corr_ts', silently ignoring the `name` argument)
    corr_ts = Workflow(name=name)
    # Define nodes
    inputnode = Node(util.IdentityInterface(fields=[
        'ts',
        'hc_mask',
    ]), name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(
        fields=['corrmap', 'corrmap_z', 'hc_ts']), name='outputnode')
    # extract mean time series of mask
    mean_TS = MapNode(interface=fsl.ImageMeants(), name="mean_TS",
                      iterfield='mask')
    # iterate over using Eigenvalues or mean
    # mean_TS.iterables = ("eig", [True, False])
    # mean_TS.inputs.order = 1
    # mean_TS.inputs.show_all = True
    mean_TS.inputs.eig = False  # use only mean of ROI
    mean_TS.inputs.out_file = "TS.1D"
    # calculate correlation of all voxels with seed voxel
    corr_TS = MapNode(interface=afni.Fim(), name='corr_TS',
                      iterfield='ideal_file')
    corr_TS.inputs.out = 'Correlation'
    corr_TS.inputs.out_file = "corr.nii.gz"
    # Fisher r-to-z: z = arctanh(r) = ln((1+r)/(1-r))/2
    apply_FisherZ = MapNode(interface=afni.Calc(), name="apply_FisherZ",
                            iterfield='in_file_a')
    apply_FisherZ.inputs.expr = 'log((1+a)/(1-a))/2'  # log = ln
    apply_FisherZ.inputs.out_file = 'corr_Z.nii.gz'
    apply_FisherZ.inputs.outputtype = "NIFTI"
    corr_ts.connect([(inputnode, mean_TS, [('hc_mask', 'mask')]),
                     (inputnode, mean_TS, [('ts', 'in_file')]),
                     (mean_TS, outputnode, [('out_file', 'hc_ts')]),
                     (inputnode, corr_TS, [('ts', 'in_file')]),
                     (mean_TS, corr_TS, [('out_file', 'ideal_file')]),
                     (corr_TS, apply_FisherZ, [('out_file', 'in_file_a')]),
                     (corr_TS, outputnode, [('out_file', 'corrmap')]),
                     (apply_FisherZ, outputnode, [('out_file', 'corrmap_z')])])
    return corr_ts
def create_smoothing_pipeline(name='smoothing'):
    """Build a workflow that spatially smooths time series with FSL.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        Inputs: ``ts_transformed`` (files to smooth), ``fwhm`` (kernel
        size in mm). Output: ``ts_smoothed``.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI')
    # initiate workflow -- honour the caller-supplied name (previously
    # hard-coded to 'smoothing', silently ignoring the `name` argument)
    smoothing = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts_transformed',
                                                    'fwhm'
                                                    ]),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ts_smoothed'
                                                     ]),
                      name='outputnode')
    # apply smoothing
    smooth = MapNode(fsl.Smooth(), name='smooth', iterfield='in_file')
    smoothing.connect([
        (inputnode, smooth, [
            ('ts_transformed', 'in_file'),
            ('fwhm', 'fwhm')]
         ),
        (smooth, outputnode, [('smoothed_file', 'ts_smoothed')]
         )
    ])
    return smoothing
def create_workflow_allin_slices(name='motion_correction',
                                 iterfield=None):
    """Build a slice-wise motion-correction workflow (AFNI 3dAllinSlices).

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.
    iterfield : list of str, optional
        Fields the motion-correction MapNode iterates over.
        Defaults to ``['in_file']``.

    Returns
    -------
    Workflow
        Input node ``in`` iterates over ``mc_method`` and feeds
        ``funcs``/``ref_func``/``ref_func_weights`` into the MC node.
    """
    # BUG FIX: avoid the shared mutable default argument
    # (was `iterfield=['in_file']`).
    if iterfield is None:
        iterfield = ['in_file']
    workflow = Workflow(name=name)
    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'ref_func',
        'ref_func_weights',
        'funcs',
        'funcs_masks',
        'mc_method',
    ]), name='in')
    # expand the workflow over the (single) motion-correction method
    inputs.iterables = [
        ('mc_method', ['afni:3dAllinSlices'])
    ]
    mc = MapNode(
        AFNIAllinSlices(),
        iterfield=iterfield,
        name='mc')
    workflow.connect(
        [(inputs, mc,
          [('funcs', 'in_file'),
           ('ref_func_weights', 'in_weight_file'),
           ('ref_func', 'ref_file'),
           ])])
    return workflow
def create_ants_registration_pipeline(name='ants_registration'):
    """Build a workflow that warps correlation maps with ANTs.

    Merges the supplied affine and warp field into a transform list and
    applies it to each ``corr_Z`` image, resampled onto ``ref``.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        Inputs: ``corr_Z``, ``ants_affine``, ``ants_warp``, ``ref``.
        Output: ``ants_reg_corr_Z``.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow -- honour the caller-supplied name (previously
    # hard-coded to 'ants_registration', silently ignoring the argument)
    ants_registration = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['corr_Z', 'ants_affine', 'ants_warp', 'ref']),
        name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'ants_reg_corr_Z',
    ]), name='outputnode')
    # also transform to mni space
    collect_transforms = Node(interface=util.Merge(2),
                              name='collect_transforms')
    ants_reg = MapNode(ants.ApplyTransforms(input_image_type=3,
                                            dimension=3,
                                            interpolation='Linear'),
                       name='ants_reg', iterfield='input_image')
    ants_registration.connect([
        (inputnode, ants_reg, [('corr_Z', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, collect_transforms, [('ants_affine', 'in1')]),
        (inputnode, collect_transforms, [('ants_warp', 'in2')]),
        (collect_transforms, ants_reg, [('out', 'transforms')]),
        (ants_reg, outputnode, [('output_image', 'ants_reg_corr_Z')])
    ])
    return ants_registration
def create_images_workflow():
    """Minimal structural preprocessing.

    Corrects for the sphinx acquisition position, then reorients the
    images to FSL's standard orientation.
    """
    wf = Workflow(name='minimal_proc')

    in_node = Node(IdentityInterface(fields=['images']), name="in")
    out_node = Node(IdentityInterface(fields=['images']), name="out")

    # Undo the sphinx position with FreeSurfer's converter.
    sphinx_fix = MapNode(
        fs.MRIConvert(sphinx=True),
        iterfield=['in_file'],
        name='sphinx')

    # Reorient to the standard template orientation.
    reorient = MapNode(
        fsl.Reorient2Std(),
        iterfield=['in_file'],
        name='ro')

    wf.connect([
        (in_node, sphinx_fix, [('images', 'in_file')]),
        (sphinx_fix, reorient, [('out_file', 'in_file')]),
        (reorient, out_node, [('out_file', 'images')]),
    ])
    return wf
def create_warp_transform(name='warpmultitransform'):
    """Build a workflow applying a four-transform ANTs chain.

    Merges the four supplied transforms and applies them to each input
    image, resampled onto its matching reference.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        Inputs: ``input_image``, ``atlas_aff2template``,
        ``atlas_warp2template``, ``atlas2target_composite``,
        ``template2target_inverse``, ``ref``. Output: ``ants_reg``.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow -- honour the caller-supplied name (it was
    # previously hard-coded to 'warp', silently ignoring `name`; note
    # the default workflow name therefore changes to 'warpmultitransform')
    warp = Workflow(name=name)
    # inputnode
    inputnode = MapNode(util.IdentityInterface(fields=[
        'input_image', 'atlas_aff2template', 'atlas_warp2template',
        'atlas2target_composite', 'template2target_inverse', 'ref'
    ]), name='inputnode', iterfield=['input_image', 'ref'])
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'ants_reg',
    ]), name='outputnode')
    collect_transforms = Node(interface=util.Merge(4),
                              name='collect_transforms')
    ants_reg = MapNode(ants.ApplyTransforms(input_image_type=3,
                                            dimension=3,
                                            interpolation='Linear'),
                       name='apply_ants_reg',
                       iterfield=['input_image', 'reference_image'])
    # no transform in the chain is inverted
    ants_reg.inputs.invert_transform_flags = [False, False, False, False]
    warp.connect([
        (inputnode, ants_reg, [('input_image', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        # in1..in4 ordering fixes the position of each transform in the
        # list passed to ApplyTransforms (antsApplyTransforms applies a
        # transform stack last-to-first)
        (inputnode, collect_transforms, [('atlas_aff2template', 'in4')]),
        (inputnode, collect_transforms, [('atlas_warp2template', 'in3')]),
        (inputnode, collect_transforms, [('atlas2target_composite', 'in2')]),
        (inputnode, collect_transforms, [('template2target_inverse', 'in1')]),
        (collect_transforms, ants_reg, [
            ('out', 'transforms')
        ]),  # for WarpImageMultiTransform: transformation_series
        (ants_reg, outputnode, [('output_image', 'ants_reg')])
    ])
    return warp
def _make_nodes(self, cwd=None):
    """Cast generated nodes to be Arcana nodes"""
    for idx, nipype_node in NipypeMapNode._make_nodes(self, cwd=cwd):
        # Re-brand the NiPype node as an Arcana Node, then initialise
        # the Arcana-specific parameters copied from this MapNode.
        nipype_node.__class__ = Node
        arcana_kwargs = {attr: getattr(self, attr)
                         for attr in self.arcana_params}
        nipype_node._arcana_init(**arcana_kwargs)
        yield idx, nipype_node
def create_workflow():
    """Build a workflow registering a manually drawn mask to each run.

    Estimates the transform from each functional run to the reference
    functional the mask was drawn on, inverts it, and applies the inverse
    to bring the manual mask into each run's native space.
    """
    wf = Workflow(name='transform_manual_mask')

    input_node = Node(
        IdentityInterface(fields=[
            'subject_id',
            'session_id',
            'manualmask',
            'manualmask_func_ref',
            'funcs',
        ]),
        name='in')

    # Per-run transform: run -> mask's reference functional.
    estimate_xfm = MapNode(
        fsl.FLIRT(),
        iterfield=['in_file'],
        name='findtrans')

    # Invert to obtain reference functional -> run.
    invert_xfm = MapNode(
        fsl.ConvertXFM(invert_xfm=True),
        name='invert',
        iterfield=['in_file'],
    )

    # Resample the manual mask into each run's space.
    apply_xfm = MapNode(
        ApplyXFMRefName(),
        name='funcreg',
        iterfield=['in_matrix_file', 'reference'],
    )

    wf.connect([
        (input_node, estimate_xfm, [('funcs', 'in_file'),
                                    ('manualmask_func_ref', 'reference')]),
        (estimate_xfm, invert_xfm, [('out_matrix_file', 'in_file')]),
        (invert_xfm, apply_xfm, [('out_file', 'in_matrix_file')]),
        (input_node, apply_xfm, [('manualmask', 'in_file'),
                                 ('funcs', 'reference')]),
    ])
    return wf
def _make_nodes(self, cwd=None):
    """Cast generated nodes to be Arcana nodes"""
    for idx, nipype_node in NipypeMapNode._make_nodes(self, cwd=cwd):
        # Re-brand the NiPype node as the configured Arcana node class
        # and carry across the Arcana-specific state held on this
        # MapNode.
        nipype_node.__class__ = self.node_cls
        nipype_node._environment = self._environment
        nipype_node._versions = self._versions
        nipype_node._wall_time = self._wall_time
        nipype_node._annotations = self._annotations
        yield idx, nipype_node
def create_workflow(xfm_dir, xfm_pattern, atlas_dir, atlas_pattern,
                    source_dir, source_pattern, work_dir, out_dir,
                    name="new_data_to_atlas_space"):
    """Resample source images into atlas space and average them.

    Grabs source images, their transformations and the atlas target,
    resamples every source with its transform onto the atlas grid, then
    averages the resampled images and sinks the results.
    """
    wf = Workflow(name=name)
    wf.base_dir = os.path.join(work_dir)

    def _make_grabber(node_name, base_dir, template):
        # One DataGrabber per input collection; sorted listings keep the
        # source/transform pairings stable.
        grabber = Node(interface=DataGrabber(sort_filelist=True),
                       name=node_name)
        grabber.inputs.base_directory = os.path.abspath(base_dir)
        grabber.inputs.template = template
        return grabber

    datasource_source = _make_grabber('datasource_source',
                                      source_dir, source_pattern)
    datasource_xfm = _make_grabber('datasource_xfm', xfm_dir, xfm_pattern)
    datasource_atlas = _make_grabber('datasource_atlas',
                                     atlas_dir, atlas_pattern)

    resample = MapNode(
        interface=Resample(sinc_interpolation=True),
        name='resample_',
        iterfield=['input_file', 'transformation'])

    # NOTE(review): `iterfield` is a MapNode option; on a plain Node it
    # appears to be an inert extra kwarg -- confirm intent.
    bigaverage = Node(
        interface=BigAverage(output_float=True, robust=False),
        name='bigaverage',
        iterfield=['input_file'])

    datasink = Node(
        interface=DataSink(base_directory=out_dir, container=out_dir),
        name='datasink')

    wf.connect(datasource_source, 'outfiles', resample, 'input_file')
    wf.connect(datasource_xfm, 'outfiles', resample, 'transformation')
    wf.connect(datasource_atlas, 'outfiles', resample, 'like')
    wf.connect(resample, 'output_file', bigaverage, 'input_files')
    wf.connect([(bigaverage, datasink, [('output_file', 'average')])])
    wf.connect([(resample, datasink, [('output_file', 'atlas_space')])])
    wf.connect([(datasource_xfm, datasink, [('outfiles', 'transforms')])])
    return wf
name='selecttemplates') wf.connect([(infosource, selectfiles, [('subject', 'subject'), ('ses', 'ses')])]) #wf.connect([(infosource, selecttemplates, [('ses','ses')])]) ############ ## Step 1 ## ############ # Bias correct the T1 and TSE #input_image not input T1_N4_n = MapNode(N4BiasFieldCorrection(dimension=3, bspline_fitting_distance=300, shrink_factor=2, n_iterations=[50, 50, 40, 30], rescale_intensities=True, num_threads=20), name='T1_N4_n', iterfield=['input_image']) wf.connect([(selectfiles, T1_N4_n, [('mprage', 'input_image')])]) T2_N4_n = MapNode(N4BiasFieldCorrection(dimension=3, bspline_fitting_distance=300, shrink_factor=2, n_iterations=[50, 50, 40, 30], rescale_intensities=True, num_threads=20), name='T2_N4_n', iterfield=['input_image']) wf.connect([(selectfiles, T2_N4_n, [('tse', 'input_image')])])
def create_workflow(unwarp_direction='y'):
    """Build a fieldmap-based EPI unwarping workflow (FSL PRELUDE + FUGUE).

    Converts the phase-difference map to radians, unwraps it, rescales it
    to rad/s, unmasks the fieldmap, and applies it to undistort both the
    functional runs and their masks.

    Parameters
    ----------
    unwarp_direction : str
        Phase-encoding (unwarp) direction passed to FUGUE.

    Returns
    -------
    Workflow
        Inputs (node ``in``): ``funcs``, ``funcmasks``, ``fmap_phasediff``,
        ``fmap_magnitude``, ``fmap_mask``.
        Outputs (node ``out``): undistorted ``funcs`` and ``funcmasks``.
    """
    workflow = Workflow(name='func_unwarp')

    inputs = Node(
        IdentityInterface(fields=[
            # 'subject_id',
            # 'session_id',
            'funcs',
            'funcmasks',
            'fmap_phasediff',
            'fmap_magnitude',
            'fmap_mask',
        ]), name='in')

    outputs = Node(IdentityInterface(fields=[
        'funcs',
        'funcmasks',
    ]), name='out')

    # --- --- --- --- --- --- --- Convert to radians --- --- --- --- --- ---
    # Equivalent shell command:
    # fslmaths $FUNCDIR/"$SUB"_B0_phase -div 100 -mul 3.141592653589793116
    #   -odt float $FUNCDIR/"$SUB"_B0_phase_rescaled
    # in_file --> out_file
    phase_radians = Node(fsl.ImageMaths(
        op_string='-mul 3.141592653589793116 -div 100',
        out_data_type='float',
        suffix='_radians',
    ), name='phaseRadians')

    workflow.connect(inputs, 'fmap_phasediff', phase_radians, 'in_file')

    # --- --- --- --- --- --- --- Unwrap Fieldmap --- --- --- --- --- ---
    # --- Unwrap phase
    # prelude -p $FUNCDIR/"$SUB"_B0_phase_rescaled
    #   -a $FUNCDIR/"$SUB"_B0_magnitude
    #   -o $FUNCDIR/"$SUB"_fmri_B0_phase_rescaled_unwrapped
    #   -m $FUNCDIR/"$SUB"_B0_magnitude_brain_mask
    # magnitude_file, phase_file [, mask_file] --> unwrapped_phase_file
    unwrap = MapNode(
        PRELUDE(),
        name='unwrap',
        iterfield=['mask_file'],
    )
    workflow.connect([
        (inputs, unwrap, [('fmap_magnitude', 'magnitude_file')]),
        (inputs, unwrap, [('fmap_mask', 'mask_file')]),
        (phase_radians, unwrap, [('out_file', 'phase_file')]),
    ])

    # --- --- --- --- --- --- --- Convert to Radians / Sec --- --- --- ---
    # fslmaths $FUNCDIR/"$SUB"_B0_phase_rescaled_unwrapped
    #   -mul 200 $FUNCDIR/"$SUB"_B0_phase_rescaled_unwrapped
    rescale = MapNode(
        fsl.ImageMaths(op_string='-mul 200'),
        name='rescale',
        iterfield=['in_file'],
    )
    workflow.connect(unwrap, 'unwrapped_phase_file', rescale, 'in_file')

    # --- --- --- --- --- --- --- Unmask fieldmap --- --- --- --- ---
    unmask_phase = MapNode(
        FUGUE(
            save_unmasked_fmap=True,
            unwarp_direction=unwarp_direction,
        ),
        name='unmask_phase',
        iterfield=['mask_file', 'fmap_in_file'],
    )
    workflow.connect(rescale, 'out_file', unmask_phase, 'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', unmask_phase, 'mask_file')

    # --- --- --- --- --- --- --- Undistort functionals --- --- --- ---
    # phasemap_in_file = phasediff
    # mask_file = mask
    # in_file = functional image
    # dwell_time = 0.0005585 s
    # unwarp_direction
    undistort = MapNode(
        FUGUE(
            dwell_time=0.0005585,
            # based on Process-NHP-MRI/Process_functional_data.md:
            asym_se_time=0.020,
            smooth3d=2.0,
            median_2dfilter=True,
            unwarp_direction=unwarp_direction,
        ),
        name='undistort',
        iterfield=['in_file', 'mask_file', 'fmap_in_file'],
    )
    workflow.connect(unmask_phase, 'fmap_out_file',
                     undistort, 'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', undistort, 'mask_file')
    workflow.connect(inputs, 'funcs', undistort, 'in_file')

    # Apply the identical undistortion to the functional masks so they
    # stay aligned with the undistorted runs.
    undistort_masks = undistort.clone('undistort_masks')
    workflow.connect(unmask_phase, 'fmap_out_file',
                     undistort_masks, 'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', undistort_masks, 'mask_file')
    workflow.connect(inputs, 'funcmasks', undistort_masks, 'in_file')

    workflow.connect(undistort, 'unwarped_file', outputs, 'funcs')
    workflow.connect(undistort_masks, 'unwarped_file', outputs, 'funcmasks')
    return workflow
def create_moco_pipeline(working_dir, ds_dir, name='motion_correction'): """ Workflow for motion correction to 1st volume based on https://github.com/NeuroanatomyAndConnectivity/pipelines/blob/master/src/lsd_lemon/func_preproc/moco.py """ # initiate workflow moco_wf = Workflow(name=name) moco_wf.base_dir = os.path.join(working_dir,'LeiCA_resting', 'rsfMRI_preprocessing') # set fsl output fsl.FSLCommand.set_default_output_type('NIFTI_GZ') # I/O NODES inputnode = Node(util.IdentityInterface(fields=['epi', 'vols_to_drop']), name='inputnode') outputnode = Node(util.IdentityInterface(fields=['epi_moco', 'par_moco', 'mat_moco', 'rms_moco', 'initial_mean_epi_moco', 'rotplot', 'transplot', 'dispplots', 'tsnr_file', 'epi_mask']), name='outputnode') ds = Node(nio.DataSink(base_directory=ds_dir), name='ds') ds.inputs.substitutions = [('_TR_id_', 'TR_')] # REMOVE FIRST VOLUMES drop_vols = Node(util.Function(input_names=['in_file','t_min'], output_names=['out_file'], function=strip_rois_func), name='remove_vol') moco_wf.connect(inputnode, 'epi', drop_vols, 'in_file') moco_wf.connect(inputnode, 'vols_to_drop', drop_vols, 't_min') # MCFILRT MOCO TO 1st VOLUME mcflirt = Node(fsl.MCFLIRT(save_mats=True, save_plots=True, save_rms=True, ref_vol=0, out_file='rest_realigned.nii.gz' ), name='mcflirt') moco_wf.connect(drop_vols, 'out_file', mcflirt, 'in_file') moco_wf.connect([(mcflirt, ds, [('par_file', 'realign.par.@par'), ('mat_file', 'realign.MAT.@mat'), ('rms_files', 'realign.plots.@rms')])]) moco_wf.connect([(mcflirt, outputnode, [('out_file', 'epi_moco'), ('par_file', 'par_moco'), ('mat_file', 'mat_moco'), ('rms_files', 'rms_moco')])]) # CREATE MEAN EPI (INTENSITY NORMALIZED) initial_mean_epi_moco = Node(fsl.maths.MeanImage(dimension='T', out_file='initial_mean_epi_moco.nii.gz'), name='initial_mean_epi_moco') moco_wf.connect(mcflirt, 'out_file', initial_mean_epi_moco, 'in_file') moco_wf.connect(initial_mean_epi_moco, 'out_file', outputnode, 'initial_mean_epi_moco') 
moco_wf.connect(initial_mean_epi_moco, 'out_file', ds, 'QC.initial_mean_epi_moco') # PLOT MOTION PARAMETERS rotplotter = Node(fsl.PlotMotionParams(in_source='fsl', plot_type='rotations', out_file='rotation_plot.png'), name='rotplotter') moco_wf.connect(mcflirt, 'par_file', rotplotter, 'in_file') moco_wf.connect(rotplotter, 'out_file', ds, 'realign.plots.@rotplot') transplotter = Node(fsl.PlotMotionParams(in_source='fsl', plot_type='translations', out_file='translation_plot.png'), name='transplotter') moco_wf.connect(mcflirt, 'par_file', transplotter, 'in_file') moco_wf.connect(transplotter, 'out_file', ds, 'realign.plots.@transplot') dispplotter = MapNode(interface=fsl.PlotMotionParams(in_source='fsl', plot_type='displacement'), name='dispplotter', iterfield=['in_file']) dispplotter.iterables = ('plot_type', ['displacement']) moco_wf.connect(mcflirt, 'rms_files', dispplotter, 'in_file') moco_wf.connect(dispplotter, 'out_file', ds, 'realign.plots.@dispplots') moco_wf.write_graph(dotfilename=moco_wf.name, graph2use='flat', format='pdf') return moco_wf
out_fsl_file=True), name='bbregister') # Convert the BBRegister transformation to ANTS ITK format convert2itk = Node(C3dAffineTool(fsl2ras=True, itk_transform=True), name='convert2itk') # Concatenate BBRegister's and ANTS' transforms into a list merge = Node(Merge(2), iterfield=['in2'], name='mergexfm') # Transform the contrast images. First to anatomical and then to the target warpall = MapNode(ApplyTransforms(args='--float', input_image_type=3, interpolation='Linear', invert_transform_flags=[False, False], num_threads=1, reference_image=template, terminal_output='file'), name='warpall', iterfield=['input_image']) # Transform the mean image. First to anatomical and then to the target warpmean = Node(ApplyTransforms(args='--float', input_image_type=3, interpolation='Linear', invert_transform_flags=[False, False], num_threads=1, reference_image=template, terminal_output='file'), name='warpmean')
group = Workflow(name='group') group.base_dir=working_dir # sink sink = Node(nio.DataSink(base_directory=out_dir, parameterization=False), name='sink') '''groupmeans and sdv ======================= ''' # merge means merger = MapNode(fsl.Merge(dimension='t'), iterfield=['in_files'], name='merger') merger.inputs.in_files=mean_methodlist # calculate mean of means meaner = MapNode(fsl.maths.MeanImage(dimension='T'), iterfield=['in_file', 'out_file'], name='meaner') meaner.inputs.out_file=['lin_groupmean.nii.gz','nonlin_groupmean.nii.gz','fmap_groupmean.nii.gz','topup_groupmean.nii.gz'] group.connect([(merger, meaner, [('merged_file', 'in_file')])]) # mask mean files mean_masked = MapNode(fsl.BinaryMaths(operation='mul'), iterfield=['in_file', 'out_file'], name='mean_masked') mean_masked.inputs.out_file=['lin_groupmean.nii.gz','nonlin_groupmean.nii.gz','fmap_groupmean.nii.gz','topup_groupmean.nii.gz']
corr_epi_txt = corr_fields_txt.clone(name='corr_epi_txt') corr_epi_txt.inputs.filename='correlation_groundtruth.txt' simulated.connect([(simulation, make_list2, [('outputnode.lin_coreg', 'file1'), ('outputnode.nonlin_coreg', 'file2'), ('outputnode.fmap_coreg', 'file3')]), (make_list2, corr_epi, [('filelist', 'image2')]), (groundtruth, corr_epi, [('outputnode.lin_coreg', 'image1')]), (selectfiles, corr_epi, [('anat_brain_mask', 'mask')]), (corr_epi, corr_epi_txt, [('linreg_stats', 'stats')])]) # similarity to anatomy lin_sim = MapNode(interface = nutil.Similarity(), name = 'similarity_lin', iterfield=['metric']) lin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1'] nonlin_sim = lin_sim.clone(name='similarity_nonlin') nonlin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1'] fmap_sim = lin_sim.clone(name='similarity_fmap') fmap_sim.inputs.metric = ['mi','nmi','cc','cr','crl1'] def write_simtext(lin_metrics, nonlin_metrics, fmap_metrics, filename): import numpy as np import os lin_array = np.array(lin_metrics) lin_array=lin_array.reshape(np.size(lin_array),1) nonlin_array = np.array(nonlin_metrics) nonlin_array=nonlin_array.reshape(np.size(nonlin_array),1) fmap_array = np.array(fmap_metrics)
def build_correlation_wf(Registration=True, use_Ankita_Function=False,
                         name='pearsonCorrcalc'):
    """Build a workflow computing voxel-wise Pearson correlation matrices.

    Parameters
    ----------
    Registration : bool
        If True, the per-subject correlation maps are additionally
        transformed to standard space (via the ``func2std`` matrix),
        masked, and converted to ``.npy``. If False the correlation is
        computed directly and no spatial transform is applied.
    use_Ankita_Function : bool
        Selects the correlation implementation backing the
        ``coff_matrix`` node (``pearson_corr_Ankita`` versus the
        ``pearsonr_with_roi_mean*`` variants).
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        With ``inputspec``/``outputspec`` identity nodes; outputs the
        correlation matrix files (plus NIfTI versions in the
        no-registration, non-Ankita branch).
    """
    corr_wf = Workflow(name=name)
    if Registration:
        inputnode = Node(interface=util.IdentityInterface(fields=[
            'in_files', 'atlas_files', 'func2std', 'reference', 'mask_file'
        ]), name='inputspec')
        outputnode = Node(
            interface=util.IdentityInterface(fields=['pearsonCorr_files']),
            name='outputspec')
        # The two branches below build identical node chains and differ
        # only in which correlation function is used.
        if use_Ankita_Function:
            coff_matrix = MapNode(util.Function(
                function=pearson_corr_Ankita,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                iterfield=['in_file', 'atlas_file'],
                name='coff_matrix')
            # warp each correlation map to standard space
            transform_corr = MapNode(interface=fsl.ApplyXFM(interp='spline'),
                                     iterfield=['in_file', 'in_matrix_file'],
                                     name='transform_corr')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                iterfield=['Corr_file'],
                name='coff_matrix_in_npy')
        else:
            coff_matrix = MapNode(util.Function(
                function=pearsonr_with_roi_mean_w_reg,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                iterfield=['in_file', 'atlas_file'],
                name='coff_matrix')
            transform_corr = MapNode(interface=fsl.ApplyXFM(interp='spline'),
                                     iterfield=['in_file', 'in_matrix_file'],
                                     name='transform_corr')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                iterfield=['Corr_file'],
                name='coff_matrix_in_npy')
        datasink = Node(interface=DataSink(), name='datasink')
        # correlate -> warp -> mask -> npy -> output/sink
        corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
        corr_wf.connect(inputnode, 'atlas_files', coff_matrix, 'atlas_file')
        corr_wf.connect(coff_matrix, 'coff_matrix_file',
                        transform_corr, 'in_file')
        corr_wf.connect(inputnode, 'func2std',
                        transform_corr, 'in_matrix_file')
        corr_wf.connect(inputnode, 'reference', transform_corr, 'reference')
        corr_wf.connect(transform_corr, 'out_file', maskCorrFile, 'in_file')
        corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')
        corr_wf.connect(maskCorrFile, 'out_file',
                        make_npy_from_Corr, 'Corr_file')
        corr_wf.connect(inputnode, 'mask_file',
                        make_npy_from_Corr, 'mask_file')
        corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file',
                        outputnode, 'pearsonCorr_files')
        corr_wf.connect(outputnode, 'pearsonCorr_files',
                        datasink, 'out_file')
    else:
        inputnode = Node(interface=util.IdentityInterface(
            fields=['in_files', 'atlas_file', 'mask_file']),
            name='inputspec')
        outputnode = Node(interface=util.IdentityInterface(
            fields=['pearsonCorr_files', 'pearsonCorr_files_in_nii']),
            name='outputspec')
        if use_Ankita_Function:
            coff_matrix = MapNode(util.Function(
                function=pearson_corr_Ankita,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                iterfield=['in_file'],
                name='coff_matrix')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                iterfield=['Corr_file'],
                name='coff_matrix_in_npy')
            datasink = Node(interface=DataSink(), name='datasink')
            # correlate -> mask -> npy -> output/sink (no registration)
            corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
            corr_wf.connect(inputnode, 'atlas_file',
                            coff_matrix, 'atlas_file')
            corr_wf.connect(coff_matrix, 'coff_matrix_file',
                            maskCorrFile, 'in_file')
            corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')
            corr_wf.connect(maskCorrFile, 'out_file',
                            make_npy_from_Corr, 'Corr_file')
            corr_wf.connect(inputnode, 'mask_file',
                            make_npy_from_Corr, 'mask_file')
            corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file',
                            outputnode, 'pearsonCorr_files')
            corr_wf.connect(outputnode, 'pearsonCorr_files',
                            datasink, 'out_file')
        else:
            # single node computes the matrix and a NIfTI version of it
            coff_matrix = MapNode(util.Function(
                function=pearsonr_with_roi_mean,
                input_names=['in_file', 'atlas_file', 'mask_file'],
                output_names=['coff_matrix_file',
                              'coff_matrix_file_in_nii']),
                iterfield=['in_file'],
                name='coff_matrix')
            datasink = Node(interface=DataSink(), name='datasink')
            # selectfile = MapNode(interface=util.Select(index=[0]), iterfield = ['inlist'],name='select')
            corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
            corr_wf.connect(inputnode, 'atlas_file',
                            coff_matrix, 'atlas_file')
            corr_wf.connect(inputnode, 'mask_file',
                            coff_matrix, 'mask_file')
            corr_wf.connect(coff_matrix, 'coff_matrix_file',
                            outputnode, 'pearsonCorr_files')
            corr_wf.connect(coff_matrix, 'coff_matrix_file_in_nii',
                            outputnode, 'pearsonCorr_files_in_nii')
            corr_wf.connect(outputnode, 'pearsonCorr_files',
                            datasink, 'out_file')
            # coff_matrix = MapNode(util.Function(function=pearsonr_with_roi_mean_w_reg,
            #                                     input_names=['in_file','atlas_file'],
            #                                     output_names=['coff_matrix_file']),
            #                       iterfield=['in_file'],
            #                       name = 'coff_matrix')
            # maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
            #                                                 op_string='-mas'),
            #                        iterfield=['in_file'],
            #                        name = 'maskWarpFile')
            # make_npy_from_Corr = MapNode(util.Function(function=make_npy_from_CorrFile,
            #                                            input_names=['Corr_file','mask_file'],
            #                                            output_names=['coff_matrix_file']),
            #                              iterfield=['Corr_file'],
            #                              name = 'coff_matrix_in_npy')
            # datasink = Node(interface=DataSink(), name='datasink')
            # corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
            # corr_wf.connect(inputnode, 'atlas_file', coff_matrix, 'atlas_file')
            # corr_wf.connect(coff_matrix,'coff_matrix_file', maskCorrFile, 'in_file')
            # corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')
            # corr_wf.connect(maskCorrFile,'out_file', make_npy_from_Corr, 'Corr_file')
            # corr_wf.connect(inputnode,'mask_file', make_npy_from_Corr, 'mask_file')
            # corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file', outputnode, 'pearsonCorr_files')
            # corr_wf.connect(outputnode, 'pearsonCorr_files', datasink, 'out_file')
    return corr_wf
def create_similarity_pipeline(name):
    """Build a workflow scoring similarity of four coregistration methods.

    Resamples each method's mean EPI (lin/nonlin/fmap/topup) and the mask
    onto the anatomical grid, computes five similarity metrics per method
    against the anatomy, and writes all values to one text file per
    subject (columns: lin, nonlin, fmap, topup; rows: metrics).

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    Workflow
        Input fields on ``inputnode``; output ``textfile`` holds the
        absolute path of the written metrics file.
    """
    similarity=Workflow(name=name)

    # inputnode
    inputnode=Node(util.IdentityInterface(fields=['anat_brain',
                                                  'mask',
                                                  'lin_mean',
                                                  'nonlin_mean',
                                                  'fmap_mean',
                                                  'topup_mean',
                                                  'filename'
                                                  ]),
                   name='inputnode')
    # outputnode
    outputnode=Node(util.IdentityInterface(fields=['textfile']),
                    name='outputnode')

    # resample all means to make sure they have the same resolution as
    # reference anatomy
    resamp_mask = Node(afni.Resample(outputtype='NIFTI_GZ'),
                       name='resample_mask')
    resamp_lin = resamp_mask.clone(name = 'resample_lin')
    resamp_nonlin = resamp_mask.clone(name='resample_nonlin')
    resamp_fmap = resamp_mask.clone(name='resample_fmap')
    resamp_topup = resamp_mask.clone(name='resample_topup')
    similarity.connect([(inputnode, resamp_mask, [('mask', 'in_file'),
                                                  ('anat_brain', 'master')]),
                        (inputnode, resamp_lin, [('lin_mean', 'in_file'),
                                                 ('anat_brain', 'master')]),
                        (inputnode, resamp_nonlin, [('nonlin_mean', 'in_file'),
                                                    ('anat_brain', 'master')]),
                        (inputnode, resamp_fmap, [('fmap_mean', 'in_file'),
                                                  ('anat_brain', 'master')]),
                        (inputnode, resamp_topup, [('topup_mean', 'in_file'),
                                                   ('anat_brain', 'master')]),
                        ])

    # calculate similarity (all possible metrics) for each methods to mni
    lin_sim = MapNode(interface = nutil.Similarity(),
                      name = 'similarity_lin',
                      iterfield=['metric'])
    lin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
    nonlin_sim = lin_sim.clone(name='similarity_nonlin')
    nonlin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
    fmap_sim = lin_sim.clone(name='similarity_fmap')
    fmap_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
    topup_sim = lin_sim.clone(name='similarity_topup')
    topup_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']

    # each method's resampled mean is compared to the anatomy within the
    # resampled mask (used as both mask1 and mask2)
    similarity.connect([(inputnode, lin_sim, [('anat_brain', 'volume1')]),
                        (resamp_lin, lin_sim, [('out_file', 'volume2')]),
                        (resamp_mask, lin_sim, [('out_file', 'mask1'),
                                                ('out_file', 'mask2')]),
                        (inputnode, nonlin_sim, [('anat_brain', 'volume1')]),
                        (resamp_nonlin, nonlin_sim, [('out_file', 'volume2')]),
                        (resamp_mask, nonlin_sim, [('out_file', 'mask1'),
                                                   ('out_file', 'mask2')]),
                        (inputnode, fmap_sim, [('anat_brain', 'volume1')]),
                        (resamp_fmap, fmap_sim, [('out_file', 'volume2')]),
                        (resamp_mask, fmap_sim, [('out_file', 'mask1'),
                                                 ('out_file', 'mask2')]),
                        (inputnode, topup_sim, [('anat_brain', 'volume1')]),
                        (resamp_topup, topup_sim, [('out_file', 'volume2')]),
                        (resamp_mask, topup_sim, [('out_file', 'mask1'),
                                                  ('out_file', 'mask2')])
                        ])

    # write values to one text file per subject
    def write_text(lin_metrics, nonlin_metrics, fmap_metrics,
                   topup_metrics, filename):
        # Runs inside a nipype Function node: stack the four metric lists
        # as columns and save them with numpy.
        import numpy as np
        import os
        lin_array = np.array(lin_metrics)
        lin_array=lin_array.reshape(np.size(lin_array),1)
        nonlin_array = np.array(nonlin_metrics)
        nonlin_array=nonlin_array.reshape(np.size(nonlin_array),1)
        fmap_array = np.array(fmap_metrics)
        fmap_array=fmap_array.reshape(np.size(fmap_array),1)
        topup_array = np.array(topup_metrics)
        topup_array=topup_array.reshape(np.size(topup_array),1)
        metrics=np.concatenate((lin_array, nonlin_array, fmap_array,
                                topup_array),axis=1)
        metrics_file = filename
        np.savetxt(metrics_file, metrics, delimiter=' ', fmt='%f')
        return os.path.abspath(filename)

    write_txt = Node(interface=Function(input_names=['lin_metrics',
                                                     'nonlin_metrics',
                                                     'fmap_metrics',
                                                     'topup_metrics',
                                                     'filename'],
                                        output_names=['txtfile'],
                                        function=write_text),
                     name='write_file')

    similarity.connect([(inputnode, write_txt, [('filename', 'filename')]),
                        (lin_sim, write_txt, [('similarity', 'lin_metrics')]),
                        (nonlin_sim, write_txt, [('similarity', 'nonlin_metrics')]),
                        (fmap_sim, write_txt, [('similarity', 'fmap_metrics')]),
                        (topup_sim, write_txt, [('similarity', 'topup_metrics')]),
                        (write_txt, outputnode, [('txtfile', 'textfile')])
                        ])
    return similarity
def create_denoise_pipeline(name='denoise'):
    """Denoise a coregistered EPI with aCompCor + global signal regression.

    Builds a WM+CSF noise mask from the supplied tissue segmentations,
    extracts 5 aCompCor components within it, appends the global signal
    as an extra regressor, regresses the resulting design(s) out of the
    EPI with fsl_glm, and finally applies a temporal highpass filter.

    Parameters
    ----------
    name : str
        Intended workflow name.
        NOTE(review): the parameter is currently ignored — the Workflow is
        hard-coded to name='denoise'. Confirm before relying on `name`.

    Returns
    -------
    Workflow
        Inputs on 'inputnode' (brain_mask, epi_coreg, wmseg, csfseg,
        highpass_freq, tr); denoised series on 'outputnode.ts_filtered'
        (highpassed) and 'outputnode.ts_fullspectrum' (no highpass).

    NOTE(review): this file defines create_denoise_pipeline twice; the
    later definition shadows this one at import time.
    """
    # workflow
    denoise = Workflow(name='denoise')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['brain_mask',
                                                              'epi_coreg',
                                                              'wmseg',
                                                              'csfseg',
                                                              'highpass_freq',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=['wmcsf_mask',
                                                               'combined_motion',
                                                               'comp_regressor',
                                                               'comp_F',
                                                               'comp_pF',
                                                               'out_betas',
                                                               'ts_fullspectrum',
                                                               'ts_filtered']),
                      name='outputnode')
    # combine tissue classes to noise mask (voxelwise add of WM and CSF maps)
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask.nii'),
                      name='wmcsf_mask')
    denoise.connect([(inputnode, wmcsf_mask, [('wmseg', 'in_file'),
                                              ('csfseg', 'operand_file')])])
    # resample + binarize wm_csf mask to epi resolution.
    resample_wmcsf = Node(afni.Resample(resample_mode='NN',
                                        outputtype='NIFTI_GZ',
                                        out_file='wmcsf_mask_lowres.nii.gz'),
                          name='resample_wmcsf')
    bin_wmcsf_mask = Node(fsl.utils.ImageMaths(), name="bin_wmcsf_mask")
    # strict threshold + erosion keeps only voxels confidently WM/CSF,
    # reducing grey-matter contamination of the noise components
    bin_wmcsf_mask.inputs.op_string = '-nan -thr 0.99 -ero -bin'
    denoise.connect([(wmcsf_mask, resample_wmcsf, [('out_file', 'in_file')]),
                     (inputnode, resample_wmcsf, [('brain_mask', 'master')]),
                     (resample_wmcsf, bin_wmcsf_mask, [('out_file', 'in_file')]),
                     (bin_wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])
                     ])
    # no other denoising filters created here because AROMA performs already well.
    compcor = Node(conf.ACompCor(), name="compcor")
    # 5 components per Behzadi et al.:
    # https://www.sciencedirect.com/science/article/pii/S105381191400175X?via%3Dihub
    compcor.inputs.num_components = 5
    denoise.connect([
        (inputnode, compcor, [('epi_coreg', 'realigned_file')]),
        (bin_wmcsf_mask, compcor, [('out_file', 'mask_files')]),
    ])

    def create_designs(compcor_regressors, epi_coreg, mask):
        """Return [compcor+global-signal design, compcor-only design]."""
        # Executed inside a nipype Function node, hence the local imports.
        import numpy as np
        import pandas as pd
        import os
        from nilearn.input_data import NiftiMasker
        brain_masker = NiftiMasker(mask_img=mask,
                                   smoothing_fwhm=None,
                                   standardize=False,
                                   memory='nilearn_cache',
                                   memory_level=5,
                                   verbose=2)
        # mean over all brain voxels at each timepoint = global signal
        whole_brain = brain_masker.fit_transform(epi_coreg)
        avg_signal = np.mean(whole_brain, axis=1)
        all_regressors = pd.read_csv(compcor_regressors, sep='\t')
        # add global signal.
        all_regressors['global_signal'] = avg_signal
        fn = os.getcwd() + '/all_regressors.txt'
        all_regressors.to_csv(fn, sep='\t', index=False)
        return [fn, compcor_regressors]

    # create a list of design to loop over.
    create_design = Node(util.Function(input_names=['compcor_regressors',
                                                    'epi_coreg',
                                                    'mask'],
                                       output_names=['reg_list'],
                                       function=create_designs),
                         name='create_design')
    denoise.connect([
        (compcor, create_design, [('components_file', 'compcor_regressors')]),
        (inputnode, create_design, [('epi_coreg', 'epi_coreg')]),
        (inputnode, create_design, [('brain_mask', 'mask')])
    ])
    # regress compcor and other noise components; MapNode runs the GLM
    # once per design in reg_list (with and without global signal)
    filter2 = MapNode(fsl.GLM(out_f_name='F_noise.nii.gz',
                              out_pf_name='pF_noise.nii.gz',
                              out_res_name='rest2anat_denoised.nii.gz',
                              output_type='NIFTI_GZ',
                              demean=True),
                      iterfield=['design'],
                      name='filternoise')
    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, filter2, [('epi_coreg', 'in_file')]),
                     # (createfilter2, filter2, [('out_files', 'design')]),
                     # (compcor, filter2, [('components_file', 'design')]),
                     (create_design, filter2, [('reg_list', 'design')]),
                     (inputnode, filter2, [('brain_mask', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF'),
                                            ('out_file', 'out_betas'),
                                            ('out_res', 'ts_fullspectrum'),
                                            ])
                     ])

    def calc_sigma(TR, highpass):
        """Convert a highpass cutoff (Hz) to fslmaths sigma (volumes)."""
        # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
        sigma = 1. / (2 * TR * highpass)
        return sigma

    calc_s = Node(util.Function(input_names=['TR', 'highpass'],
                                output_names=['sigma'],
                                function=calc_sigma),
                  name='calc_s')
    denoise.connect(inputnode, 'tr', calc_s, 'TR')
    denoise.connect(inputnode, 'highpass_freq', calc_s, 'highpass')
    # use only highpass filter (because high-frequency content is already
    # somewhat filtered in AROMA))
    highpass_filter = MapNode(fsl.TemporalFilter(out_file='rest_denoised_highpassed.nii'),
                              name='highpass_filter',
                              iterfield=['in_file'])
    highpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(calc_s, highpass_filter, [('sigma', 'highpass_sigma')]),
                     (filter2, highpass_filter, [('out_res', 'in_file')]),
                     (highpass_filter, outputnode, [('out_file', 'ts_filtered')])
                     ])
    return denoise
def create_denoise_pipeline(name='denoise'):
    """Denoise a coregistered EPI: motion/artefact + CompCor regression.

    Segments the anatomy with FAST, builds a WM+CSF noise mask, detects
    motion/intensity artefacts, regresses motion+outlier confounds
    (filter1) and then CompCor components (filter2) out of the EPI,
    bandpass filters, and time-normalizes the result.

    Parameters
    ----------
    name : str
        Intended workflow name.
        NOTE(review): currently ignored — the Workflow is hard-coded to
        name='denoise'. Confirm before relying on `name`.

    Returns
    -------
    Workflow
        Inputs on 'inputnode' (anat_brain, brain_mask, epi2anat_dat,
        unwarped_mean, epi_coreg, moco_par, highpass_sigma,
        lowpass_sigma, tr); final series on 'outputnode.normalized_file'.

    NOTE(review): this redefines create_denoise_pipeline — it shadows the
    earlier definition of the same name in this file at import time.
    Depends on module-level helpers (motion_regressors, build_filter1,
    extract_noise_components, time_normalizer, list_to_filename, fs, ra)
    imported elsewhere in the file.
    """
    # workflow
    denoise = Workflow(name='denoise')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'anat_brain', 'brain_mask', 'epi2anat_dat', 'unwarped_mean',
        'epi_coreg', 'moco_par', 'highpass_sigma', 'lowpass_sigma', 'tr'
    ]), name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'wmcsf_mask', 'brain_mask_resamp', 'brain_mask2epi',
        'combined_motion', 'outlier_files', 'intensity_files',
        'outlier_stats', 'outlier_plots', 'mc_regressor', 'mc_F', 'mc_pF',
        'comp_regressor', 'comp_F', 'comp_pF', 'normalized_file'
    ]), name='outputnode')
    # run fast to get tissue probability classes
    fast = Node(fsl.FAST(), name='fast')
    denoise.connect([(inputnode, fast, [('anat_brain', 'in_files')])])

    # functions to select tissue classes
    def selectindex(files, idx):
        # pick a subset of FAST's partial-volume files by index list
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        # pick one file from a list by position
        return files[idx]

    # resample tissue classes (nearest-neighbour keeps binary-ish maps clean)
    resample_tissue = MapNode(afni.Resample(resample_mode='NN',
                                            outputtype='NIFTI_GZ'),
                              iterfield=['in_file'],
                              name='resample_tissue')
    denoise.connect([
        (inputnode, resample_tissue, [('epi_coreg', 'master')]),
        # indices [0, 2] select CSF (pve0) and WM (pve2)
        (fast, resample_tissue, [(('partial_volume_files', selectindex,
                                   [0, 2]), 'in_file')]),
    ])
    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')
    denoise.connect([
        (resample_tissue, binarize_tissue, [('out_file', 'in_file')]),
    ])
    # combine tissue classes to noise mask
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask_lowres.nii.gz'),
                      name='wmcsf_mask')
    denoise.connect([(binarize_tissue, wmcsf_mask,
                      [(('out_file', selectsingle, 0), 'in_file'),
                       (('out_file', selectsingle, 1), 'operand_file')]),
                     (wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])])
    # resample brain mask
    resample_brain = Node(afni.Resample(
        resample_mode='NN',
        outputtype='NIFTI_GZ',
        out_file='T1_brain_mask_lowres.nii.gz'),
        name='resample_brain')
    denoise.connect([(inputnode, resample_brain, [('brain_mask', 'in_file'),
                                                  ('epi_coreg', 'master')]),
                     (resample_brain, outputnode, [('out_file',
                                                    'brain_mask_resamp')])])
    # project brain mask into original epi space fpr quality assessment
    brainmask2epi = Node(fs.ApplyVolTransform(
        interp='nearest',
        inverse=True,
        transformed_file='T1_brain_mask2epi.nii.gz',
    ), name='brainmask2epi')
    denoise.connect([
        (inputnode, brainmask2epi, [('brain_mask', 'target_file'),
                                    ('epi2anat_dat', 'reg_file'),
                                    ('unwarped_mean', 'source_file')]),
        (brainmask2epi, outputnode, [('transformed_file', 'brain_mask2epi')])
    ])
    # perform artefact detection
    artefact = Node(ra.ArtifactDetect(save_plot=True,
                                      use_norm=True,
                                      parameter_source='FSL',
                                      mask_type='file',
                                      norm_threshold=1,
                                      zintensity_threshold=3,
                                      use_differences=[True, False]),
                    name='artefact')
    artefact.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, artefact, [('epi_coreg', 'realigned_files'),
                               ('moco_par', 'realignment_parameters')]),
        (resample_brain, artefact, [('out_file', 'mask_file')]),
        (artefact, outputnode, [('norm_files', 'combined_motion'),
                                ('outlier_files', 'outlier_files'),
                                ('intensity_files', 'intensity_files'),
                                ('statistic_files', 'outlier_stats'),
                                ('plot_files', 'outlier_plots')])
    ])
    # Compute motion regressors
    motreg = Node(util.Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors),
        name='getmotionregress')
    motreg.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, motreg, [('moco_par', 'motion_params')])])
    # Create a filter to remove motion and art confounds
    createfilter1 = Node(util.Function(
        input_names=['motion_params', 'comp_norm', 'outliers',
                     'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1),
        name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    createfilter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (motreg, createfilter1, [('out_files', 'motion_params')]),
        (
            artefact,
            createfilter1,
            [
                # ('norm_files', 'comp_norm'),
                ('outlier_files', 'outliers')
            ]),
        (createfilter1, outputnode, [('out_files', 'mc_regressor')])
    ])
    # regress out motion and art confounds
    filter1 = Node(fsl.GLM(out_f_name='F_mcart.nii.gz',
                           out_pf_name='pF_mcart.nii.gz',
                           out_res_name='rest_mc_denoised.nii.gz',
                           demean=True),
                   name='filtermotion')
    filter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, filter1, [('epi_coreg', 'in_file')]),
                     (createfilter1, filter1,
                      [(('out_files', list_to_filename), 'design')]),
                     (filter1, outputnode, [('out_f', 'mc_F'),
                                            ('out_pf', 'mc_pF')])])
    # create filter with compcor components
    createfilter2 = Node(util.Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
        output_names=['out_files'],
        function=extract_noise_components),
        name='makecompcorfilter')
    createfilter2.inputs.num_components = 6
    createfilter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (createfilter1, createfilter2,
         [(('out_files', list_to_filename), 'extra_regressors')]),
        (filter1, createfilter2, [('out_res', 'realigned_file')]),
        (wmcsf_mask, createfilter2, [('out_file', 'mask_file')]),
        (createfilter2, outputnode, [('out_files', 'comp_regressor')]),
    ])
    # regress compcor and other noise components
    filter2 = Node(fsl.GLM(out_f_name='F_noise.nii.gz',
                           out_pf_name='pF_noise.nii.gz',
                           out_res_name='rest2anat_denoised.nii.gz',
                           demean=True),
                   name='filternoise')
    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(filter1, filter2, [('out_res', 'in_file')]),
                     (createfilter2, filter2, [('out_files', 'design')]),
                     (resample_brain, filter2, [('out_file', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF')])])
    # bandpass filter denoised file
    bandpass_filter = Node(
        fsl.TemporalFilter(out_file='rest_denoised_bandpassed.nii.gz'),
        name='bandpass_filter')
    bandpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, bandpass_filter,
                      [('highpass_sigma', 'highpass_sigma'),
                       ('lowpass_sigma', 'lowpass_sigma')]),
                     (filter2, bandpass_filter, [('out_res', 'in_file')])])
    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, normalize_time, [('tr', 'tr')]),
        (bandpass_filter, normalize_time, [('out_file', 'in_file')]),
        (normalize_time, outputnode, [('out_file', 'normalized_file')])
    ])
    return denoise
def calc_local_metrics(cfg):
    """Compute and datasink local resting-state metrics for one subject.

    For each TR in cfg['TR_list'], computes ALFF/fALFF, ReHo, VMHC and
    temporal-variability maps from preprocessed EPIs, warps them to MNI
    3 mm space, z-scores / mean-standardizes them, writes workflow graphs
    and runs the workflow with the configured plugin.

    Parameters
    ----------
    cfg : dict
        Configuration dictionary; the keys read below are required.
        (Several — dicom_dir, script_dir, vols_to_drop, rois_list,
        lp/hp_cutoff_freq, use_fs_brainmask — are read but unused here;
        kept so missing keys fail fast.)

    Side effects: runs a nipype workflow, writes results under
    cfg['ds_dir'] and graphs/caches under cfg['working_dir'].

    Fix applied (review): the ReHo z-score node was fed the ALFF MNI map
    instead of the ReHo MNI map (copy-paste from the ALFF block); it now
    receives reho_MNIspace_3mm, matching the ALFF/fALFF pattern.
    """
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as freesurfer

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils
    import CPAC.vmhc.vmhc as cpac_vmhc
    import CPAC.registration.registration as cpac_registration
    import CPAC.network_centrality.z_score as cpac_centrality_z_score

    import utils as calc_metrics_utils

    # INPUT PARAMETERS
    dicom_dir = cfg['dicom_dir']
    preprocessed_data_dir = cfg['preprocessed_data_dir']
    working_dir = cfg['working_dir']
    freesurfer_dir = cfg['freesurfer_dir']
    template_dir = cfg['template_dir']
    script_dir = cfg['script_dir']
    ds_dir = cfg['ds_dir']

    subject_id = cfg['subject_id']
    TR_list = cfg['TR_list']

    vols_to_drop = cfg['vols_to_drop']
    rois_list = cfg['rois_list']
    lp_cutoff_freq = cfg['lp_cutoff_freq']
    hp_cutoff_freq = cfg['hp_cutoff_freq']
    use_fs_brainmask = cfg['use_fs_brainmask']

    use_n_procs = cfg['use_n_procs']
    plugin_name = cfg['plugin_name']

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    wf = Workflow(name='LeiCA_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={'stop_on_first_crash': True,
                                 'remove_unnecessary_outputs': True,
                                 'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    # strip MapNode/iterable subfolder names from sink paths
    ds.inputs.regexp_substitutions = [('_variabilty_MNIspace_3mm[0-9]*/', ''),
                                      ('_z_score[0-9]*/', '')]

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']),
                           name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    # get atlas data
    templates_atlases = {
        # 'GM_mask_MNI_2mm': 'SPM_GM/SPM_GM_mask_2mm.nii.gz',
        # 'GM_mask_MNI_3mm': 'SPM_GM/SPM_GM_mask_3mm.nii.gz',
        'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz',
        'vmhc_symm_brain': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_symmetric.nii.gz',
        'vmhc_symm_brain_3mm': 'cpac_image_resources/symmetric/MNI152_T1_3mm_brain_symmetric.nii.gz',
        'vmhc_symm_skull': 'cpac_image_resources/symmetric/MNI152_T1_2mm_symmetric.nii.gz',
        'vmhc_symm_brain_mask_dil': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz',
        'vmhc_config_file_2mm': 'cpac_image_resources/symmetric/T1_2_MNI152_2mm_symmetric.cnf'
    }
    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")

    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'epi_mask': '{subject_id}/rsfMRI_preprocessing/masks/brain_mask_epiSpace/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
        'epi_2_struct_mat': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_struct_mat/TR_{TR_id}/*.mat',
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
        't1w_brain': '{subject_id}/rsfMRI_preprocessing/struct_prep/t1w_brain/t1w_reoriented_maths.nii.gz',
    }
    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    selectfiles.inputs.subject_id = subject_id

    # CREATE TRANSFORMATIONS
    # creat MNI 2 epi warp
    MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
    # NOTE(review): 'reference' is also connected from selectfiles below;
    # the connected epi_mask supersedes this static default — TODO confirm
    # the static assignment is intentional.
    MNI_2_epi_warp.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    wf.connect(selectfiles, 'epi_mask', MNI_2_epi_warp, 'reference')
    wf.connect(selectfiles, 'epi_2_MNI_warp', MNI_2_epi_warp, 'warp')

    # # CREATE GM MASK IN EPI SPACE
    # GM_mask_epiSpace = Node(fsl.ApplyWarp(), name='GM_mask_epiSpace')
    # GM_mask_epiSpace.inputs.out_file = 'GM_mask_epiSpace.nii.gz'
    #
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_2mm', GM_mask_epiSpace, 'in_file')
    # wf.connect(selectfiles, 'epi_mask', GM_mask_epiSpace, 'ref_file')
    # wf.connect(MNI_2_epi_warp, 'inverse_warp', GM_mask_epiSpace, 'field_file')
    # wf.connect(GM_mask_epiSpace, 'out_file', ds, 'GM_mask_epiSpace')

    # fixme
    # # CREATE TS IN MNI SPACE
    # # is it ok to apply the 2mm warpfield to the 3mm template?
    # # seems ok: https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind0904&L=FSL&P=R14011&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
    # epi_bp_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_bp_MNIspace_3mm')
    # epi_bp_MNIspace_3mm.inputs.interp = 'spline'
    # epi_bp_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    # wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_bp_MNIspace_3mm, 'ref_file')
    # wf.connect(selectfiles, 'preproc_epi_bp', epi_bp_MNIspace_3mm, 'in_file')
    # wf.connect(selectfiles, 'epi_2_MNI_warp', epi_bp_MNIspace_3mm, 'field_file')

    # CREATE EPI MASK IN MNI SPACE
    epi_mask_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_mask_MNIspace_3mm')
    epi_mask_MNIspace_3mm.inputs.interp = 'nn'
    epi_mask_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template',
               epi_mask_MNIspace_3mm, 'ref_file')
    wf.connect(selectfiles, 'epi_mask', epi_mask_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_mask_MNIspace_3mm, 'field_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', ds, 'epi_mask_MNIspace_3mm')

    #####################
    # CALCULATE METRICS
    #####################

    # f/ALFF
    alff = cpac_alff.create_alff('alff')
    alff.inputs.hp_input.hp = 0.01
    alff.inputs.lp_input.lp = 0.1
    wf.connect(selectfiles, 'preproc_epi_full_spectrum', alff, 'inputspec.rest_res')
    # wf.connect(GM_mask_epiSpace, 'out_file', alff, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', alff, 'inputspec.rest_mask')
    wf.connect(alff, 'outputspec.alff_img', ds, 'alff.alff')
    wf.connect(alff, 'outputspec.falff_img', ds, 'alff.falff')

    # f/ALFF 2 MNI
    # fixme spline or default?
    alff_MNIspace_3mm = Node(fsl.ApplyWarp(), name='alff_MNIspace_3mm')
    alff_MNIspace_3mm.inputs.interp = 'spline'
    alff_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template',
               alff_MNIspace_3mm, 'ref_file')
    wf.connect(alff, 'outputspec.alff_img', alff_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', alff_MNIspace_3mm, 'field_file')
    wf.connect(alff_MNIspace_3mm, 'out_file', ds, 'alff.alff_MNI_3mm')

    falff_MNIspace_3mm = Node(fsl.ApplyWarp(), name='falff_MNIspace_3mm')
    falff_MNIspace_3mm.inputs.interp = 'spline'
    falff_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template',
               falff_MNIspace_3mm, 'ref_file')
    wf.connect(alff, 'outputspec.falff_img', falff_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', falff_MNIspace_3mm, 'field_file')
    wf.connect(falff_MNIspace_3mm, 'out_file', ds, 'alff.falff_MNI_3mm')

    # f/ALFF_MNI Z-SCORE
    alff_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='alff_MNIspace_3mm',
                                                wf_name='alff_MNIspace_3mm_Z')
    wf.connect(alff_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', alff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(alff_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'alff.alff_MNI_3mm_Z')

    falff_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='falff_MNIspace_3mm',
                                                 wf_name='falff_MNIspace_3mm_Z')
    wf.connect(falff_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', falff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(falff_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'alff.falff_MNI_3mm_Z')

    # f/ALFF_MNI STANDARDIZE BY MEAN
    alff_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='alff_MNIspace_3mm_standardized_mean')
    wf.connect(alff_MNIspace_3mm, 'out_file',
               alff_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file',
               alff_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(alff_MNIspace_3mm_standardized_mean, 'outputnode.out_file',
               ds, 'alff.alff_MNI_3mm_standardized_mean')

    falff_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='falff_MNIspace_3mm_standardized_mean')
    wf.connect(falff_MNIspace_3mm, 'out_file',
               falff_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file',
               falff_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(falff_MNIspace_3mm_standardized_mean, 'outputnode.out_file',
               ds, 'alff.falff_MNI_3mm_standardized_mean')

    # REHO
    reho = cpac_reho.create_reho()
    reho.inputs.inputspec.cluster_size = 27
    wf.connect(selectfiles, 'preproc_epi_bp', reho, 'inputspec.rest_res_filt')
    # wf.connect(GM_mask_epiSpace, 'out_file', reho, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', reho, 'inputspec.rest_mask')
    wf.connect(reho, 'outputspec.raw_reho_map', ds, 'reho.reho')

    # REHO 2 MNI
    # fixme spline or default?
    reho_MNIspace_3mm = Node(fsl.ApplyWarp(), name='reho_MNIspace_3mm')
    reho_MNIspace_3mm.inputs.interp = 'spline'
    reho_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template',
               reho_MNIspace_3mm, 'ref_file')
    wf.connect(reho, 'outputspec.raw_reho_map', reho_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', reho_MNIspace_3mm, 'field_file')
    wf.connect(reho_MNIspace_3mm, 'out_file', ds, 'reho.reho_MNI_3mm')

    # REHO_MNI Z-SCORE
    reho_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='reho_MNIspace_3mm',
                                                wf_name='reho_MNIspace_3mm_Z')
    # BUGFIX: previously connected alff_MNIspace_3mm here, so the "ReHo"
    # z-map was actually a z-scored ALFF map; feed the ReHo MNI map instead.
    wf.connect(reho_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', reho_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(reho_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'reho.reho_MNI_3mm_Z')

    # REHO_MNI STANDARDIZE BY MEAN
    reho_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='reho_MNIspace_3mm_standardized_mean')
    wf.connect(reho_MNIspace_3mm, 'out_file',
               reho_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file',
               reho_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(reho_MNIspace_3mm_standardized_mean, 'outputnode.out_file',
               ds, 'reho.reho_MNI_3mm_standardized_mean')

    # VMHC
    # create registration to symmetrical MNI template
    struct_2_MNI_symm = cpac_registration.create_nonlinear_register(name='struct_2_MNI_symm')
    wf.connect(selectfiles_anat_templates, 'vmhc_config_file_2mm',
               struct_2_MNI_symm, 'inputspec.fnirt_config')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain',
               struct_2_MNI_symm, 'inputspec.reference_brain')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_skull',
               struct_2_MNI_symm, 'inputspec.reference_skull')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain_mask_dil',
               struct_2_MNI_symm, 'inputspec.ref_mask')
    wf.connect(selectfiles, 't1w', struct_2_MNI_symm, 'inputspec.input_skull')
    wf.connect(selectfiles, 't1w_brain', struct_2_MNI_symm, 'inputspec.input_brain')

    wf.connect(struct_2_MNI_symm, 'outputspec.output_brain', ds, 'vmhc.symm_reg.@output_brain')
    wf.connect(struct_2_MNI_symm, 'outputspec.linear_xfm', ds, 'vmhc.symm_reg.@linear_xfm')
    wf.connect(struct_2_MNI_symm, 'outputspec.invlinear_xfm', ds, 'vmhc.symm_reg.@invlinear_xfm')
    wf.connect(struct_2_MNI_symm, 'outputspec.nonlinear_xfm', ds, 'vmhc.symm_reg.@nonlinear_xfm')

    # fixme
    vmhc = cpac_vmhc.create_vmhc(use_ants=False, name='vmhc')
    vmhc.inputs.fwhm_input.fwhm = 4
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain_3mm', vmhc, 'inputspec.standard_for_func')
    wf.connect(selectfiles, 'preproc_epi_bp_tNorm', vmhc, 'inputspec.rest_res')
    wf.connect(selectfiles, 'epi_2_struct_mat', vmhc, 'inputspec.example_func2highres_mat')
    wf.connect(struct_2_MNI_symm, 'outputspec.nonlinear_xfm', vmhc, 'inputspec.fnirt_nonlinear_warp')
    # wf.connect(GM_mask_epiSpace, 'out_file', vmhc, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', vmhc, 'inputspec.rest_mask')

    wf.connect(vmhc, 'outputspec.rest_res_2symmstandard', ds, 'vmhc.rest_res_2symmstandard')
    wf.connect(vmhc, 'outputspec.VMHC_FWHM_img', ds, 'vmhc.VMHC_FWHM_img')
    wf.connect(vmhc, 'outputspec.VMHC_Z_FWHM_img', ds, 'vmhc.VMHC_Z_FWHM_img')
    wf.connect(vmhc, 'outputspec.VMHC_Z_stat_FWHM_img', ds, 'vmhc.VMHC_Z_stat_FWHM_img')

    # VARIABILITY SCORES
    variability = Node(util.Function(input_names=['in_file'],
                                     output_names=['out_file_list'],
                                     function=calc_metrics_utils.calc_variability),
                       name='variability')
    wf.connect(selectfiles, 'preproc_epi_bp', variability, 'in_file')
    wf.connect(variability, 'out_file_list', ds, 'variability.subjectSpace.@out_files')

    # #fixme spline?
    variabilty_MNIspace_3mm = MapNode(fsl.ApplyWarp(),
                                      iterfield=['in_file'],
                                      name='variabilty_MNIspace_3mm')
    variabilty_MNIspace_3mm.inputs.interp = 'spline'
    variabilty_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template',
               variabilty_MNIspace_3mm, 'ref_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', variabilty_MNIspace_3mm, 'field_file')
    wf.connect(variability, 'out_file_list', variabilty_MNIspace_3mm, 'in_file')
    wf.connect(variabilty_MNIspace_3mm, 'out_file', ds, 'variability.MNI_3mm.@out_file')

    # CALC Z SCORE
    variabilty_MNIspace_3mm_Z = cpac_centrality_z_score.get_cent_zscore(
        wf_name='variabilty_MNIspace_3mm_Z')
    wf.connect(variabilty_MNIspace_3mm, 'out_file',
               variabilty_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', variabilty_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file',
               variabilty_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(variabilty_MNIspace_3mm_Z, 'outputspec.z_score_img',
               ds, 'variability.MNI_3mm_Z.@out_file')

    # STANDARDIZE BY MEAN
    variabilty_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='variabilty_MNIspace_3mm_standardized_mean')
    wf.connect(variabilty_MNIspace_3mm, 'out_file',
               variabilty_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file',
               variabilty_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(variabilty_MNIspace_3mm_standardized_mean, 'outputnode.out_file',
               ds, 'variability.MNI_3mm_standardized_mean.@out_file')

    wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
def create_struct_preproc_pipeline(working_dir, freesurfer_dir, ds_dir,
                                   use_fs_brainmask, name='struct_preproc'):
    """Structural preprocessing: brain extraction + FAST segmentation.

    Produces a brain-extracted T1, a brain mask (from FreeSurfer's
    aparc+aseg when use_fs_brainmask is True, otherwise from FSL BET),
    FAST partial-volume maps, binarized WM/CSF noise masks and a WM mask
    thresholded at 0.5 for FSL BBR.

    Parameters
    ----------
    working_dir : str
        Base directory for the workflow cache.
    freesurfer_dir : str
        FreeSurfer SUBJECTS_DIR (used only when use_fs_brainmask).
    ds_dir : str
        DataSink base directory.
    use_fs_brainmask : bool
        Choose FreeSurfer-based (True) or BET-based (False) masking.
    name : str
        Workflow name.

    Returns
    -------
    Workflow with 'inputnode' (t1w, subject_id) and 'outputnode'
    (t1w_brain, struct_brain_mask, fast_partial_volume_files, wm_mask,
    csf_mask, wm_mask_4_bbr, gm_mask).
    NOTE(review): 'gm_mask' is declared on outputnode but never connected
    in the visible code. Depends on module-level names (os, Workflow,
    Node, MapNode, util, nio, fsl, fs, tkregister2_fct) imported elsewhere.
    """
    # initiate workflow
    struct_preproc_wf = Workflow(name=name)
    struct_preproc_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting',
                                              'rsfMRI_preprocessing')
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['t1w', 'subject_id']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        't1w_brain', 'struct_brain_mask', 'fast_partial_volume_files',
        'wm_mask', 'csf_mask', 'wm_mask_4_bbr', 'gm_mask'
    ]), name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    # CREATE BRAIN MASK
    if use_fs_brainmask:
        # brainmask with fs
        fs_source = Node(interface=nio.FreeSurferSource(), name='fs_source')
        fs_source.inputs.subjects_dir = freesurfer_dir
        struct_preproc_wf.connect(inputnode, 'subject_id', fs_source, 'subject_id')

        # get aparc+aseg from list
        def get_aparc_aseg(files):
            # fs_source may return several aparc*/aseg files; pick aparc+aseg
            for name in files:
                if 'aparc+aseg' in name:
                    return name

        aseg = Node(fs.MRIConvert(out_type='niigz', out_file='aseg.nii.gz'),
                    name='aseg')
        struct_preproc_wf.connect(fs_source, ('aparc_aseg', get_aparc_aseg),
                                  aseg, 'in_file')

        # any labeled voxel (>= 0.5) counts as brain
        fs_brainmask = Node(fs.Binarize(min=0.5,
                                        # dilate=1,
                                        out_type='nii.gz'),
                            name='fs_brainmask')
        struct_preproc_wf.connect(aseg, 'out_file', fs_brainmask, 'in_file')

        # fill holes in mask, smooth, rebinarize
        fillholes = Node(fsl.maths.MathsCommand(
            args='-fillh -s 3 -thr 0.1 -bin',
            out_file='T1_brain_mask.nii.gz'),
            name='fillholes')
        struct_preproc_wf.connect(fs_brainmask, 'binary_file', fillholes, 'in_file')

        # FS conformed space -> native T1 space transform (tkregister2)
        fs_2_struct_mat = Node(util.Function(
            input_names=['moving_image', 'target_image'],
            output_names=['fsl_file'],
            function=tkregister2_fct),
            name='fs_2_struct_mat')
        struct_preproc_wf.connect([(fs_source, fs_2_struct_mat,
                                    [('T1', 'moving_image'),
                                     ('rawavg', 'target_image')])])

        struct_brain_mask = Node(fsl.ApplyXfm(interp='nearestneighbour'),
                                 name='struct_brain_mask_fs')
        struct_preproc_wf.connect(fillholes, 'out_file', struct_brain_mask, 'in_file')
        struct_preproc_wf.connect(inputnode, 't1w', struct_brain_mask, 'reference')
        struct_preproc_wf.connect(fs_2_struct_mat, 'fsl_file',
                                  struct_brain_mask, 'in_matrix_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file',
                                  outputnode, 'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file',
                                  ds, 'struct_prep.struct_brain_mask')

        # multiply t1w with fs brain mask
        t1w_brain = Node(fsl.maths.BinaryMaths(operation='mul'), name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file',
                                  t1w_brain, 'operand_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode, 't1w_brain')
        struct_preproc_wf.connect(t1w_brain, 'out_file', ds, 'struct_prep.t1w_brain')

    else:  # use bet
        t1w_brain = Node(fsl.BET(mask=True, outline=True, surfaces=True),
                         name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode, 't1w_brain')

        def struct_brain_mask_bet_fct(in_file):
            # identity pass-through so both branches expose the mask under
            # a node named 'struct_brain_mask'
            return in_file

        struct_brain_mask = Node(util.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=struct_brain_mask_bet_fct),
            name='struct_brain_mask')
        struct_preproc_wf.connect(t1w_brain, 'mask_file',
                                  struct_brain_mask, 'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file',
                                  outputnode, 'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file',
                                  ds, 'struct_prep.struct_brain_mask')

    # SEGMENTATION WITH FAST
    fast = Node(fsl.FAST(), name='fast')
    struct_preproc_wf.connect(t1w_brain, 'out_file', fast, 'in_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files',
                              outputnode, 'fast_partial_volume_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files', ds, 'struct_prep.fast')

    # functions to select tissue classes
    def selectindex(files, idx):
        # pick a subset of FAST's partial-volume files by index list
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        # pick one file from a list by position
        return files[idx]

    # pve0: CSF
    # pve1: GM
    # pve2: WM
    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')
    # [0, 2] selects CSF and WM maps
    struct_preproc_wf.connect(fast, ('partial_volume_files', selectindex, [0, 2]),
                              binarize_tissue, 'in_file')

    # OUTPUT WM AND CSF MASKS FOR CPAC DENOISING
    struct_preproc_wf.connect([(binarize_tissue, outputnode,
                                [(('out_file', selectsingle, 0), 'csf_mask'),
                                 (('out_file', selectsingle, 1), 'wm_mask')])])

    # WRITE WM MASK WITH P > .5 FOR FSL BBR
    # use threshold of .5 like FSL's epi_reg script
    wm_mask_4_bbr = Node(fsl.ImageMaths(op_string='-thr 0.5 -bin'),
                         name='wm_mask_4_bbr')
    struct_preproc_wf.connect(fast, ('partial_volume_files', selectindex, [2]),
                              wm_mask_4_bbr, 'in_file')
    struct_preproc_wf.connect(wm_mask_4_bbr, 'out_file',
                              outputnode, 'wm_mask_4_bbr')

    struct_preproc_wf.write_graph(dotfilename=struct_preproc_wf.name,
                                  graph2use='flat', format='pdf')

    return struct_preproc_wf
base_directory='/', template='%s/%s', template_args=info, sort_filelist=True), name='selectfiles') # For merging seed and nuisance mask paths and then distributing them downstream seed_plus_nuisance = Node(utilMerge(2), name='seed_plus_nuisance') seed_plus_nuisance.inputs.in2 = nuisance_masks # 1. Obtain timeseries for seed and nuisance variables # 1a. Merge all 3D functional images into a single 4D image merge = Node(Merge(dimension='t', output_type='NIFTI', tr=TR), name='merge') # 1b. Take mean of all voxels in each roi at each timepoint ts = MapNode(ImageMeants(), name='ts', iterfield=['mask']) # 1c. - Merge nuisance ts with motion parameters to create nuisance_regressors.txt. # - Take temporal derivatives of nuisance_regressors.txt and append to nuisance_regressors.txt # to create nuisance_regressors_tempderiv.txt # - Square nuisance_regressors_tempderiv.txt and append to nuisance_regressors_tempderiv.txt, # then append seed timeseries in front to create seed_nuisance_regressors.txt def make_regressors_files(regressors_ts_list, mot_params, func): import numpy as np import os num_timepoints = len(func) num_nuisance = len(regressors_ts_list) - 1 # make nuisance_regressors.txt nr = np.zeros((num_timepoints, num_nuisance))
def create_moco_pipeline(name='motion_correction'):
    """Create a motion-correction workflow around FSL MCFLIRT.

    Parameters
    ----------
    name : str
        Name of the returned workflow (default: 'motion_correction').

    Returns
    -------
    nipype Workflow exposing
      inputnode.epi -- 4D functional timeseries to realign
      outputnode    -- epi_moco, par_moco, mat_moco, rms_moco, epi_mean,
                       rotplot, transplot, dispplots, tsnr_file
    """
    # initiate workflow
    # BUGFIX: honour the caller-supplied `name`; previously the argument
    # was ignored in favour of a hard-coded workflow name.
    moco = Workflow(name=name)
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['epi']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['epi_moco',
                                                     'par_moco',
                                                     'mat_moco',
                                                     'rms_moco',
                                                     'epi_mean',
                                                     'rotplot',
                                                     'transplot',
                                                     'dispplots',
                                                     'tsnr_file']),
                      name='outputnode')
    # mcflirt motion correction; mean_vol=True registers each volume to
    # the timeseries mean rather than a fixed reference volume
    mcflirt = Node(fsl.MCFLIRT(save_mats=True,
                               save_plots=True,
                               save_rms=True,
                               mean_vol=True,
                               out_file='rest_realigned.nii.gz'),
                   name='mcflirt')
    # plot rotation motion parameters
    rotplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                           plot_type='rotations',
                                           out_file='rotation_plot.png'),
                      name='rotplotter')
    # plot translation motion parameters
    transplotter = Node(fsl.PlotMotionParams(in_source='fsl',
                                             plot_type='translations',
                                             out_file='translation_plot.png'),
                        name='transplotter')
    # plot displacement (one plot per rms file, hence the MapNode)
    dispplotter = MapNode(interface=fsl.PlotMotionParams(
        in_source='fsl',
        plot_type='displacement',
    ),
        name='dispplotter',
        iterfield=['in_file'])
    dispplotter.iterables = ('plot_type', ['displacement'])
    # calculate temporal mean of the realigned data
    tmean = Node(fsl.maths.MeanImage(dimension='T',
                                     out_file='rest_realigned_mean.nii.gz'),
                 name='tmean')
    # calculate tsnr of the realigned data
    tsnr = Node(misc.TSNR(), name='tsnr')
    # create connections
    moco.connect([(inputnode, mcflirt, [('epi', 'in_file')]),
                  (mcflirt, tmean, [('out_file', 'in_file')]),
                  (mcflirt, rotplotter, [('par_file', 'in_file')]),
                  (mcflirt, transplotter, [('par_file', 'in_file')]),
                  (mcflirt, dispplotter, [('rms_files', 'in_file')]),
                  (tmean, outputnode, [('out_file', 'epi_mean')]),
                  (mcflirt, outputnode, [('out_file', 'epi_moco'),
                                         ('par_file', 'par_moco'),
                                         ('mat_file', 'mat_moco'),
                                         ('rms_files', 'rms_moco')]),
                  (rotplotter, outputnode, [('out_file', 'rotplot')]),
                  (transplotter, outputnode, [('out_file', 'transplot')]),
                  (dispplotter, outputnode, [('out_file', 'dispplots')]),
                  (mcflirt, tsnr, [('out_file', 'in_file')]),
                  (tsnr, outputnode, [('tsnr_file', 'tsnr_file')])])
    return moco
susan.get_node('meanfunc2').interface.num_threads = 1 susan.get_node('meanfunc2').interface.mem_gb = 3 susan.get_node('merge').interface.num_threads = 1 susan.get_node('merge').interface.mem_gb = 3 susan.get_node('multi_inputs').interface.num_threads = 1 susan.get_node('multi_inputs').interface.mem_gb = 3 susan.get_node('smooth').interface.num_threads = 1 susan.get_node('smooth').interface.mem_gb = 3 susan.get_node('outputnode').interface.num_threads = 1 susan.get_node('outputnode').interface.mem_gb = 0.1 # ====================================================================== # DEFINE NODE: FUNCTION TO GET THE SUBJECT-SPECIFIC INFORMATION # ====================================================================== subject_info = MapNode(Function(input_names=['events', 'confounds'], output_names=['subject_info', 'event_names'], function=get_subject_info), name='subject_info', iterfield=['events', 'confounds']) # set expected thread and memory usage for the node: subject_info.interface.num_threads = 1 subject_info.interface.mem_gb = 0.1 # subject_info.inputs.events = selectfiles_results.outputs.events # subject_info.inputs.confounds = selectfiles_results.outputs.confounds # subject_info_results = subject_info.run() # subject_info_results.outputs.subject_info # ====================================================================== # DEFINE NODE: REMOVE DUMMY VARIABLES (USING FSL ROI) # ====================================================================== # function: extract region of interest (ROI) from an image trim = MapNode(ExtractROI(), name='trim', iterfield=['in_file']) # define index of the first selected volume (i.e., minimum index):
data[global_mask] = filter_data.T else: filter_data = np.real( np.fft.ifftn(np.fft.fftn(filter_data) * F[:, np.newaxis])) data[global_mask] = filter_data.T img_out = nb.Nifti1Image(data, img.get_affine(), img.get_header()) out_file = os.path.join(os.getcwd(), 'bp_' + in_file.split('/')[-1]) img_out.to_filename(out_file) return (out_file) bpfilter = MapNode(Function(function=bandpass_filter, input_names=['in_file', 'brainmask'], output_names=['out_file']), iterfield='in_file', name='bpfilter') def get_ants_files(ants_output): """ Gets output from ANTs to pass to normalising all the things. """ trans = [ants_output[0], ants_output[1]] return (trans) ants_list = Node(Function(function=get_ants_files, input_names=['ants_output'], output_names=['trans']),
def test_mapnode(config, moving_image, fixed_image):
    """Build a workflow that rigidly registers (FLIRT, 6 dof) each moving
    image to its paired fixed image and sinks the renamed outputs.

    Parameters
    ----------
    config : dict
        Must provide 'working_directory' (used as the workflow base dir).
    moving_image, fixed_image : list of str
        Image paths; the mse identifiers are derived from the first entry
        of each list via `get_mseid`.

    Returns
    -------
    The configured (not yet run) nipype Workflow.
    """
    import nipype.interfaces.fsl as fsl
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import os

    moving_mse = get_mseid(moving_image[0])
    fixed_mse = get_mseid(fixed_image[0])
    print(moving_mse, fixed_mse)
    seriesNum_moving = get_seriesnum(moving_image)
    seriesNum_fixed = get_seriesnum(fixed_image)
    print("seriesNum for moving and fixed are {}, {} respectively".format(seriesNum_moving, seriesNum_fixed))

    register = Workflow(name="test_mapnode")
    register.base_dir = config["working_directory"]
    inputnode = Node(IdentityInterface(fields=["moving_image", "fixed_image"]),
                     name="inputspec")
    inputnode.inputs.moving_image = moving_image
    inputnode.inputs.fixed_image = fixed_image

    # pair up the moving/fixed lists so FLIRT can iterate over both
    check_len = Node(Function(input_names=["moving_image", "fixed_image"],
                              output_names=["new_moving_image", "new_fixed_image"],
                              function=check_length),
                     name="check_len")
    register.connect(inputnode, 'moving_image', check_len, 'moving_image')
    register.connect(inputnode, 'fixed_image', check_len, 'fixed_image')

    # rigid-body registration, one FLIRT run per (in_file, reference) pair
    flt_rigid = MapNode(fsl.FLIRT(), iterfield=['in_file', 'reference'],
                        name="FLIRT_RIGID")
    flt_rigid.inputs.dof = 6
    # BUGFIX: output_type is an interface input; assigning it on the node
    # object itself (`flt_rigid.output_type = ...`) was a silent no-op.
    flt_rigid.inputs.output_type = 'NIFTI_GZ'
    register.connect(check_len, 'new_moving_image', flt_rigid, 'in_file')
    register.connect(check_len, 'new_fixed_image', flt_rigid, 'reference')

    sinker = Node(DataSink(), name="DataSink")
    sinker.inputs.base_directory = '/data/henry7/james'
    sinker.inputs.container = 'test_mapnode'

    def getsubs(moving_image, fixed_image, moving_mse, fixed_mse):
        # Map nipype's generated '_FLIRT_RIGID<i>' folder names onto
        # readable '<moving_mse>__<fixed_mse>' names in the sink.
        N = len(moving_image) * len(fixed_image)
        subs = [('_flirt', '_trans')]
        if N == 1:
            subs += [('_FLIRT_RIGID%d' % 0, moving_mse + '__' + fixed_mse)]
        else:
            for i in range(N):
                subs += [('_FLIRT_RIGID%d' % i,
                          moving_mse + '__' + fixed_mse + '_' + str(i + 1))]
        return subs

    get_subs = Node(Function(input_names=["moving_image", "fixed_image",
                                          "moving_mse", "fixed_mse"],
                             output_names=["subs"],
                             function=getsubs),
                    name="get_subs")
    get_subs.inputs.moving_mse = moving_mse
    get_subs.inputs.fixed_mse = fixed_mse
    register.connect(inputnode, 'moving_image', get_subs, 'moving_image')
    register.connect(inputnode, 'fixed_image', get_subs, "fixed_image")
    register.connect(get_subs, 'subs', sinker, 'substitutions')
    register.connect(flt_rigid, 'out_file', sinker, '@mapnode_out')
    register.write_graph(graph2use='orig')
    register.config["Execution"] = {"keep_inputs": True,
                                    "remove_unnecessary_outputs": False}
    return register
'7tt2w': 'derivatives/preprocessing/{subject_id}/{subject_id}_ses-01_7T_T2w_NlinMoCo_res-iso.3_N4corrected_denoised_brain_preproc.nii.gz', } selectfiles = Node(SelectFiles(templates, base_directory=experiment_dir), name='selectfiles') #PLAN:: antsBE+ n4, mult mask to flair and space > n4 flair+ space, > min mask with fsl > WM mask? > scale to MNI > fslmaths (div) #PLAN 7T: min mask with fsl? > wm mask > scale to CC > fslmaths (div) #should scale to the CC for tse as well? wf.connect([(infosource, selectfiles, [('subject_id', 'subject_id')])]) wf.connect([(infosource, selectfiles, [('session_id', 'session_id')])]) ########### ## flirt ## ########### #3t flirt_n_space = MapNode(fsl.FLIRT(cost_func='mutualinfo', uses_qform=True), name='flirt_node_space', iterfield=['in_file']) wf.connect([(selectfiles, flirt_n_space, [('t1w', 'reference')])]) wf.connect([(selectfiles, flirt_n_space, [('space', 'in_file')])]) flirt_n_flair = MapNode(fsl.FLIRT(cost_func='mutualinfo', uses_qform=True), name='flirt_node_flair', iterfield=['in_file']) wf.connect([(selectfiles, flirt_n_flair, [('t1w', 'reference')])]) wf.connect([(selectfiles, flirt_n_flair, [('flair', 'in_file')])]) #################### ## ants_brain_ext ## #################### ants_be_n = MapNode(BrainExtraction(dimension=3, brain_template='/data/fasttemp/uqtshaw/tomcat/data/derivatives/myelin_mapping/T_template.nii.gz', brain_probability_mask='/data/fasttemp/uqtshaw/tomcat/data/derivatives/myelin_mapping/T_template_BrainCerebellumProbabilityMask.nii.gz'), name='ants_be_node', iterfield=['anatomical_image']) wf.connect([(selectfiles, ants_be_n, [('t1w', 'anatomical_image')])]) ############ ## antsCT ## ############
def firstlevel_wf(subject_id, sink_directory, name='wmaze_frstlvl_wf'):
    """First-level FSL model-fitting workflow for the WMAZE task.

    Parameters
    ----------
    subject_id : str
        Subject identifier used for file grabbing and as sink container.
    sink_directory : str
        Base directory handed to the DataSink.
    name : str
        Accepted for interface compatibility; the internal workflow name
        is fixed to 'frstlvl_wf'.

    Returns
    -------
    The configured nipype Workflow.
    """
    frstlvl_wf = Workflow(name='frstlvl_wf')

    # template arguments for the DataGrabber below
    info = dict(
        task_mri_files=[['subject_id', 'wmaze']],
        motion_noise_files=[['subject_id']])

    # function node to call subjectinfo with name/onset/duration/amplitude info
    subject_info = Node(Function(input_names=['subject_id'],
                                 output_names=['output'],
                                 function=subjectinfo),
                        name='subject_info')
    subject_info.inputs.ignore_exception = False
    subject_info.inputs.subject_id = subject_id

    # function node to define contrasts
    getcontrasts = Node(Function(input_names=['subject_id', 'info'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    getcontrasts.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getcontrasts, 'info')

    # function node to substitute names of folders/files created during pipeline
    # BUGFIX: declare every input actually fed below ('subject_id' is set
    # directly, 'info' is connected from subject_info); with the original
    # input_names=['cons'] nipype rejects the undeclared inputs.
    getsubs = Node(Function(input_names=['subject_id', 'cons', 'info'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getsubs, 'info')
    frstlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    # datasource node to get task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=info.keys()),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/preproc/')
    datasource.inputs.field_template = dict(
        # smoothed functional runs
        task_mri_files='%s/func/smoothed_fullspectrum/_maskfunc2*/*%s*.nii.gz',
        # filter regressor noise files
        motion_noise_files='%s/noise/filter_regressor??.txt')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    # trim functional data: start at the first volume, keep 197 volumes
    # (i.e. drop the last three)
    fslroi_epi = MapNode(ExtractROI(t_min=0, t_size=197),
                         iterfield=['in_file'],
                         name='fslroi_epi')
    # BUGFIX: these are interface inputs; setting them as plain node
    # attributes (fslroi_epi.output_type = ...) was a silent no-op.
    fslroi_epi.inputs.output_type = 'NIFTI_GZ'
    fslroi_epi.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(datasource, 'task_mri_files', fslroi_epi, 'in_file')

    # function node to fold motion/noise files into single regressors
    motionnoise = Node(Function(input_names=['subjinfo', 'files'],
                                output_names=['subjinfo'],
                                function=motion_noise),
                       name='motionnoise')
    motionnoise.inputs.ignore_exception = False
    frstlvl_wf.connect(subject_info, 'output', motionnoise, 'subjinfo')
    frstlvl_wf.connect(datasource, 'motion_noise_files', motionnoise, 'files')

    # create model specifications compatible with spm/fsl designers
    # (requires subject info in the form of a Bunch)
    specify_model = Node(SpecifyModel(), name='specify_model')
    specify_model.inputs.high_pass_filter_cutoff = -1.0  # no high-pass filtering
    specify_model.inputs.ignore_exception = False
    specify_model.inputs.input_units = 'secs'  # 'secs' or 'scans'
    specify_model.inputs.time_repetition = 2.0  # TR
    frstlvl_wf.connect(fslroi_epi, 'roi_file',
                       specify_model, 'functional_runs')
    frstlvl_wf.connect(motionnoise, 'subjinfo', specify_model, 'subject_info')

    # identity node fanning model-fit parameters out to the nodes below
    modelfit_inputspec = Node(IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ],
                                                mandatory_inputs=True),
                              name='modelfit_inputspec')
    modelfit_inputspec.inputs.bases = {'dgamma': {'derivs': False}}
    modelfit_inputspec.inputs.film_threshold = 0.0
    modelfit_inputspec.inputs.interscan_interval = 2.0
    modelfit_inputspec.inputs.model_serial_correlations = True
    frstlvl_wf.connect(fslroi_epi, 'roi_file',
                       modelfit_inputspec, 'functional_data')
    frstlvl_wf.connect(getcontrasts, 'contrasts',
                       modelfit_inputspec, 'contrasts')
    frstlvl_wf.connect(specify_model, 'session_info',
                       modelfit_inputspec, 'session_info')

    # first-level design matrix with contrasts and motion/noise regressors
    level1_design = MapNode(Level1Design(),
                            iterfield=['contrasts', 'session_info'],
                            name='level1_design')
    level1_design.inputs.ignore_exception = False
    frstlvl_wf.connect(modelfit_inputspec, 'interscan_interval',
                       level1_design, 'interscan_interval')
    frstlvl_wf.connect(modelfit_inputspec, 'session_info',
                       level1_design, 'session_info')
    frstlvl_wf.connect(modelfit_inputspec, 'contrasts',
                       level1_design, 'contrasts')
    frstlvl_wf.connect(modelfit_inputspec, 'bases', level1_design, 'bases')
    frstlvl_wf.connect(modelfit_inputspec, 'model_serial_correlations',
                       level1_design, 'model_serial_correlations')

    # generate a design.mat file for each run
    generate_model = MapNode(FEATModel(),
                             iterfield=['fsf_file', 'ev_files'],
                             name='generate_model')
    generate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    generate_model.inputs.ignore_exception = False
    generate_model.inputs.output_type = 'NIFTI_GZ'
    generate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(level1_design, 'fsf_files', generate_model, 'fsf_file')
    frstlvl_wf.connect(level1_design, 'ev_files', generate_model, 'ev_files')

    # estimate the model with FILMGLS -- fits the design matrix to the
    # voxel timeseries
    estimate_model = MapNode(FILMGLS(),
                             iterfield=['design_file', 'in_file', 'tcon_file'],
                             name='estimate_model')
    estimate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    estimate_model.inputs.ignore_exception = False
    estimate_model.inputs.mask_size = 5  # Susan-smooth mask size
    estimate_model.inputs.output_type = 'NIFTI_GZ'
    estimate_model.inputs.results_dir = 'results'
    estimate_model.inputs.smooth_autocorr = True  # smooth auto-correlation estimates
    estimate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(modelfit_inputspec, 'film_threshold',
                       estimate_model, 'threshold')
    frstlvl_wf.connect(modelfit_inputspec, 'functional_data',
                       estimate_model, 'in_file')
    # mat file containing ascii matrix for design
    frstlvl_wf.connect(generate_model, 'design_file',
                       estimate_model, 'design_file')
    # contrast file containing contrast vectors
    frstlvl_wf.connect(generate_model, 'con_file',
                       estimate_model, 'tcon_file')

    # merge the contrasts -- necessary for fsl 5.0.7 and greater
    merge_contrasts = MapNode(Merge(2), iterfield=['in1'],
                              name='merge_contrasts')
    frstlvl_wf.connect(estimate_model, 'zstats', merge_contrasts, 'in1')

    # transform z-stats to p-values
    z2pval = MapNode(ImageMaths(), iterfield=['in_file'], name='z2pval')
    z2pval.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    z2pval.inputs.ignore_exception = False
    z2pval.inputs.op_string = '-ztop'  # z-stat to (uncorrected) p-value
    z2pval.inputs.output_type = 'NIFTI_GZ'
    z2pval.inputs.suffix = '_pval'
    z2pval.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(merge_contrasts, ('out', pop_lambda),
                       z2pval, 'in_file')

    # identity node collecting outputs from the estimation nodes
    modelfit_outputspec = Node(IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'parameter_estimates',
        'zstats', 'design_image', 'design_file', 'design_cov',
        'sigmasquareds'
    ],
                                                 mandatory_inputs=True),
                               name='modelfit_outputspec')
    frstlvl_wf.connect(estimate_model, 'copes',
                       modelfit_outputspec, 'copes')  # lvl1 cope files
    frstlvl_wf.connect(estimate_model, 'varcopes',
                       modelfit_outputspec, 'varcopes')  # lvl1 varcope files
    frstlvl_wf.connect(merge_contrasts, 'out',
                       modelfit_outputspec, 'zstats')  # zstats across runs
    frstlvl_wf.connect(z2pval, 'out_file', modelfit_outputspec, 'pfiles')
    # graphical representation of design matrix
    frstlvl_wf.connect(generate_model, 'design_image',
                       modelfit_outputspec, 'design_image')
    # mat file containing ascii matrix for design
    frstlvl_wf.connect(generate_model, 'design_file',
                       modelfit_outputspec, 'design_file')
    # graphical representation of design covariance
    frstlvl_wf.connect(generate_model, 'design_cov',
                       modelfit_outputspec, 'design_cov')
    # parameter estimates for columns of design matrix
    frstlvl_wf.connect(estimate_model, 'param_estimates',
                       modelfit_outputspec, 'parameter_estimates')
    frstlvl_wf.connect(estimate_model, 'dof_file',
                       modelfit_outputspec, 'dof_file')  # degrees of freedom
    frstlvl_wf.connect(estimate_model, 'sigmasquareds',
                       modelfit_outputspec, 'sigmasquareds')  # residual summary

    # sink output from multiple points in the pipeline
    sinkd = MapNode(DataSink(),
                    iterfield=[
                        'substitutions', 'modelfit.contrasts.@copes',
                        'modelfit.contrasts.@varcopes', 'modelfit.estimates',
                        'modelfit.contrasts.@zstats'
                    ],
                    name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    frstlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')
    frstlvl_wf.connect(modelfit_outputspec, 'parameter_estimates', sinkd,
                       'modelfit.estimates')
    frstlvl_wf.connect(modelfit_outputspec, 'sigmasquareds', sinkd,
                       'modelfit.estimates.@sigsq')
    frstlvl_wf.connect(modelfit_outputspec, 'dof_file', sinkd,
                       'modelfit.dofs')
    frstlvl_wf.connect(modelfit_outputspec, 'copes', sinkd,
                       'modelfit.contrasts.@copes')
    frstlvl_wf.connect(modelfit_outputspec, 'varcopes', sinkd,
                       'modelfit.contrasts.@varcopes')
    frstlvl_wf.connect(modelfit_outputspec, 'zstats', sinkd,
                       'modelfit.contrasts.@zstats')
    frstlvl_wf.connect(modelfit_outputspec, 'design_image', sinkd,
                       'modelfit.design')
    frstlvl_wf.connect(modelfit_outputspec, 'design_cov', sinkd,
                       'modelfit.design.@cov')
    frstlvl_wf.connect(modelfit_outputspec, 'design_file', sinkd,
                       'modelfit.design.@matrix')
    frstlvl_wf.connect(modelfit_outputspec, 'pfiles', sinkd,
                       'modelfit.contrasts.@pstats')
    return frstlvl_wf
'fsaverage' ] # name of the surface subject/space the to be transformed ROIs are in subject_list = ['sub-01'] # create the subject_list variable output_dir = 'output_inverse_transform_ROIs_ALPACA' # name of norm output folder working_dir = 'workingdir_inverse_transform_ROIs_ALPACA' # name of norm working directory ##### Create & specify nodes to be used and connected during the normalization pipeline ##### # Concatenate BBRegister's and ANTS' transforms into a list merge = Node(Merge(2), iterfield=['in2'], name='mergexfm') # Binarize node - binarizes mask again after transformation binarize_post2ant = MapNode(Binarize(min=0.1), iterfield=['in_file'], name='binarize_post2ant') binarize_pt2pp = binarize_post2ant.clone('binarize_pt2pp') # FreeSurferSource - Data grabber specific for FreeSurfer data fssource_lh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='lh'), run_without_submitting=True, name='fssource_lh') fssource_rh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='rh'), run_without_submitting=True, name='fssource_rh') # Transform the volumetric ROIs to the target space inverse_transform_mni_volume_post2ant = MapNode(
def create_transform_pipeline(name='transform_timeseries'):
    """Create a workflow that warps a functional timeseries into
    (resampled) anatomical space, masks it, and computes DVARS.

    Parameters
    ----------
    name : str
        Name of the returned workflow (default: 'transform_timeseries').

    Returns
    -------
    nipype Workflow exposing
      inputnode  -- orig_ts, anat_head, mat_moco, fullwarp, resolution,
                    brain_mask
      outputnode -- trans_ts, trans_ts_mean, trans_ts_masked,
                    resamp_brain, brain_mask_resamp, out_dvars
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow
    # BUGFIX: honour the caller-supplied `name`; previously the argument
    # was ignored (and its default carried a 'transfrom' typo).
    transform_ts = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['orig_ts',
                                                    'anat_head',
                                                    'mat_moco',
                                                    'fullwarp',
                                                    'resolution',
                                                    'brain_mask']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['trans_ts',
                                                     'trans_ts_mean',
                                                     'trans_ts_masked',
                                                     'resamp_brain',
                                                     'brain_mask_resamp',
                                                     'out_dvars']),
                      name='outputnode')
    # resample anatomy to the requested isotropic resolution
    resample = Node(fsl.FLIRT(datatype='float',
                              out_file='T1_resampled.nii.gz'),
                    name='resample_anat')
    transform_ts.connect([(inputnode, resample,
                           [('anat_head', 'in_file'),
                            ('anat_head', 'reference'),
                            ('resolution', 'apply_isoxfm')]),
                          (resample, outputnode,
                           [('out_file', 'resamp_brain')])])
    # split timeseries into single volumes
    split = Node(fsl.Split(dimension='t',
                           out_base_name='timeseries'),
                 name='split')
    transform_ts.connect([(inputnode, split, [('orig_ts', 'in_file')])])
    # apply motion-correction premats and the full warpfield per volume
    applywarp = MapNode(fsl.ApplyWarp(interp='spline',
                                      relwarp=True,
                                      out_file='rest2anat.nii.gz',
                                      datatype='float'),
                        iterfield=['in_file', 'premat'],
                        name='applywarp')
    transform_ts.connect([(split, applywarp, [('out_files', 'in_file')]),
                          (inputnode, applywarp,
                           [('mat_moco', 'premat'),
                            ('fullwarp', 'field_file')]),
                          (resample, applywarp, [('out_file', 'ref_file')])])
    # re-concatenate the warped volumes
    merge = Node(fsl.Merge(dimension='t',
                           merged_file='rest2anat.nii.gz'),
                 name='merge')
    transform_ts.connect([(applywarp, merge, [('out_file', 'in_files')]),
                          (merge, outputnode,
                           [('merged_file', 'trans_ts')])])
    # calculate new temporal mean
    tmean = Node(fsl.maths.MeanImage(dimension='T',
                                     out_file='rest_mean2anat_lowres.nii.gz'),
                 name='tmean')
    transform_ts.connect([(merge, tmean, [('merged_file', 'in_file')]),
                          (tmean, outputnode,
                           [('out_file', 'trans_ts_mean')])])
    # resample brain mask to the timeseries grid (nearest neighbour)
    resample_brain = Node(afni.Resample(resample_mode='NN',
                                        outputtype='NIFTI_GZ',
                                        out_file='T1_brain_mask_lowres.nii.gz'),
                          name='resample_brain')
    transform_ts.connect([(inputnode, resample_brain,
                           [('brain_mask', 'in_file')]),
                          (tmean, resample_brain, [('out_file', 'master')]),
                          (resample_brain, outputnode,
                           [('out_file', 'brain_mask_resamp')])])
    # mask the transformed timeseries
    mask = Node(fsl.ApplyMask(), name="mask")
    transform_ts.connect([(resample_brain, mask,
                           [('out_file', 'mask_file')]),
                          (merge, mask, [('merged_file', 'in_file')]),
                          (mask, outputnode,
                           [('out_file', 'trans_ts_masked')])])
    # calculate DVARS within the resampled brain mask
    dvars = Node(confounds.ComputeDVARS(save_all=True, save_plot=True),
                 name="dvars")
    transform_ts.connect([(resample_brain, dvars, [('out_file', 'in_mask')]),
                          (merge, dvars, [('merged_file', 'in_file')]),
                          (dvars, outputnode, [('out_all', 'out_dvars')])])
    return transform_ts
outfields=['struct']), name='datasource') datasource.inputs.base_directory = dataDir datasource.inputs.template = '*' datasource.inputs.field_template = field_template datasource.inputs.template_args = template_args datasource.inputs.subject_id = subs datasource.inputs.sort_filelist = False # Specify workflow name. strucProc = Workflow(name='strucProc', base_dir=outDir + '/tmp') strucProc.connect([(infosource, datasource, [('subject_id', 'subject_id')])]) # New Segment segment = MapNode(interface=NewSegment(), iterfield=['channel_files'], name="segment") segment.inputs.channel_info = (0.0001, 60, (True, True)) segment.inputs.write_deformation_fields = [ False, False ] # inverse and forward defomration fields tpmPath = '/afs/cbs.mpg.de/software/spm/12.6685/9.0/precise/tpm/' # The "True" statement tells NewSegment to create DARTEL output for: tissue1 = ( (tpmPath + 'TPM.nii', 1), 2, (False, True), (False, False)) # grey matter tissue2 = ( (tpmPath + 'TPM.nii', 2), 2, (False, True), (False, False)) # white matter tissue3 = ((tpmPath + 'TPM.nii', 3), 2, (False, False), (False, False)) tissue4 = ((tpmPath + 'TPM.nii', 4), 2, (False, False), (False, False)) tissue5 = ((tpmPath + 'TPM.nii', 5), 2, (False, False), (False, False)) tissue6 = ((tpmPath + 'TPM.nii', 6), 2, (False, False), (False, False))
def run_workflow(session=None, csv_file=None):
    """Resample anatomical images to 0.6 mm isotropic voxels and write
    them into a BIDS-like derivatives tree.

    Parameters
    ----------
    session : optional
        Currently unused; kept for interface compatibility.
    csv_file : str or None
        CSV with subject/session/datatype rows; required -- without it
        there is nothing to iterate over and the function returns early.
    """
    from nipype import config
    #config.enable_debug_mode()
    method = 'fs'  # freesurfer's mri_convert is faster
    if method == 'fs':
        import nipype.interfaces.freesurfer as fs  # freesurfer
    else:
        assert method == 'fsl'
        import nipype.interfaces.fsl as fsl  # fsl

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    data_dir = ds_root
    output_dir = 'derivatives/resampled-isotropic-06mm'
    working_dir = 'workingdirs'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'datatype',
    ]), name="infosource")

    if csv_file is not None:
        # Read csv and use pandas to set-up image and ev-processing
        df = pd.read_csv(csv_file)
        # init lists
        sub_img = []
        ses_img = []
        dt_img = []
        # fill lists to iterate mapnodes
        for index, row in df.iterrows():
            for dt in row.datatype.strip("[]").split(" "):
                if dt in ['anat']:  # only for anatomicals
                    sub_img.append(row.subject)
                    ses_img.append(row.session)
                    dt_img.append(dt)
        # check if the file definitions are ok
        if len(dt_img) > 0:
            print('There are images to process. Will continue.')
        else:
            print('No images specified. Check your csv-file.')
        infosource.iterables = [('session_id', ses_img),
                                ('subject_id', sub_img),
                                ('datatype', dt_img)]
        infosource.synchronize = True
    else:
        print('No csv-file specified. Cannot continue.')
        # BUGFIX: bail out here -- previously execution fell through with
        # no iterables configured and crashed later in the pipeline.
        return

    # SelectFiles
    templates = {
        'image': 'sub-{subject_id}/ses-{session_id}/{datatype}/'
                 'sub-{subject_id}_ses-{session_id}_*.nii.gz',
    }
    inputfiles = Node(nio.SelectFiles(templates,
                                      base_directory=data_dir),
                      name="input_files")

    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(base_directory=ds_root,
                                    container=output_dir,
                                    parameterization=True),
                       name="output_files")
    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        # BIDS Extension Proposal: BEP003
        ('_resample.nii.gz', '_res-06x06x06_preproc.nii.gz'),
        # remove subdirectories:
        ('resampled-isotropic-06mm/isoxfm-06mm', 'resampled-isotropic-06mm'),
        ('resampled-isotropic-06mm/mriconv-06mm', 'resampled-isotropic-06mm'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        # this works only if datatype is specified in input
        (r'_datatype_([a-z]*)_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)',
         r'sub-\3/ses-\2/\1'),
        (r'_fs_iso06mm[0-9]*/', r''),
        (r'/_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)', r'/sub-\2/ses-\1/'),
        # stupid hacks for when datatype is not specified
        (r'//(sub-[^/]*_bold_res-.*)', r'/func/\1'),
        (r'//(sub-[^/]*_phasediff_res-.*.nii.gz)', r'/fmap/\1'),
        (r'//(sub-[^/]*_magnitude1_res-.*.nii.gz)', r'/fmap/\1'),
        (r'//(sub-[^/]*_epi_res-.*.nii.gz)', r'/fmap/\1'),
        (r'//(sub-[^/]*_T1w_res-.*.nii.gz)', r'/anat/\1'),
        (r'//(sub-[^/]*_T2w_res-.*.nii.gz)', r'/anat/\1'),
        (r'//(sub-[^/]*_dwi_res-.*.nii.gz)', r'/dwi/\1'),
    ]

    # -------------------------------------------- Create Pipeline
    isotropic_flow = Workflow(name='resample_isotropic06mm',
                              base_dir=os.path.join(ds_root, working_dir))
    isotropic_flow.connect([(infosource, inputfiles, [
        ('subject_id', 'subject_id'),
        ('session_id', 'session_id'),
        ('datatype', 'datatype'),
    ])])

    # --- Convert to 0.6 mm isotropic voxels
    if method == 'fs':
        fs_iso06mm = MapNode(
            fs.Resample(
                voxel_size=(0.6, 0.6, 0.6),
                # suffix is not accepted by fs.Resample
                # suffix='_res-1x1x1_preproc',
                # BIDS Extension Proposal: BEP003
            ),
            name='fs_iso06mm',
            iterfield=['in_file'],
        )
        isotropic_flow.connect(inputfiles, 'image', fs_iso06mm, 'in_file')
        isotropic_flow.connect(fs_iso06mm, 'resampled_file',
                               outputfiles, 'mriconv-06mm')
    elif method == 'fsl':
        # in_file --> out_file
        isoxfm = Node(fsl.FLIRT(apply_isoxfm=0.6, ), name='isoxfm')
        isotropic_flow.connect(inputfiles, 'image', isoxfm, 'in_file')
        isotropic_flow.connect(inputfiles, 'image', isoxfm, 'reference')
        isotropic_flow.connect(isoxfm, 'out_file',
                               outputfiles, 'isoxfm-06mm')

    isotropic_flow.stop_on_first_crash = False  # True
    isotropic_flow.keep_inputs = True
    isotropic_flow.remove_unnecessary_outputs = False
    isotropic_flow.write_graph()
    outgraph = isotropic_flow.run()
def secondlevel_wf(subject_id, sink_directory, name='GLM1_scndlvl_wf'):
    """Build the second-level (within-subject, across-runs) fixed-effects workflow.

    Merges first-level copes/varcopes across runs for each contrast, builds a
    DOF volume, runs FSL FLAMEO in fixed-effects mode, and sinks the results.

    Parameters
    ----------
    subject_id : str
        Subject identifier used to locate first-level outputs on disk.
    sink_directory : str
        Base directory handed to the DataSink node.
    name : str, optional
        Name of the returned Workflow. NOTE: previous revisions silently
        ignored this argument and always used 'scndlvl_wf'; it is now honored.

    Returns
    -------
    Workflow
        The fully wired nipype workflow (not yet run).
    """
    # FIX: honor the `name` parameter instead of hard-coding 'scndlvl_wf'.
    scndlvl_wf = Workflow(name=name)

    base_dir = os.path.abspath('/home/data/madlab/data/mri/wmaze/')

    contrasts = [
        'all_before_B_corr', 'all_before_B_incorr', 'all_remaining',
        'all_corr_minus_all_incorr', 'all_incorr_minus_all_corr'
    ]

    # For each contrast, glob the per-run first-level cope files so we can
    # determine which _estimate_model* runs actually produced data.
    cnt_file_list = []
    for curr_contrast in contrasts:
        cnt_file_list.append(
            glob(
                os.path.join(
                    base_dir,
                    'frstlvl/model_GLM1/{0}/modelfit/contrasts/_estimate_model*/cope??_{1}.nii.gz'
                    .format(subject_id, curr_contrast))))

    # One sub-list of estimate-model indices per contrast (5 contrasts).
    dof_runs = [[], [], [], [], []]
    for i, curr_file_list in enumerate(cnt_file_list):
        if not isinstance(curr_file_list, list):
            # Defensive: glob() always returns a list, but keep the original
            # guard in case a single path sneaks in.
            curr_file_list = [curr_file_list]
        for curr_file in curr_file_list:
            # Grabs the estimate_model number from the parent directory name.
            # FIX: take everything after '_estimate_model' rather than only the
            # final character, which silently broke for run indices >= 10.
            dof_runs[i].append(
                curr_file.split('/')[-2].split('_estimate_model')[-1])

    info = dict(copes=[['subject_id', contrasts]],
                varcopes=[['subject_id', contrasts]],
                mask_file=[['subject_id', 'aparc+aseg_thresh']],
                dof_files=[['subject_id', dof_runs, 'dof']])

    # Datasource node to grab copes, varcopes, the anatomical mask, and DOFs.
    # FIX: pass a concrete list (dict_keys views are not reliably accepted by
    # nipype traits).
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=list(info.keys())),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/')
    datasource.inputs.field_template = dict(
        copes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/cope*_%s.nii.gz',
        varcopes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/varcope*_%s.nii.gz',
        mask_file='preproc/%s/ref/_fs_threshold20/%s*_thresh.nii',
        dof_files='frstlvl/model_GLM1/%s/modelfit/dofs/_estimate_model%s/%s')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    # Inputspec to deal with copes and varcopes doublelist issues.
    fixedfx_inputspec = Node(IdentityInterface(
        fields=['copes', 'varcopes', 'dof_files'], mandatory_inputs=True),
                             name='fixedfx_inputspec')
    scndlvl_wf.connect(datasource, ('copes', doublelist), fixedfx_inputspec,
                       'copes')
    scndlvl_wf.connect(datasource, ('varcopes', doublelist),
                       fixedfx_inputspec, 'varcopes')
    scndlvl_wf.connect(datasource, ('dof_files', doublelist),
                       fixedfx_inputspec, 'dof_files')

    # Merge all copes into a single 4D matrix across subject runs.
    copemerge = MapNode(Merge(), iterfield=['in_files'], name='copemerge')
    copemerge.inputs.dimension = 't'
    copemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    copemerge.inputs.ignore_exception = False
    copemerge.inputs.output_type = 'NIFTI_GZ'
    copemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'copes', copemerge, 'in_files')

    # Generate the DOF volume required by FLAMEO for second level.
    gendofvolume = Node(Function(input_names=['dof_files', 'cope_files'],
                                 output_names=['dof_volumes'],
                                 function=get_dofvolumes),
                        name='gendofvolume')
    gendofvolume.inputs.ignore_exception = False
    scndlvl_wf.connect(fixedfx_inputspec, 'dof_files', gendofvolume,
                       'dof_files')
    scndlvl_wf.connect(copemerge, 'merged_file', gendofvolume, 'cope_files')

    # Merge all varcopes into a single 4D matrix across subject runs per voxel.
    varcopemerge = MapNode(Merge(), iterfield=['in_files'],
                           name='varcopemerge')
    varcopemerge.inputs.dimension = 't'
    varcopemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    varcopemerge.inputs.ignore_exception = False
    varcopemerge.inputs.output_type = 'NIFTI_GZ'
    varcopemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'varcopes', varcopemerge,
                       'in_files')

    # Define contrasts from the names of the copes.
    getcontrasts = Node(Function(input_names=['data_inputs'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', doublelist), getcontrasts,
                       'data_inputs')

    # Rename output files to be more descriptive (DataSink substitutions).
    getsubs = Node(Function(input_names=['subject_id', 'cons'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    scndlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    # l2model node for fixed-effects analysis (within subject, across runs).
    l2model = MapNode(L2Model(), iterfield=['num_copes'], name='l2model')
    l2model.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', num_copes), l2model, 'num_copes')

    # FLAMEO node running the fixed-effects ('fe') analysis.
    flameo_fe = MapNode(FLAMEO(),
                        iterfield=[
                            'cope_file', 'var_cope_file', 'dof_var_cope_file',
                            'design_file', 't_con_file', 'cov_split_file'
                        ],
                        name='flameo_fe')
    flameo_fe.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    flameo_fe.inputs.ignore_exception = False
    flameo_fe.inputs.log_dir = 'stats'
    flameo_fe.inputs.output_type = 'NIFTI_GZ'
    flameo_fe.inputs.run_mode = 'fe'
    flameo_fe.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(varcopemerge, 'merged_file', flameo_fe,
                       'var_cope_file')
    scndlvl_wf.connect(l2model, 'design_mat', flameo_fe, 'design_file')
    scndlvl_wf.connect(l2model, 'design_con', flameo_fe, 't_con_file')
    scndlvl_wf.connect(l2model, 'design_grp', flameo_fe, 'cov_split_file')
    scndlvl_wf.connect(gendofvolume, 'dof_volumes', flameo_fe,
                       'dof_var_cope_file')
    scndlvl_wf.connect(datasource, 'mask_file', flameo_fe, 'mask_file')
    scndlvl_wf.connect(copemerge, 'merged_file', flameo_fe, 'cope_file')

    # Outputspec node collecting FLAMEO results.
    scndlvl_outputspec = Node(IdentityInterface(
        fields=['res4d', 'copes', 'varcopes', 'zstats', 'tstats'],
        mandatory_inputs=True),
                              name='scndlvl_outputspec')
    scndlvl_wf.connect(flameo_fe, 'res4d', scndlvl_outputspec, 'res4d')
    scndlvl_wf.connect(flameo_fe, 'copes', scndlvl_outputspec, 'copes')
    scndlvl_wf.connect(flameo_fe, 'var_copes', scndlvl_outputspec, 'varcopes')
    scndlvl_wf.connect(flameo_fe, 'zstats', scndlvl_outputspec, 'zstats')
    scndlvl_wf.connect(flameo_fe, 'tstats', scndlvl_outputspec, 'tstats')

    # Datasink node writing results under sink_directory/<subject_id>.
    sinkd = Node(DataSink(), name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    scndlvl_wf.connect(scndlvl_outputspec, 'copes', sinkd, 'fixedfx.@copes')
    scndlvl_wf.connect(scndlvl_outputspec, 'varcopes', sinkd,
                       'fixedfx.@varcopes')
    scndlvl_wf.connect(scndlvl_outputspec, 'tstats', sinkd, 'fixedfx.@tstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'zstats', sinkd, 'fixedfx.@zstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'res4d', sinkd, 'fixedfx.@pvals')
    scndlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')

    return scndlvl_wf