# Fractional anisotropy (FA) map
tensor2faNode = Node(mrtrix.Tensor2FractionalAnisotropy(), name='tensor_2_FA')

# Remove noisy background by multiplying the FA image with the binary brainmask
mrmultNode = Node(Function(input_names=['in1', 'in2', 'out_file'],
                           output_names=['out_file'],
                           function=multiplyMRTrix),
                  name='mrmult')

# Eigenvector (EV) map
tensor2vectorNode = Node(mrtrix.Tensor2Vector(), name='tensor_2_vector')

# Scale the EV map by the FA image
scaleEvNode = mrmultNode.clone('scale_ev')

# Mask of single-fibre voxels
erodeNode = Node(mrtrix.Erode(), name='erode_wmmask')
erodeNode.inputs.number_of_passes = number_of_passes

cleanFaNode = mrmultNode.clone('multiplyFA_Mask')

thresholdFANode = Node(mrtrix.Threshold(), name='threshold_FA')
thresholdFANode.inputs.absolute_threshold_value = absolute_threshold_value

# Response function coefficient
estResponseNode = Node(mrtrix.EstimateResponseForSH(),
                       name='estimate_deconv_response')

# CSD computation
csdNode = Node(mrtrix.ConstrainedSphericalDeconvolution(), name='compute_CSD')
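# A minimal wiring sketch (not from the original script): how these nodes
# could be chained inside a nipype Workflow, assuming a hypothetical upstream
# `dwi2tensorNode` (mrtrix.DWI2Tensor) and a `brainmaskNode` exposing the
# binary mask as 'out_file'. The MRtrix output field names ('tensor', 'FA',
# 'vector') should be verified against the installed nipype version.
wf = Workflow(name='csd_preproc')
wf.connect(dwi2tensorNode, 'tensor', tensor2faNode, 'in_file')
wf.connect(tensor2faNode, 'FA', mrmultNode, 'in1')
wf.connect(brainmaskNode, 'out_file', mrmultNode, 'in2')
wf.connect(dwi2tensorNode, 'tensor', tensor2vectorNode, 'in_file')
wf.connect(tensor2vectorNode, 'vector', scaleEvNode, 'in1')
wf.connect(mrmultNode, 'out_file', scaleEvNode, 'in2')
wf.connect(mrmultNode, 'out_file', thresholdFANode, 'in_file')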
def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None,
                               l1output_dir=None, out_dir=None,
                               no_reversal=False, plugin=None,
                               plugin_args=None, flamemodel='flame1',
                               nonparametric=False, use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    meta_workflow.base_dir = work_dir
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(
            dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' %
                          (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(fields=['model_id', 'task_id',
                                                       'dataset_dir',
                                                       'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir

            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']),
                      name='grabber')
            dg.inputs.template = os.path.join(
                l1output_dir,
                'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [
                    ['model_id', 'task_id', subj_list, '', 'spm/', '',
                     'cope_id', '']]
                dg.inputs.template_args['varcopes'] = [
                    ['model_id', 'task_id', subj_list, 'var', 'spm/', 'var',
                     'cope_id', '.gz']]
            else:
                dg.inputs.template_args['copes'] = [
                    ['model_id', 'task_id', subj_list, '', '', '',
                     'cope_id', '.gz']]
                dg.inputs.template_args['varcopes'] = [
                    ['model_id', 'task_id', subj_list, 'var', '', 'var',
                     'cope_id', '.gz']]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]

            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')

            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'),
                                     name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

            mask_file = fsl.Info.standard_image(
                'MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame,
                           'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')

            if nonparametric:
                palm = Node(Function(input_names=['cope_file', 'design_file',
                                                  'contrast_file',
                                                  'group_file', 'mask_file',
                                                  'cluster_threshold'],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {
                    'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G',
                    'overwrite': True}
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')

            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file

            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest, 'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True

            wk.connect(flame, 'zstats', cluster, 'in_file')

            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval, 'in_file')

            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(
                out_dir, 'task%03d' % task, contrast[0][0])
            sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                           ('_maths_', '_reversed_')]

            wk.connect(flame, 'zstats', sinker, 'stats')
            wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
            wk.connect(cluster, 'index_file', sinker, 'stats.@index')
            wk.connect(cluster, 'localmax_txt_file', sinker,
                       'stats.@localmax')
            if nonparametric:
                wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

            if not no_reversal:
                zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
                zstats_reverse.inputs.operation = 'mul'
                zstats_reverse.inputs.operand_value = -1
                wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

                cluster2 = cluster.clone(name='cluster2')
                wk.connect(smoothest, 'dlh', cluster2, 'dlh')
                wk.connect(smoothest, 'volume', cluster2, 'volume')
                wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

                ztopval2 = ztopval.clone(name='ztopval2')
                wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

                wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
                wk.connect(cluster2, 'threshold_file', sinker,
                           'stats.@neg_thr')
                wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
                wk.connect(cluster2, 'localmax_txt_file', sinker,
                           'stats.@neg_localmax')
            meta_workflow.add_nodes([wk])
    return meta_workflow
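# A minimal usage sketch (paths and IDs are hypothetical; the module-level
# `work_dir` referenced inside the function is assumed to be defined):
work_dir = '/scratch/l2_work'
wf = group_multregress_openfmri(dataset_dir='/data/ds000114', model_id=1,
                                task_id=[1], l1output_dir='/data/l1output',
                                out_dir='/data/l2output')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})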
def group_onesample_openfmri(dataset_dir, model_id=None, task_id=None,
                             l1output_dir=None, out_dir=None,
                             no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)

    info = Node(util.IdentityInterface(fields=['model_id', 'task_id',
                                               'dataset_dir']),
                name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(
        l1output_dir, 'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [['model_id', 'task_id', '', '',
                                         'cope_id']]
    dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', 'var',
                                            'var', 'cope_id']]
    dg.iterables = ('cope_id', num_copes)
    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')
    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')
    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'
    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker,
                   'stats.@neg_localmax')
    return wk
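# `wk.connect(dg, ('copes', get_len), model, 'num_copes')` above uses nipype's
# connect-with-function syntax: the grabbed 'copes' list is passed through a
# helper before it reaches L2Model.num_copes. The helper is defined elsewhere
# in the original script; a minimal sketch of what it plausibly does:
def get_len(copes):
    from nipype.utils.filemanip import filename_to_list
    return len(filename_to_list(copes))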
def create_workflow(files,
                    subject_id,
                    n_vol=0,
                    despike=True,
                    TR=None,
                    slice_times=None,
                    slice_thickness=None,
                    fieldmap_images=[],
                    norm_threshold=1,
                    num_components=6,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    FM_TEdiff=2.46,
                    FM_sigma=2,
                    FM_echo_spacing=.7,
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Skip starting volumes
    remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                         iterfield=['in_file'],
                         name="remove_volumes")
    remove_vol.inputs.in_file = files

    # Run AFNI's despike. This is always run; however, whether its output is
    # fed to realign depends on the input configuration
    despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                       iterfield=['in_file'],
                       name='despike')
    #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='}
    wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

    # Run Nipy joint slice timing and realignment algorithm
    realign = Node(nipy.SpaceTimeRealigner(), name='realign')
    realign.inputs.tr = TR
    realign.inputs.slice_times = slice_times
    realign.inputs.slice_info = 2

    if despike:
        wf.connect(despiker, 'out_file', realign, 'in_file')
    else:
        wf.connect(remove_vol, 'roi_file', realign, 'in_file')

    # Compute TSNR on realigned data regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, 'out_file', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    # Coregister the median to the surface
    register = Node(freesurfer.BBRegister(), name='bbregister')
    register.inputs.subject_id = subject_id
    register.inputs.init = 'fsl'
    register.inputs.contrast_type = 't2'
    register.inputs.out_fsl_file = True
    register.inputs.epi_mask = True

    # Compute fieldmaps and unwarp using them
    if fieldmap_images:
        fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp')
        fieldmap.inputs.tediff = FM_TEdiff
        fieldmap.inputs.esp = FM_echo_spacing
        fieldmap.inputs.sigma = FM_sigma
        fieldmap.inputs.mag_file = fieldmap_images[0]
        fieldmap.inputs.dph_file = fieldmap_images[1]
        wf.connect(calc_median, 'median_file', fieldmap, 'exf_file')

        dewarper = MapNode(interface=fsl.FUGUE(),
                           iterfield=['in_file'],
                           name='dewarper')
        wf.connect(tsnr, 'detrended_file', dewarper, 'in_file')
        wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file')
        wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file')
        wf.connect(fieldmap, 'exfdw', register, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', register, 'source_file')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    # Extract wm+csf, brain masks by eroding freesurfer labels and then
    # transform the masks into the space of the median
    wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask')
    mask = wmcsf.clone('anatmask')
    wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                       interp='nearest'),
                          name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR']
    wmcsf.inputs.wm_ven_csf = True
    wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    wmcsf.inputs.binary_file = 'wmcsf.nii.gz'
    wmcsf.inputs.erode = int(np.ceil(slice_thickness))
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file')
    wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    mask.inputs.binary_file = 'mask.nii.gz'
    mask.inputs.dilate = int(np.ceil(slice_thickness)) + 1
    mask.inputs.erode = int(np.ceil(slice_thickness))
    mask.inputs.min = 0.5
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file')
    masktransform = wmcsftransform.clone("masktransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', masktransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', masktransform, 'source_file')
    wf.connect(register, 'out_reg_file', masktransform, 'reg_file')
    wf.connect(mask, 'binary_file', masktransform, 'target_file')

    # Compute Art outliers
    art = Node(interface=ArtifactDetect(use_differences=[True, False],
                                        use_norm=True,
                                        norm_threshold=norm_threshold,
                                        zintensity_threshold=3,
                                        parameter_source='NiPy',
                                        bound_by_brainmask=True,
                                        save_plot=False,
                                        mask_type='file'),
               name="art")
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', art, 'realigned_files')
    else:
        wf.connect(tsnr, 'detrended_file', art, 'realigned_files')
    wf.connect(realign, 'par_file', art, 'realignment_parameters')
    wf.connect(masktransform, 'transformed_file', art, 'mask_file')

    # Compute motion regressors
    motreg = Node(Function(input_names=['motion_params', 'order',
                                        'derivatives'],
                           output_names=['out_files'],
                           function=motion_regressors,
                           imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'par_file', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
                                               'outliers'],
                                  output_names=['out_files'],
                                  function=build_filter1,
                                  imports=imports),
                         name='makemotionbasedfilter')
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    # Filter the motion and art confounds
    filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz', demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtermotion')
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', filter1, 'in_file')
    else:
        wf.connect(tsnr, 'detrended_file', filter1, 'in_file')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(input_names=['realigned_file',
                                                  'mask_file',
                                                  'num_components'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file')

    # Filter noise components
    filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtercompcorr')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(masktransform, 'transformed_file', filter2, 'mask')

    # Smoothing using surface and volume smoothing
    smooth = MapNode(freesurfer.Smooth(), iterfield=['in_file'],
                     name='smooth')
    smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1)
    if surf_fwhm is None:
        surf_fwhm = 5 * slice_thickness
    smooth.inputs.surface_fwhm = surf_fwhm
    if vol_fwhm is None:
        vol_fwhm = 2 * slice_thickness
    smooth.inputs.vol_fwhm = vol_fwhm
    wf.connect(filter2, 'out_res', smooth, 'in_file')
    wf.connect(register, 'out_reg_file', smooth, 'reg_file')

    # Bandpass filter the data
    bandpass = MapNode(fsl.TemporalFilter(),
                       iterfield=['in_file'],
                       name='bandpassfilter')
    if highpass_freq < 0:
        bandpass.inputs.highpass_sigma = -1
    else:
        bandpass.inputs.highpass_sigma = 1. / (2 * TR * highpass_freq)
    if lowpass_freq < 0:
        bandpass.inputs.lowpass_sigma = -1
    else:
        bandpass.inputs.lowpass_sigma = 1. / (2 * TR * lowpass_freq)
    wf.connect(smooth, 'smoothed_file', bandpass, 'in_file')

    # Convert aparc to subject functional space
    aparctransform = wmcsftransform.clone("aparctransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', aparctransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', aparctransform, 'source_file')
    wf.connect(register, 'out_reg_file', aparctransform, 'reg_file')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparctransform,
               'target_file')

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True,
                                              default_color_table=True),
                          iterfield=['in_file'],
                          name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))
    wf.connect(aparctransform, 'transformed_file', sampleaparc,
               'segmentation_file')
    wf.connect(bandpass, 'out_file', sampleaparc, 'in_file')

    # Sample the time series onto the surface of the target subject. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    #samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(bandpass, 'out_file', samplerlh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(bandpass, 'out_file', samplerrh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Compute registration between the subject's structural and MNI template.
    # This is currently set to perform a very quick registration. However, the
    # registration can be made significantly more accurate for cortical
    # structures by increasing the number of iterations.
    # All parameters are set using the example from:
    # https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,),
                                       (0.2, 3.0, 0.0)]
    # reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 +
    #                                    [[100, 50, 30]])
    reg.inputs.number_of_iterations = [[100, 100, 100]] * 3 + [[100, 20, 10]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = False
    reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 3 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 4
    reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 4
    reg.inputs.use_histogram_matching = [False] * 3 + [True]
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.fixed_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    reg.inputs.num_threads = 4
    reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}

    # Convert T1.mgz to nifti for using with ANTS
    convert = Node(freesurfer.MRIConvert(out_type='niigz'),
                   name='convert2nii')
    wf.connect(fssource, 'T1', convert, 'in_file')

    # Mask the T1.mgz file with the brain mask computed earlier
    maskT1 = Node(fsl.BinaryMaths(operation='mul'), name='maskT1')
    wf.connect(mask, 'binary_file', maskT1, 'operand_file')
    wf.connect(convert, 'out_file', maskT1, 'in_file')
    wf.connect(maskT1, 'out_file', reg, 'moving_image')

    # Convert the BBRegister transformation to ANTS ITK format
    convert2itk = MapNode(C3dAffineTool(),
                          iterfield=['transform_file', 'source_file'],
                          name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    wf.connect(register, 'out_fsl_file', convert2itk, 'transform_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', convert2itk, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', convert2itk, 'source_file')
    wf.connect(convert, 'out_file', convert2itk, 'reference_file')

    # Concatenate the affine and ants transforms into a list
    pickfirst = lambda x: x[0]
    merge = MapNode(Merge(2), iterfield=['in2'], name='mergexfm')
    wf.connect(convert2itk, 'itk_transform', merge, 'in2')
    wf.connect(reg, ('composite_transform', pickfirst), merge, 'in1')

    # Apply the combined transform to the time series file
    sample2mni = MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image', 'transforms'],
                         name='sample2mni')
    sample2mni.inputs.input_image_type = 3
    sample2mni.inputs.interpolation = 'BSpline'
    sample2mni.inputs.invert_transform_flags = [False, False]
    sample2mni.inputs.reference_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    sample2mni.inputs.terminal_output = 'file'
    wf.connect(bandpass, 'out_file', sample2mni, 'input_image')
    wf.connect(merge, 'out', sample2mni, 'transforms')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file',
                                           'indices'],
                              output_names=['out_file'],
                              function=extract_subrois,
                              imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                             list(range(49, 55)) + [58])
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm.nii.gz'))
    wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = [('_target_subject_', '')]
    datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike')
    wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
    wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr')
    wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean')
    wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', datasink, 'resting.reference')
    else:
        wf.connect(calc_median, 'median_file', datasink, 'resting.reference')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink,
               'resting.qa.art.@outlier_files')
    wf.connect(mask, 'binary_file', datasink, 'resting.mask')
    wf.connect(masktransform, 'transformed_file', datasink,
               'resting.mask.@transformed_file')
    wf.connect(register, 'out_reg_file', datasink,
               'resting.registration.bbreg')
    wf.connect(reg, ('composite_transform', pickfirst), datasink,
               'resting.registration.ants')
    wf.connect(register, 'min_cost_file', datasink,
               'resting.qa.bbreg.@mincost')
    wf.connect(smooth, 'smoothed_file', datasink,
               'resting.timeseries.fullpass')
    wf.connect(bandpass, 'out_file', datasink,
               'resting.timeseries.bandpassed')
    wf.connect(sample2mni, 'output_image', datasink, 'resting.timeseries.mni')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = [('_target_subject_', '')]
    datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
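# Note on the bandpass settings above: fslmaths' temporal filter expects
# Gaussian sigmas in volumes, not cutoff frequencies in Hz, hence
# sigma = 1 / (2 * TR * cutoff_freq). A quick sanity check with illustrative
# values (not from the original script):
TR = 2.0                # seconds per volume
highpass_freq = 0.01    # Hz
sigma = 1. / (2 * TR * highpass_freq)
assert abs(sigma - 25.0) < 1e-9  # 0.01 Hz at TR=2 s -> sigma of 25 volumes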
def create_workflow(files,
                    anat_file,
                    subject_id,
                    TR,
                    num_slices,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (list(range(1, num_slices + 1, 2)) +
                                       list(range(2, num_slices + 1, 2)))
    slice_timing.inputs.ref_slice = int(num_slices / 2)

    """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body
    registration of the functional data to the structural data.
    """
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estimate'
    coregister.inputs.target = anat_file

    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'

    segment = Node(interface=spm.Segment(), name="segment")
    segment.inputs.save_bias_corrected = True
    segment.inputs.data = anat_file

    """Uncomment the following line for faster execution
    """
    #segment.inputs.gaussians_per_class = [1, 1, 1, 4]

    """Warp functional and structural data to SPM's T1 template using
    :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes
    the template image, T1.nii.
    """
    normalize_func = Node(interface=spm.Normalize(), name="normalize_func")
    normalize_func.inputs.jobtype = "write"
    normalize_func.inputs.write_voxel_sizes = [2., 2., 2.]

    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    """Here we are connecting all the nodes together. Notice that we add the
    merge node only if you choose to use 4D. Also `get_vox_dims` function is
    passed along the input volume of normalise to set the optimal voxel sizes.
    """
    wf.connect([(name_unique, realign, [('out_file', 'in_files')]),
                (realign, coregister, [('mean_image', 'source')]),
                (segment, normalize_func, [('transformation_mat',
                                            'parameter_file')]),
                (realign, slice_timing, [('realigned_files', 'in_files')]),
                (slice_timing, normalize_func, [('timecorrected_files',
                                                 'apply_to_files')]),
                (normalize_func, smooth, [('normalized_files', 'in_files')]),
                (realign, art, [('realignment_parameters',
                                 'realignment_parameters')]),
                (smooth, art, [('smoothed_files', 'realigned_files')]),
                ])

    def selectN(files, N=1):
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(filename_to_list(files)[:N])

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(normalize_func, ('normalized_files', selectN, 1),
               mask, 'in_file')

    # get segmentation in normalized functional space
    segment.inputs.wm_output_type = [False, False, True]
    segment.inputs.csf_output_type = [False, False, True]
    segment.inputs.gm_output_type = [False, False, True]

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    merge = Node(Merge(3), name='merge')
    wf.connect(segment, 'native_wm_image', merge, 'in1')
    wf.connect(segment, 'native_csf_image', merge, 'in2')
    wf.connect(segment, 'native_gm_image', merge, 'in3')

    normalize_segs = Node(interface=spm.Normalize(), name="normalize_segs")
    normalize_segs.inputs.jobtype = "write"
    normalize_segs.inputs.write_voxel_sizes = [2., 2., 2.]
    wf.connect(merge, 'out', normalize_segs, 'apply_to_files')
    wf.connect(segment, 'transformation_mat', normalize_segs,
               'parameter_file')

    # binarize and erode
    bin_and_erode = MapNode(fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='bin_and_erode')
    bin_and_erode.inputs.op_string = '-thr 0.99 -bin -ero'
    wf.connect(normalize_segs, 'normalized_files', bin_and_erode, 'in_file')

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(input_names=['motion_params', 'order',
                                        'derivatives'],
                           output_names=['out_files'],
                           function=motion_regressors,
                           imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
                                               'outliers', 'detrend_poly'],
                                  output_names=['out_files'],
                                  function=build_filter1,
                                  imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    # Filter the motion and art confounds and detrend
    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')
    wf.connect(normalize_func, 'normalized_files', filter1, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    #wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(input_names=['realigned_file',
                                                  'mask_file',
                                                  'num_components',
                                                  'extra_regressors'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(bin_and_erode, ('out_file', selectN, 2),
               createfilter2, 'mask_file')

    # Filter noise components from unsmoothed data
    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(normalize_func, 'normalized_files', filter2, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename,
                                '_unsmooth_cleaned'),
               filter2, 'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    # Filter noise components from smoothed data
    filter3 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_smooth')
    wf.connect(smooth, ('smoothed_files', rename, '_cleaned'),
               filter3, 'out_res_name')
    wf.connect(smooth, 'smoothed_files', filter3, 'in_file')
    wf.connect(createfilter2, 'out_files', filter3, 'design')
    wf.connect(mask, 'mask_file', filter3, 'mask')

    # Bandpass filter the data
    bandpass1 = Node(Function(input_names=['files', 'lowpass_freq',
                                           'highpass_freq', 'fs'],
                              output_names=['out_files'],
                              function=bandpass_filter,
                              imports=imports),
                     name='bandpass_unsmooth')
    bandpass1.inputs.fs = 1. / TR
    bandpass1.inputs.highpass_freq = highpass_freq
    bandpass1.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass1, 'files')

    bandpass2 = bandpass1.clone(name='bandpass_smooth')
    wf.connect(filter3, 'out_res', bandpass2, 'files')

    bandpass = Node(Function(input_names=['in1', 'in2'],
                             output_names=['out_file'],
                             function=merge_files,
                             imports=imports),
                    name='bandpass_merge')
    wf.connect(bandpass1, 'out_files', bandpass, 'in1')
    wf.connect(bandpass2, 'out_files', bandpass, 'in2')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    #datasink.inputs.substitutions = [('_target_subject_', '')]
    #datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink,
               'resting.qa.art.@outlier_files')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.fullpass')
    wf.connect(bin_and_erode, 'out_file', datasink, 'resting.mask_files')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(filter3, 'out_f', datasink, 'resting.qa.compmaps.@sF')
    wf.connect(filter3, 'out_pf', datasink, 'resting.qa.compmaps.@sp')
    wf.connect(bandpass, 'out_file', datasink,
               'resting.timeseries.bandpassed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    return wf
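# A minimal usage sketch (hypothetical paths; the module-level helpers used
# above -- motion_regressors, build_filter1, extract_noise_components,
# bandpass_filter, rename, imports -- are assumed to be defined earlier in
# the original script):
wf = create_workflow(files=['/data/sub01/rest1.nii', '/data/sub01/rest2.nii'],
                     anat_file='/data/sub01/anat.nii',
                     subject_id='sub01', TR=2.0, num_slices=34,
                     vol_fwhm=6.0, lowpass_freq=0.1, highpass_freq=0.01,
                     sink_directory='/output')
wf.base_dir = '/scratch/spm_resting'
wf.run(plugin='MultiProc')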
bbregNode.inputs.out_fsl_file = True
bbregNode.inputs.args = "--tol1d 1e-3"
# bbregNode.inputs.subject_id = reconallFolderName

# ### Surface2Vol

# Transform Left Hemisphere
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(),
                       name='surf2vol_lh')
surf2volNode_lh.inputs.hemi = 'lh'
surf2volNode_lh.inputs.mkmask = True
# surf2volNode_lh.inputs.subject_id = reconallFolderName
surf2volNode_lh.inputs.vertexvol_file = 'test'

# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone('surf2vol_rh')
surf2volNode_rh.inputs.hemi = 'rh'

# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name='mergeHemis')
mergeHemisNode.inputs.operation = 'add'
mergeHemisNode.inputs.output_type = 'NIFTI_GZ'

# ### Registration

# Rotate high-res (1mm) WM-border to match dwi data w/o resampling
applyReg_anat2diff_1mm = Node(freesurfer.ApplyVolTransform(),
                              name='wmoutline2diff_1mm')
applyReg_anat2diff_1mm.inputs.inverse = True
applyReg_anat2diff_1mm.inputs.interp = 'nearest'
applyReg_anat2diff_1mm.inputs.no_resample = True
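# The snippet above only configures the nodes; a minimal wiring sketch is
# shown below. The connections are an assumption on my part (the actual
# ones are made elsewhere in the script), and the workflow name `wfSurf`
# is hypothetical.
from nipype.pipeline.engine import Node, Workflow

wfSurf = Workflow(name='surface_masks')
# Sum the left and right hemisphere masks into a single WM-border volume
wfSurf.connect(surf2volNode_lh, 'transformed_file',
               mergeHemisNode, 'in_file')
wfSurf.connect(surf2volNode_rh, 'transformed_file',
               mergeHemisNode, 'operand_file')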
bbregNode.inputs.subject_id = reconallFolderName

# ### Surface2Vol

# Transform Left Hemisphere
lhWhiteFilename = 'lh_white.nii.gz'
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(),
                       name='surf2vol_lh')
surf2volNode_lh.inputs.hemi = 'lh'
surf2volNode_lh.inputs.mkmask = True
surf2volNode_lh.inputs.subject_id = reconallFolderName

# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone('surf2vol_rh')
surf2volNode_rh.inputs.hemi = 'rh'

# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name='mergeHemis')
mergeHemisNode.inputs.operation = 'add'
mergeHemisNode.inputs.output_type = 'NIFTI_GZ'

# ### Registration

# Rotate high-res (1mm) WM-border to match dwi data w/o resampling
applyReg_anat2diff_1mm = Node(freesurfer.ApplyVolTransform(),
                              name='wmoutline2diff_1mm')
applyReg_anat2diff_1mm.inputs.inverse = True
def group_onesample_openfmri(dataset_dir, model_id=None, task_id=None,
                             l1output_dir=None, out_dir=None,
                             no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)

    info = Node(util.IdentityInterface(fields=['model_id', 'task_id',
                                               'dataset_dir']),
                name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(
        l1output_dir, 'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [['model_id', 'task_id', '', '',
                                         'cope_id']]
    dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', 'var',
                                            'var', 'cope_id']]
    dg.iterables = ('cope_id', num_copes)
    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')
    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')
    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker,
                   'stats.@neg_localmax')

    return wk
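# A hedged usage sketch for the factory above: the dataset path, model and
# task ids, and output directories are hypothetical placeholders, and
# `work_dir` must be defined at module level before the call.
wk = group_onesample_openfmri(dataset_dir='/data/openfmri/ds000114',
                              model_id=1, task_id=1,
                              l1output_dir='/output/l1',
                              out_dir='/output/group')
wk.run(plugin='MultiProc', plugin_args={'n_procs': 4})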
# reconallNode.plugin_args = {'overwrite': True,
#                             'oarsub_args': '-l nodes=1,walltime=16:00:00'}

# Convert the T1 mgz image to nifti format for later usage
# mriConverter = Node(freesurfer.preprocess.MRIConvert(),
#                     name='convertAparcAseg')
# mriConverter.inputs.out_type = 'niigz'
# mriConverter.inputs.out_orientation = 'RAS'
mriConverter = Node(Function(input_names=['in_file', 'out_file'],
                             output_names=['out_file'],
                             function=mri_convert_bm),
                    name='convertAparcAseg')

# Convert the Brainmask file
# brainmaskConv = Node(freesurfer.preprocess.MRIConvert(),
#                      name='convertBrainmask')
# brainmaskConv.inputs.out_type = 'niigz'
# brainmaskConv.inputs.out_orientation = 'RAS'
brainmaskConv = mriConverter.clone('convertBrainmask')

# ### Diffusion Data (dwMRI) preprocessing
# First extract the diffusion vectors and the pulse intensity (bvec and bval)
# Use dcm2nii for this task
dcm2niiNode = Node(Dcm2nii(), name='dcm2niiAndBvecs')
dcm2niiNode.inputs.gzip_output = True
dcm2niiNode.inputs.date_in_filename = False
dcm2niiNode.inputs.events_in_filename = False

# Extract the first image of the DTI series, i.e. the b0 image
extrctB0Node = Node(Function(input_names=['dwMriFile'],
                             output_names=['b0'],
                             function=extractB0),
                    name='Extract_b0')
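# The `extractB0` helper referenced above is defined elsewhere in the
# script. A minimal sketch of what such a function could look like follows,
# assuming the b0 volume is the first frame of the 4D DWI series; this is
# an illustration, not the script's actual implementation.
def extractB0(dwMriFile):
    # Imports live inside the function so a Nipype Function node can
    # serialize it.
    import os
    import nibabel as nib
    import numpy as np

    img = nib.load(dwMriFile)
    data = np.asanyarray(img.dataobj)
    # Keep only the first volume (assumed to be the b0 acquisition)
    b0_img = nib.Nifti1Image(data[..., 0], img.affine, img.header)
    b0 = os.path.abspath('b0.nii.gz')
    nib.save(b0_img, b0)
    return b0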
dwi2tensorNode = Node(mrtrix.DWI2Tensor(), name='dwi_2_tensor')
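# A hedged wiring sketch for the tensor step: the workflow object `wfDwi`
# and the FSL2MRTrix gradient-table node are assumptions on my part (the
# real connections are made elsewhere in the script).
from nipype.interfaces import mrtrix
from nipype.pipeline.engine import Node, Workflow

fsl2mrtrixNode = Node(mrtrix.FSL2MRTrix(), name='fsl_2_mrtrix')
wfDwi = Workflow(name='dwi_preproc')
# dcm2nii emits the converted NIfTI plus bvec/bval files; MRtrix expects a
# single gradient encoding table instead.
wfDwi.connect(dcm2niiNode, 'bvecs', fsl2mrtrixNode, 'bvec_file')
wfDwi.connect(dcm2niiNode, 'bvals', fsl2mrtrixNode, 'bval_file')
wfDwi.connect(dcm2niiNode, 'converted_files', dwi2tensorNode, 'in_file')
wfDwi.connect(fsl2mrtrixNode, 'encoding_file',
              dwi2tensorNode, 'encoding_file')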