def specify_model(layout, func_files, info):
    """Run a nipype SpecifyModel for every (subject, run) pair.

    Parameters
    ----------
    layout : BIDSLayout-like object used to look up the RepetitionTime.
    func_files : nested sequence, indexed [subject][run], of BIDS file objects.
    info : nested sequence, indexed [subject][run], of subject_info Bunches.

    Returns
    -------
    list of lists: one inner list per subject, holding the SpecifyModel
    run results for each non-excluded run.
    """
    all_results = []
    for subj_idx in range(len(SUBJECTS)):
        subject_results = []
        all_results.append(subject_results)
        for run_idx in range(num_runs):
            # EXCLUDING maps a subject index to the single run to skip.
            if subj_idx in EXCLUDING and EXCLUDING[subj_idx] == run_idx:
                continue
            bold = func_files[subj_idx][run_idx]
            # Single-run tasks omit the run entity from the preprocessed filename.
            if num_runs == 1:
                filename = 'sub-%s_task-%s_bold_space-T1w_preproc.nii.gz' % (
                    bold.subject, task)
            else:
                filename = 'sub-%s_task-%s_run-%s_bold_space-T1w_preproc.nii.gz' % (
                    bold.subject, task, str(bold.run).zfill(2))
            spec = model.SpecifyModel()
            spec.inputs.input_units = 'secs'
            spec.inputs.functional_runs = [
                os.path.join(PREPROC_DIR, 'sub-%s' % bold.subject, 'func',
                             filename)
            ]
            # TR comes from the BIDS sidecar of the raw run.
            spec.inputs.time_repetition = layout.get_metadata(
                bold.path)['RepetitionTime']
            spec.inputs.high_pass_filter_cutoff = 128.
            spec.inputs.subject_info = info[subj_idx][run_idx]
            subject_results.append(spec.run())
    return all_results
def model_fitting(source_img, prepped_img, subject_info, task):
    """Skull-strip a preprocessed BOLD image and fit a single-run FSL GLM.

    Parameters
    ----------
    source_img : BIDS image object; ``entities['RepetitionTime']`` supplies the TR.
    prepped_img : path to the preprocessed functional image.
    subject_info : nipype Bunch with conditions/onsets/durations.
    task : task name; used for the output folder and the contrast label.

    Returns
    -------
    Path to ``thresh_zstat1.nii.gz`` inside the FEAT output directory.
    """
    taskdir = os.path.join(outputdir, task)
    # makedirs(exist_ok=True) avoids the exists()/mkdir() race of the original.
    os.makedirs(taskdir, exist_ok=True)

    # Skull strip the preprocessed BOLD.
    bet = fsl.BET()
    bet.inputs.in_file = prepped_img
    bet.inputs.frac = 0.7
    bet.inputs.functional = True
    bet.inputs.out_file = os.path.join(taskdir, task + "_input_functional_bet.nii.gz")
    bet_res = bet.run()
    bettedinput = bet_res.outputs.out_file

    # Set up contrasts: a single task-vs-baseline t-contrast.
    task_vs_baseline = [task + " vs baseline", 'T', [task], [1]]
    contrasts = [task_vs_baseline]

    # Generate the model fitting workflow.
    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)
    # Generate design info.
    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    # Generate fsf file.
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    # Generate .mat file.
    modelgen = pe.MapNode(
        interface=fsl.FEATModel(),
        name='modelgen',
        iterfield=['fsf_file', 'ev_files'])
    # FEAT statistics. NOTE: the original passed iterfield= here, but that is
    # a MapNode-only parameter and has no effect on a plain Node; dropped.
    feat = pe.Node(
        interface=fsl.FEAT(),
        name='feat')

    # Put it all together.
    modelfit.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (level1design, feat, [('fsf_files', 'fsf_file')])])

    # Define inputs to workflow.
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.functional_runs = bettedinput
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    modelspec.inputs.subject_info = subject_info

    level1design.inputs.interscan_interval = source_img.entities['RepetitionTime']
    level1design.inputs.bases = {'gamma': {'gammasigma': 3, 'gammadelay': 6,
                                           'derivs': True}}
    level1design.inputs.contrasts = contrasts
    level1design.inputs.model_serial_correlations = True

    # Run the model-fitting pipeline. Main outputs are a feat directory
    # (w/ functional img) and a design.mat file.
    res = modelfit.run()

    # Locate the FEAT node by name instead of by position in the execution
    # graph: list(res.nodes)[3] silently breaks if node order changes.
    feat_node = next(node for node in res.nodes if node.name == 'feat')
    feat_dir = feat_node.result.outputs.feat_dir
    thresh_img = os.path.join(feat_dir, "thresh_zstat1.nii.gz")
    return thresh_img
def create_design(tsv_file, functional_run):
    """Build FSL design files (mat/con/fts) from a BIDS events TSV.

    Rows with positive gain-loss difference become the 'gain' regressor,
    negative ones the 'loss' regressor; both are written as three-column
    event files in the current working directory. The FEAT model files are
    produced via SpecifyModel -> Level1Design -> FEATModel.

    Returns
    -------
    (design_file, tcon_file, fcon_file) : absolute paths to run0.mat,
    run0.con and run0.fts.
    """
    import os
    import numpy as np
    import nipype.interfaces.fsl as fsl
    from nipype.algorithms import modelgen
    from nipype.interfaces.base import Bunch
    # print(os.getcwd())
    # os.chdir('/Users/amr/Documents/events_csv_create/subj_001_session_002')
    # tsv_file = '/Users/amr/Documents/events_csv_create/subj_001_session_002/sub-001_task-MGT_run-02_events.tsv'

    events = np.genfromtxt(fname=tsv_file, delimiter="\t", skip_header=1,
                           filling_values=1)
    # Columns (0, 1, 5) = onset, duration, amplitude; split rows by the sign
    # of column2 - column3. Rows where the difference is exactly 0 fall in
    # neither event file.
    diff = events[:, 2] - events[:, 3]
    gain = events[diff > 0][:, (0, 1, 5)]
    np.savetxt('gain.txt', gain, delimiter='\t', fmt='%f')
    loss = events[diff < 0][:, (0, 1, 5)]
    np.savetxt('loss.txt', loss, delimiter='\t', fmt='%f')
    print(gain.shape, loss.shape)

    # Name of the contrasts, names of the event files.
    cont1 = ('gain activation', 'T', ['gain', 'loss'], [1, 0])
    cont2 = ('loss activation', 'T', ['gain', 'loss'], [0, 1])
    cont3 = ('Task', 'F', [cont1, cont2])
    contrasts = [cont1, cont2, cont3]

    # From here on refer to the event files just written.
    gain = 'gain.txt'
    loss = 'loss.txt'

    spec = modelgen.SpecifyModel()
    spec.inputs.input_units = 'secs'
    spec.inputs.functional_runs = [functional_run]
    spec.inputs.time_repetition = 1  # TR
    spec.inputs.high_pass_filter_cutoff = 90  # hpf in secs
    spec.inputs.event_files = [gain, loss]
    session_info = spec.run().outputs.session_info

    # ====================================================================
    design = fsl.model.Level1Design()
    design.inputs.interscan_interval = 1  # TR
    design.inputs.bases = {'dgamma': {'derivs': True}}
    design.inputs.contrasts = contrasts
    design.inputs.session_info = session_info
    design.inputs.model_serial_correlations = False
    design.run()

    # ====================================================================
    featmodel = fsl.model.FEATModel()
    featmodel.inputs.fsf_file = 'run0.fsf'
    featmodel.inputs.ev_files = [gain, loss]
    featmodel.run()

    design_file = os.path.abspath('run0.mat')
    tcon_file = os.path.abspath('run0.con')
    fcon_file = os.path.abspath('run0.fts')
    return design_file, tcon_file, fcon_file
skip.inputs.t_size = -1 # %% susan = pe.Node(interface=fsl.SUSAN(), name = 'susan') #create_susan_smooth() susan.inputs.fwhm = fwhm susan.inputs.brightness_threshold = 1000.0 # %% modelfit = pe.Workflow(name='modelfit', base_dir= output_dir) """ Use :class:`nipype.algorithms.modelgen.SpecifyModel` to generate design information. """ modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec") modelspec.inputs.input_units = 'secs' modelspec.inputs.time_repetition = tr modelspec.inputs.high_pass_filter_cutoff= 120 """ Use :class:`nipype.interfaces.fsl.Level1Design` to generate a run specific fsf file for analysis """ ## Building contrasts level1design = pe.Node(interface=fsl.Level1Design(), name="level1design") cont1 = ['Trauma1_0>Sad1_0', 'T', ['trauma1_0', 'sad1_0'], [1, -1]] cont2 = ['Trauma1_0>Relax1_0', 'T', ['trauma1_0', 'relax1_0'], [1, -1]] cont3 = ['Sad1_0>Relax1_0', 'T', ['sad1_0', 'relax1_0'], [1, -1]]
# NOTE(review): script fragment — `conditions`, `subject_info`, `TR`, `Node`,
# `modelgen` and `fsl` come from earlier in the file, outside this chunk.
# One-condition activation/deactivation t-contrasts.
cont01 = ['activation', 'T', conditions, [1]]
cont02 = ['deactivation', 'T', conditions, [-1]]
contrast_list = [cont01, cont02]

###########
#
# SETTING UP THE FIRST LEVEL ANALYSIS NODES
#
###########

# model specification
modelspec = Node(modelgen.SpecifyModel(subject_info=subject_info,
                                       input_units='secs',
                                       time_repetition=TR,
                                       high_pass_filter_cutoff=100),
                 name="modelspec")

# first-level design
level1design = Node(fsl.Level1Design(bases={'dgamma':{'derivs': True}},
                                     interscan_interval=TR,
                                     model_serial_correlations=True,
                                     contrasts=contrast_list),
                    name="level1design")

# creating all the other files necessary to run the model
# NOTE(review): this rebinding shadows the `modelgen` module imported above,
# making modelgen.SpecifyModel unreachable after this line — consider
# renaming the node variable.
modelgen = Node(fsl.FEATModel(), name='modelgen')

# then running through FEAT
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
                             fwhm=6.0, subjects_dir=None, target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)

    Notes
    -----
    Fixes over the original version:
    - ``subject=None`` (the default) no longer crashes on ``len(subject)``;
      a falsy value now means "analyze all subjects".
    - Integer floor division (``//``) is used for the reshape sizes inside
      ``check_behav_list`` and ``sort_copes`` — Python 3 true division
      produces floats, which ``ndarray.reshape`` rejects.
    - The defunct ``nipype.external.six`` import was replaced by a plain
      ``isinstance(..., str)`` check.
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')

    # `not subject` covers both the None default and an explicit empty list;
    # the original `len(subject) == 0` raised TypeError for the default.
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)]
                                  for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')}
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]}
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')}
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        """Convert a cutoff in seconds to FSL's highpass (in half-TRs)."""
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        """Read task contrasts from file and append a per-condition t-test."""
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([np.array(row.split()) for row in
                                     fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1)
                                 for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(use_differences=[True, False],
                                    use_norm=True,
                                    norm_threshold=1,
                                    zintensity_threshold=3,
                                    parameter_source='FSL',
                                    mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        """Reshape the flat behav file list into [run][condition] order."""
        import numpy as np
        num_conds = len(conds)
        # A single event file arrives as a bare string, not a list.
        if isinstance(behav, str):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        # Floor division: reshape requires integer dimensions on Python 3.
        return behav_array.reshape(num_elements // num_conds,
                                   num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id',
                                                      'conds'],
                                         output_names=['behav'],
                                         function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    # Compute TSNR on realigned data, regressing polynomials up to order 2.
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs.
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        """Regroup per-run cope/varcope lists into per-contrast lists."""
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        # Floor division: reshape requires integer dimensions on Python 3.
        outcopes = all_copes.reshape(len(all_copes) // num_copes,
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(len(all_varcopes) // num_copes,
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                    'contrasts'],
                                       output_names=['copes', 'varcopes',
                                                     'n_runs'],
                                       function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      ])
                ])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = \
            fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        """Concatenate cope/varcope/zstat lists, remembering the split sizes."""
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ])])
    wf.connect(mergefunc, 'out_files', registration,
               'inputspec.source_files')

    def split_files(in_files, splits):
        """Undo merge_files using the recorded split sizes."""
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes',
                   get_roi_mean, 'in_file')
        wf.connect(registration, 'outputspec.aparc',
                   get_roi_mean, 'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc',
                   get_roi_tsnr, 'segmentation_file')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        """Build datasink filename substitutions for readable output names."""
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'run_id', 'model_id',
                                                'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    wf.connect([(modelfit.get_node('modelgen'), datasink,
                 [('design_cov', 'qa.model'),
                  ('design_image', 'qa.model.@matrix_image'),
                  ('design_file', 'qa.model.@matrix'),
                  ])])
    wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                      'qa.motion'),
                                     ('outputspec.motion_plots',
                                      'qa.motion.plots'),
                                     ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink,
                     [('avgwf_txt_file', 'qa.tsnr'),
                      ('summary_file', 'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink,
                     [('avgwf_txt_file', 'copes.roi'),
                      ('summary_file', 'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')

    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
def rs_firstlevel(unsmooth_fn, smooth_fn, roi_mask, output_dir, work_dir):
    """Run a seed-based resting-state first-level GLM with FSL.

    Extracts the mean timeseries of ``roi_mask`` and a global-signal
    timeseries from ``unsmooth_fn``, uses both as regressors in a FILM GLS
    model fit on ``smooth_fn``, then copies the model-estimate outputs into
    ``output_dir`` and deletes ``work_dir``.

    NOTE(review): relies on module-level ``op`` (os.path), ``np``, ``pe``,
    ``util``, ``nio``, ``shutil`` and ``glob`` being imported elsewhere in
    the file. Assumes a TR of 0.72 s (hard-coded below) — confirm against
    the acquisition.
    """
    import nipype.algorithms.modelgen as model  # model generation
    from niflow.nipype1.workflows.fmri.fsl import create_modelfit_workflow
    from nipype.interfaces import fsl as fsl
    from nipype.interfaces.base import Bunch

    # Mean timeseries of the ROI, written to <work_dir>/<bold>_<roi>.txt.
    meants = fsl.utils.ImageMeants()
    meants.inputs.in_file = unsmooth_fn
    meants.inputs.mask = roi_mask
    meants.inputs.out_file = op.join(
        work_dir, '{0}_{1}.txt'.format(
            op.basename(unsmooth_fn).split('.')[0],
            op.basename(roi_mask).split('.')[0]))
    meants.run()

    # Global-signal timeseries: reuse the same interface with the brain mask
    # that sits next to the smoothed file (suffix "_mask.nii.gz").
    mask_fn = "_".join(op.basename(smooth_fn).split('.')[0].split('_')[:-1])
    meants.inputs.mask = op.join(op.dirname(smooth_fn),
                                 '{prefix}_mask.nii.gz'.format(prefix=mask_fn))
    meants.inputs.out_file = op.join(
        work_dir,
        '{0}_gsr.txt'.format(op.basename(unsmooth_fn).split('.')[0]))
    meants.run()

    # Load both timeseries back; atleast_2d keeps indexing uniform.
    roi_ts = np.atleast_2d(
        np.loadtxt(
            op.join(
                work_dir, '{0}_{1}.txt'.format(
                    op.basename(unsmooth_fn).split('.')[0],
                    op.basename(roi_mask).split('.')[0]))))
    gsr_ts = np.atleast_2d(
        np.loadtxt(
            op.join(
                work_dir,
                '{0}_gsr.txt'.format(op.basename(unsmooth_fn).split('.')[0]))))

    # A constant 'mean' condition spanning the run, plus the ROI and global
    # signal as continuous regressors (TR = 0.72 s).
    subject_info = Bunch(
        conditions=['mean'],
        onsets=[list(np.arange(0, 0.72 * len(roi_ts[0]), 0.72))],
        durations=[[0.72]],
        amplitudes=[np.ones(len(roi_ts[0]))],
        regressor_names=['roi', 'gsr'],
        regressors=[roi_ts[0], gsr_ts[0]])

    level1_workflow = pe.Workflow(name='level1flow')

    inputnode = pe.Node(
        interface=util.IdentityInterface(fields=['func', 'subjectinfo']),
        name='inputspec')

    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = 0.72
    # Cutoff 0 presumably disables high-pass filtering here — TODO confirm.
    modelspec.inputs.high_pass_filter_cutoff = 0

    modelfit = create_modelfit_workflow()
    # Disable prewhitening/autocorrelation estimation for this model.
    modelfit.get_node('modelestimate').inputs.smooth_autocorr = False
    modelfit.get_node('modelestimate').inputs.autocorr_noestimate = True
    modelfit.get_node('modelestimate').inputs.mask_size = 0
    modelfit.inputs.inputspec.interscan_interval = 0.72
    modelfit.inputs.inputspec.bases = {'none': {'none': None}}
    modelfit.inputs.inputspec.model_serial_correlations = False
    modelfit.inputs.inputspec.film_threshold = 1000
    # Single t-contrast on the ROI regressor.
    contrasts = [['corr', 'T', ['mean', 'roi', 'gsr'], [0, 1, 0]]]
    modelfit.inputs.inputspec.contrasts = contrasts

    """
    This node will write out image files in output directory
    """
    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.base_directory = work_dir

    level1_workflow.connect([
        (inputnode, modelspec, [('func', 'functional_runs')]),
        (inputnode, modelspec, [('subjectinfo', 'subject_info')]),
        (modelspec, modelfit, [('session_info', 'inputspec.session_info')]),
        (inputnode, modelfit, [('func', 'inputspec.functional_data')]),
        (modelfit, datasink, [('outputspec.copes', 'copes'),
                              ('outputspec.varcopes', 'varcopes'),
                              ('outputspec.dof_file', 'dof_file'),
                              ('outputspec.zfiles', 'zfiles')])
    ])

    level1_workflow.inputs.inputspec.func = smooth_fn
    level1_workflow.inputs.inputspec.subjectinfo = subject_info
    level1_workflow.base_dir = work_dir
    level1_workflow.run()

    # Copy the model-estimate outputs to output_dir, then remove the whole
    # working directory (destructive: work_dir is deleted).
    shutil.rmtree(op.join(work_dir, 'level1flow'))
    files_to_copy = glob(op.join(work_dir, '*', '_modelestimate0', '*'))
    for tmp_fn in files_to_copy:
        shutil.copy(tmp_fn, output_dir)
    shutil.rmtree(work_dir)
    # NOTE(review): tail of a function whose definition precedes this chunk.
    return len(files)

"""
======================
model fitting workflow
======================
NODES
"""

#Master Node
modelfit = pe.Workflow(name='modelfit')

#generate design information
# NOTE(review): input_units, TR and hpcutoff come from earlier in the file.
modelspec = pe.Node(interface=model.SpecifyModel(
    input_units=input_units,
    time_repetition=TR,
    high_pass_filter_cutoff=hpcutoff),
    name="modelspec")

#generate a run specific fsf file for analysis
level1design = pe.Node(interface=fsl.Level1Design(
    interscan_interval=TR,
    bases={'dgamma': {
        'derivs': False
    }},
    contrasts=contrasts,
    model_serial_correlations=True),
    name="level1design")

#generate a run specific mat file for use by FILMGLS
# NOTE(review): this call is continued beyond this chunk.
modelgen = pe.MapNode(interface=fsl.FEATModel(),
    # NOTE(review): tail of a function whose definition precedes this chunk.
    return modelspec, contrasts

# Make node
# Wraps ModelGrabber (defined elsewhere) to produce the model spec and
# contrasts from the contrasts/events/confounds files.
model_grabber = pe.Node(niu.Function(input_names=['contrasts_file',
                                                  'events_file',
                                                  'confounds_file'],
                                     output_names=['modelspec', 'contrasts'],
                                     function=ModelGrabber),
                        'model_grabber')

# **ModelSpec**: Model specification
# In[17]:
modelspec = pe.Node(modelgen.SpecifyModel(), 'modelspec')
modelspec.inputs.time_repetition = TR
modelspec.inputs.input_units = 'secs'
modelspec.inputs.high_pass_filter_cutoff = 128.0

# **level1design:** Generate FEAT-specific files
# In[18]:
level1design = pe.Node(fsl.model.Level1Design(), 'l1design')
level1design.inputs.bases = {'dgamma':{'derivs': True}}
level1design.inputs.model_serial_correlations = True
def init_glm_wf(conditions, contrasts, repetition_time, use_mov_pars,
                name="glm"):
    """
    create workflow to calculate a first level glm for task functional data

    :param conditions: dictionary of conditions with onsets and durations
        by condition names
    :param contrasts: dictionary of contrasts by names
    :param repetition_time: repetition time
    :param use_mov_pars: if true, regress out movement parameters when
        calculating the glm
    :param name: workflow name (Default value = "glm")
    :returns: (workflow, connames) — the assembled workflow and the ordered
        list of contrast names matching the outputnode fields
    """
    workflow = pe.Workflow(name=name)

    # inputs are the bold file, the mask file and the confounds file
    # that contains the movement parameters
    inputnode = pe.Node(niu.IdentityInterface(
        fields=["bold_file", "mask_file", "confounds_file"]),
        name="inputnode")

    # transform (unordered) conditions dictionary into three (ordered) lists
    names = list(conditions.keys())
    onsets = [conditions[k]["onsets"] for k in names]
    durations = [conditions[k]["durations"] for k in names]

    # first level model specification
    modelspec = pe.Node(interface=model.SpecifyModel(
        input_units="secs",
        high_pass_filter_cutoff=128., time_repetition=repetition_time,
        subject_info=Bunch(conditions=names, onsets=onsets,
                           durations=durations)),
        name="modelspec")

    # transform contrasts dictionary to nipype list data structure
    # (each entry: [name, "T", [condition names], [weights]])
    contrasts_ = [[k, "T"] + [list(i) for i in
                              zip(*[(n, val) for n, val in v.items()])]
                  for k, v in contrasts.items()]

    connames = [k[0] for k in contrasts_]

    # outputs are cope, varcope and zstat for each contrast and a dof_file
    outputnode = pe.Node(niu.IdentityInterface(fields=sum(
        [["%s_img" % conname, "%s_varcope" % conname, "%s_zstat" % conname]
         for conname in connames], []) + ["dof_file"]),
        name="outputnode")

    outputnode._interface.names = connames

    # generate design from first level specification
    level1design = pe.Node(interface=fsl.Level1Design(
        contrasts=contrasts_,
        interscan_interval=repetition_time,
        model_serial_correlations=True,
        bases={"dgamma": {
            "derivs": False
        }}),
        name="level1design")

    # generate required input files for FILMGLS from design
    # NOTE(review): iterfield is a MapNode-only parameter; on pe.Node it
    # appears to have no effect — confirm intent (here and below).
    modelgen = pe.Node(interface=fsl.FEATModel(), name="modelgen",
                       iterfield=["fsf_file", "ev_files"])

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    stats = pe.Node(interface=fsl.ImageStats(op_string="-R"), name="stats")

    # actually estimate the first level model
    modelestimate = pe.Node(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                  mask_size=5),
                            name="modelestimate",
                            iterfield=["design_file", "in_file", "tcon_file"])

    # mask regression outputs
    maskimgs = pe.MapNode(interface=fsl.ApplyMask(), name="maskimgs",
                          iterfield=["in_file"])
    maskvarcopes = pe.MapNode(interface=fsl.ApplyMask(), name="maskvarcopes",
                              iterfield=["in_file"])
    maskzstats = pe.MapNode(interface=fsl.ApplyMask(), name="maskzstats",
                            iterfield=["in_file"])

    # split regression outputs by name
    splitimgs = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitimgs")
    splitvarcopes = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitvarcopes")
    splitzstats = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitzstats")

    # pass movement parameters to glm model specification if requested
    c = [("bold_file", "functional_runs")]
    if use_mov_pars:
        c.append(("confounds_file", "realignment_parameters"))

    workflow.connect([
        (inputnode, modelspec, c),
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        (modelspec, level1design, [("session_info", "session_info")]),
        (level1design, modelgen, [("fsf_files", "fsf_file"),
                                  ("ev_files", "ev_files")]),
        (inputnode, stats, [("bold_file", "in_file")]),
        # FILMGLS threshold derived from the image intensity range.
        (stats, modelestimate, [(("out_stat", get_float), "threshold")]),
        (modelgen, modelestimate, [("design_file", "design_file"),
                                   ("con_file", "tcon_file")]),
        (inputnode, maskimgs, [("mask_file", "mask_file")]),
        (inputnode, maskvarcopes, [("mask_file", "mask_file")]),
        (inputnode, maskzstats, [("mask_file", "mask_file")]),
        (modelestimate, maskimgs, [
            (("copes", flatten), "in_file"),
        ]),
        (modelestimate, maskvarcopes, [
            (("varcopes", flatten), "in_file"),
        ]),
        (modelestimate, maskzstats, [
            (("zstats", flatten), "in_file"),
        ]),
        (modelestimate, outputnode, [("dof_file", "dof_file")]),
        (maskimgs, splitimgs, [
            ("out_file", "inlist"),
        ]),
        (maskvarcopes, splitvarcopes, [
            ("out_file", "inlist"),
        ]),
        (maskzstats, splitzstats, [
            ("out_file", "inlist"),
        ]),
    ])

    # connect outputs named for the contrasts
    for i, conname in enumerate(connames):
        workflow.connect(splitimgs, "out%i" % (i + 1),
                         outputnode, "%s_img" % conname)
        workflow.connect(splitvarcopes, "out%i" % (i + 1),
                         outputnode, "%s_varcope" % conname)
        workflow.connect(splitzstats, "out%i" % (i + 1),
                         outputnode, "%s_zstat" % conname)

    return workflow, connames
    # NOTE(review): tail of a function (presumably get_onsets) whose
    # definition precedes this chunk.
    print(info)
    return info

# Function node producing the subject_info Bunch for a subject/run/task.
getonsets = pe.Node(niu.Function(input_names=['subject_id','runnum','taskname'],
                                 output_names=['info'],
                                 function=get_onsets),
                    name='getonsets')
firstlevel.connect(infosource,'subject_id',getonsets,'subject_id')
firstlevel.connect(runinfo,'runcode',getonsets,'runnum')
firstlevel.connect(taskinfo,'taskname',getonsets,'taskname')

# Model specification (TR hard-coded to 2.0 s here and below).
specifymodel=pe.Node(interface=model.SpecifyModel(),name='specifymodel')
specifymodel.inputs.input_units = 'secs'
specifymodel.inputs.time_repetition = 2.0
specifymodel.inputs.high_pass_filter_cutoff = hpf_cutoff
firstlevel.connect(getonsets,'info',specifymodel,'subject_info')
firstlevel.connect(datasource_func,'func',specifymodel,'functional_runs')

# FEAT first-level design; contrasts are derived from the task name via
# get_contrasts (defined elsewhere in the file).
level1design = pe.Node(interface=fsl.model.Level1Design(),name='level1design')
level1design.inputs.interscan_interval =2.0
level1design.inputs.bases = {'dgamma':{'derivs': True}}
level1design.inputs.model_serial_correlations=True
firstlevel.connect(specifymodel,'session_info',level1design,'session_info')
firstlevel.connect(taskinfo,('taskname',get_contrasts),level1design,'contrasts')
NodeHash_2ee27f90.inputs.operation = 'mul' #Wraps command **fslmaths** NodeHash_c0d1e30 = pe.MapNode(interface = fsl.TemporalFilter(), name = 'NodeName_c0d1e30', iterfield = ['in_file']) NodeHash_c0d1e30.inputs.highpass_sigma = 25 #Wraps command **fslmaths** NodeHash_17446a20 = pe.MapNode(interface = fsl.MeanImage(), name = 'NodeName_17446a20', iterfield = ['in_file']) NodeHash_17446a20.inputs.dimension = 'T' #Wraps command **fslmaths** NodeHash_b5a5810 = pe.MapNode(interface = fsl.BinaryMaths(), name = 'NodeName_b5a5810', iterfield = ['in_file', 'operand_file']) NodeHash_b5a5810.inputs.operation = 'add' #Makes a model specification compatible with spm/fsl designers. NodeHash_1e7a3420 = pe.MapNode(interface = modelgen.SpecifyModel(), name = 'NodeName_1e7a3420', iterfield = ['functional_runs', 'subject_info']) NodeHash_1e7a3420.inputs.high_pass_filter_cutoff = 0 NodeHash_1e7a3420.inputs.input_units = 'secs' NodeHash_1e7a3420.inputs.time_repetition = 2.0 #Generate FEAT specific files NodeHash_9bb0d40 = pe.MapNode(interface = fsl.Level1Design(), name = 'NodeName_9bb0d40', iterfield = ['session_info']) NodeHash_9bb0d40.inputs.bases = {'dgamma':{'derivs': False}} NodeHash_9bb0d40.inputs.contrasts = [('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1])] NodeHash_9bb0d40.inputs.interscan_interval = 2.0 NodeHash_9bb0d40.inputs.model_serial_correlations = True #Wraps command **feat_model** NodeHash_6b33f50 = pe.MapNode(interface = fsl.FEATModel(), name = 'NodeName_6b33f50', iterfield = ['ev_files', 'fsf_file']) #Wraps command **film_gls**
def create_lvl1pipe_wf(options):
    ''' Build a nipype first-level (subject/run) GLM workflow for fmriprep-preprocessed data.

    Input [Mandatory]:
    ~~~~~~~~~~~ Set in command call:
    options: dictionary with the following entries
        remove_steadystateoutlier [boolean]:
            Should always be True. Remove steady state outliers from bold timecourse,
            specified in fmriprep confounds file.
        smooth [boolean]:
            If True, then /smooth subfolder created and populated with results.
            If False, then /nosmooth subfolder created and populated with results.
        censoring [string]:
            Either '' or 'despike', which implements nipype.interfaces.afni.Despike
        ICA_AROMA [boolean]:
            Use AROMA error components, from fmriprep confounds file.
        run_contrasts [boolean]:
            If False, then components related to contrasts and p values are removed from
            nipype.workflows.fmri.fsl.estimate.create_modelfit_workflow()
        keep_resid [boolean]:
            If False, then only sum of squares residuals will be outputted.
            If True, then timecourse residuals kept.
        poly_trend [integer. Use None to skip]:
            If given, polynomial trends will be added to run confounds, up to the order
            of the integer e.g. "0", gives an intercept, "1" gives intercept + linear trend,
            "2" gives intercept + linear trend + quadratic.
            DO NOT use in conjunction with high pass filters.
        dct_basis [integer. Use None to skip]:
            If given, adds a discrete cosine transform, with a length (in seconds) of the
            integer specified. Adds unit scaled cosine basis functions to Design_Matrix
            columns, based on spm-style discrete cosine transform for use in high-pass
            filtering. Does not add intercept/constant.
            DO NOT use in conjunction with high pass filters.
    ~~~~~~~~~~~ Set through inputs.inputspec
    input_dir [string]: path to folder containing fmriprep preprocessed data.
        e.g. model_wf.inputs.inputspec.input_dir = '/home/neuro/data'
    output_dir [string]: path to desired output folder. Workflow will create a new
        subfolder based on proj_name.
        e.g. model_wf.inputs.inputspec.output_dir = '/home/neuro/output'
    proj_name [string]: name for project subfolder within output_dir.
        Ideally something unique, or else workflow will write to an existing folder.
        e.g. model_wf.inputs.inputspec.proj_name = 'FSMAP_stress'
    design_col [string]: Name of column within events.tsv with values corresponding
        to entries specified in params.
        e.g. model_wf.inputs.inputspec.design_col = 'trial_type'
    params [list of strings]: values within events.tsv design_col that correspond to
        events to be modeled.
        e.g. ['Instructions', 'Speech_prep', 'No_speech']
    conditions [list, of either strings or lists],
        each condition must be a string within the events.tsv design_col.
        These conditions correspond to event conditions to be modeled.
        Give a list, instead of a string, to model parametric terms.
        These parametric terms give a event condition, then a parametric term,
        which is another column in the events.tsv file.
        The parametric term can be centered and normed using entries 3 and 4 in the list.
        e.g. model_wf.inputs.inputspec.params = ['condition1', 'condition2',
            ['condition1', 'parametric1', 'no_cent', 'no_norm'],
            ['condition2', 'paramatric2', 'cent', 'norm']]
        entry 1 is a condition within the design_col column
        entry 2 is a column in the events folder, which will be used for parametric weightings.
        entry 3 is either 'no_cent', or 'cent', indicating whether to center the parametric variable.
        entry 4 is either 'no_norm', or 'norm', indicating whether to normalize the parametric variable.
        Onsets and durations will be taken from corresponding values for entry 1
        parametric weighting specified by entry 2, scaled/centered as specified,
        then appended to the design matrix.
    contrasts [list of lists]: Specifies contrasts to be performed. using params selected above.
        e.g. model_wf.inputs.inputspec.contrasts = [['Instructions', 'T', ['Instructions'], [1]],
            ['Speech_prep', 'T', ['Speech_prep'], [1]],
            ['No_speech', 'T', ['No_speech'], [1]],
            ['Speech_prep>No_speech', 'T', ['Speech_prep', 'No_speech'], [1, -1]]]
    noise_regressors [list of strings]: column names in confounds.tsv, specifying
        desired noise regressors for model.
        IF noise_transforms are to be applied to a regressor, add '*' to the name.
        e.g. model_wf.inputs.inputspec.noise_regressors = ['CSF', 'WhiteMatter',
            'GlobalSignal', 'X*', 'Y*', 'Z*', 'RotX*', 'RotY*', 'RotZ*']
    noise_transforms [list of strings]: noise transforms to be applied to select
        noise_regressors above. Possible values are 'quad', 'tderiv', and 'quadtderiv',
        standing for quadratic function of value, temporal derivative of value, and
        quadratic function of temporal derivative.
        e.g. model_wf.inputs.inputspec.noise_transforms = ['quad', 'tderiv', 'quadtderiv']
    TR [float]: Scanner TR value in seconds.
        e.g. model_wf.inputs.inputspec.TR = 2.
    FILM_threshold [integer]: Cutoff value for modeling threshold.
        1000: p <.001; 1: p <=1, i.e. unthresholded.
        e.g. model_wf.inputs.inputspec.FILM_threshold = 1
    hpf_cutoff [float]: high pass filter value.
        DO NOT USE THIS in conjunction with poly_trend or dct_basis.
        e.g. model_wf.inputs.inputspec.hpf_cutoff = 120.
    bases: (a dictionary with keys which are 'hrf' or 'fourier' or 'fourier_han' or
        'gamma' or 'fir' and with values which are any value)
        dict {'name':{'basesparam1':val,...}}
        name : string
            Name of basis function (hrf, fourier, fourier_han, gamma, fir)
            hrf :
                derivs : 2-element list
                    Model HRF Derivatives. No derivatives: [0,0],
                    Time derivatives : [1,0],
                    Time and Dispersion derivatives: [1,1]
            fourier, fourier_han, gamma, fir:
                length : int
                    Post-stimulus window length (in seconds)
                order : int
                    Number of basis functions
        e.g. model_wf.inputs.inputspec.bases = {'dgamma':{'derivs': False}}
    model_serial_correlations [boolean]: Allow prewhitening, with 5mm spatial smoothing.
        e.g. model_wf.inputs.inputspec.model_serial_correlations = True
    sinker_subs [list of tuples]: passed to nipype.interfaces.io.Datasink.
        Changes names when passing to output directory.
        e.g. model_wf.inputs.inputspec.sinker_subs = [('pe1', 'pe1_instructions'),
            ('pe2', 'pe2_speech_prep'),
            ('pe3', 'pe3_no_speech')]
    bold_template [dictionary with string entry]: Specifies path, with wildcard, to
        grab all relevant BOLD files. Each subject_list entry should uniquely identify
        the ONE relevant file.
        e.g. model_wf.inputs.inputspec.bold_template =
            {'bold': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_space-MNI152NLin2009cAsym_preproc.nii.gz'}
        This would grab the functional run for all subjects, and when
        subject_id = 'sub-001', there is ONE file in the list that the ID could
        possibly correspond to. To handle multiple runs, list the run information in
        the subject_id. e.g. 'sub-01_task-trag_run-01'.
    mask_template [dictionary with string entry]: Specifies path, with wildcard, to
        grab all relevant MASK files, corresponding to functional images. Each
        subject_list entry should uniquely identify the ONE relevant file.
        e.g. model_wf.inputs.inputspec.mask_template =
            {'mask': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_space-MNI152NLin2009cAsym_brainmask.nii.gz'}
        See bold_template for more detail.
    task_template [dictionary with string entry]: Specifies path, with wildcard, to
        grab all relevant events.tsv files, corresponding to functional images. Each
        subject_list entry should uniquely identify the ONE relevant file.
        e.g. model_wf.inputs.inputspec.task_template =
            {'task': '/home/neuro/data/sub-*/func/sub-*_task-stress_events.tsv'}
        See bold_template for more detail.
    confound_template [dictionary with string entry]: Specifies path, with wildcard,
        to grab all relevant confounds.tsv files, corresponding to functional images.
        Each subject_list entry should uniquely identify the ONE relevant file.
        e.g. model_wf.inputs.inputspec.confound_template =
            {'confound': '/home/neuro/data/sub-*/func/sub-*_task-stress_bold_confounds.tsv'}
        See bold_template for more detail.
    smooth_gm_mask_template [dictionary with string entry]: Specifies path, with
        wildcard, to grab all relevant grey matter mask .nii.gz files, pulling from
        each subject's /anat folder. Each subject_list entry should uniquely identify
        the ONE relevant file (BUT SEE THE NOTE BELOW).
        e.g. model_wf.inputs.inputspec.smooth_gm_mask_template =
            {'gm_mask': '/scratch/data/sub-*/anat/sub-*_T1w_space-MNI152NLin2009cAsym_class-GM_probtissue.nii.gz'}
        NOTE: If the subject_id value has more information than just the ID
        (e.g. sub-01_task-trag_run-01), then JUST the sub-01 portion will be used to
        identify the grey matter mask. This is because multiple runs will have the
        same anatomical data. i.e. sub-01_run-01, sub-01_run-02, sub-01_run-03, all
        correspond to sub-01_T1w_space-MNI152NLin2009cAsym_class-GM_probtissue.nii.gz.
    fwhm [float]. Redundant if options['smooth']: False
        Determines smoothing kernel. Multiple kernels can be run in parallel by
        iterating through an outside workflow. Also see subject_id below for another
        example of iterables.
        e.g. model_wf.inputs.inputspec.fwhm = 1.5
        OR Iterable e.g.
            import nipype.pipeline.engine as pe
            fwhm_list = [1.5, 6]
            infosource = pe.Node(IdentityInterface(fields=['fwhm']), name='infosource')
            infosource.iterables = [('fwhm', fwhm_list)]
            full_model_wf = pe.Workflow(name='full_model_wf')
            full_model_wf.connect([(infosource, model_wf, [('subject_id', 'inputspec.subject_id')])])
            full_model_wf.run()
    subject_id [string]: Identifies subject in conjunction with template.
        See bold_template note above. Can also be entered as an iterable from an
        outside workflow, in which case iterables are run in parallel to the extent
        that cpu cores are available.
        e.g. model_wf.inputs.inputspec.subject_id = 'sub-01'
        OR Iterable e.g.
            import nipype.pipeline.engine as pe
            subject_list = ['sub-001', 'sub-002']
            infosource = pe.Node(IdentityInterface(fields=['subject_id']), name='infosource')
            infosource.iterables = [('subject_id', subject_list)]
            full_model_wf = pe.Workflow(name='full_model_wf')
            full_model_wf.connect([(infosource, model_wf, [('subject_id', 'inputspec.subject_id')])])
            full_model_wf.run()
    '''
    import nipype.pipeline.engine as pe # pypeline engine
    import nipype.interfaces.fsl as fsl
    import os
    from nipype import IdentityInterface, SelectFiles
    from nipype.interfaces.utility.wrappers import Function

    ################## Setup workflow.
    lvl1pipe_wf = pe.Workflow(name='lvl_one_pipe')

    # All run-time parameters enter through this identity node; none are mandatory
    # because they may be wired in from an enclosing workflow instead.
    inputspec = pe.Node(IdentityInterface(
        fields=['input_dir',
                'output_dir',
                'design_col',
                'noise_regressors',
                'noise_transforms',
                'TR', # in seconds.
                'FILM_threshold',
                'hpf_cutoff',
                'conditions',
                'contrasts',
                'bases',
                'model_serial_correlations',
                'sinker_subs',
                'bold_template',
                'mask_template',
                'task_template',
                'confound_template',
                'smooth_gm_mask_template',
                'gmmask_args',
                'subject_id',
                'fwhm',
                'proj_name',
                ],
        mandatory_inputs=False),
        name='inputspec')

    ################## Select Files
    def get_file(subj_id, template):
        # Glob the (single-entry) template dict and return the one file matching subj_id.
        # Raises AssertionError when zero or multiple files match — the `if` wrappers are
        # redundant (the asserts are unconditionally false when reached) but preserved,
        # as they select which of the two error messages is raised.
        import glob
        temp_list = []
        out_list = []
        if '_' in subj_id and '/anat/' in list(template.values())[0]:
            subj_id = subj_id[:subj_id.find('_')] # if looking for gmmask, and subj_id includes additional info (e.g. sub-001_task-trag_run-01) then just take the subject id component, as the run info will not be present for the anatomical data.
        for x in glob.glob(list(template.values())[0]):
            if subj_id in x:
                temp_list.append(x)
        for file in temp_list: # ensure no duplicate entries.
            if file not in out_list:
                out_list.append(file)
        if len(out_list) == 0:
            assert (len(out_list) == 1), 'Each combination of template and subject ID should return 1 file. 0 files were returned.'
        if len(out_list) > 1:
            assert (len(out_list) == 1), 'Each combination of template and subject ID should return 1 file. Multiple files returned.'
        out_file = out_list[0]
        return out_file

    # One file-grabber node per required input file type; all reuse get_file.
    get_bold = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file), name='get_bold')
    get_mask = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file), name='get_mask')
    get_task = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file), name='get_task')
    get_confile = pe.Node(Function(
        input_names=['subj_id', 'template'],
        output_names=['out_file'],
        function=get_file), name='get_confile')
    # get_bold.inputs.subj_id # From inputspec
    # get_bold.inputs.templates # From inputspec

    if options['smooth']:
        # Grey-matter mask path: grab, modify (fslmaths args from inputspec), then
        # resample onto the functional grid before feeding SUSAN smoothing.
        get_gmmask = pe.Node(Function(
            input_names=['subj_id', 'template'],
            output_names=['out_file'],
            function=get_file), name='get_gmmask')

        mod_gmmask = pe.Node(fsl.maths.MathsCommand(),
                             name='mod_gmmask')
        # mod_gmmask.inputs.in_file = # from get_gmmask
        # mod_gmmask.inputs.args = from inputspec

        def fit_mask(mask_file, ref_file):
            # Nearest-neighbour resample of mask_file onto ref_file's affine/shape;
            # writes <mask>_fit.nii.gz into the node's working directory.
            from nilearn.image import resample_img
            import nibabel as nib
            import os
            out_file = resample_img(nib.load(mask_file),
                                    target_affine=nib.load(ref_file).affine,
                                    target_shape=nib.load(ref_file).shape[0:3],
                                    interpolation='nearest')
            nib.save(out_file, os.path.join(os.getcwd(), mask_file.split('.nii')[0]+'_fit.nii.gz'))
            out_mask = os.path.join(os.getcwd(), mask_file.split('.nii')[0]+'_fit.nii.gz')
            return out_mask

        fit_mask = pe.Node(Function(
            input_names=['mask_file', 'ref_file'],
            output_names=['out_mask'],
            function=fit_mask), name='fit_mask')

    ################## Setup confounds
    def get_terms(confound_file, noise_transforms, noise_regressors, TR, options):
        ''' Gathers confounds (and transformations) into a pandas dataframe.

        Input [Mandatory]:
            confound_file [string]: path to confound.tsv file, given by fmriprep.
            noise_transforms [list of strings]: noise transforms to be applied to
                select noise_regressors above. Possible values are 'quad', 'tderiv',
                and 'quadtderiv', standing for quadratic function of value, temporal
                derivative of value, and quadratic function of temporal derivative.
                e.g. model_wf.inputs.inputspec.noise_transforms = ['quad', 'tderiv', 'quadtderiv']
            noise_regressors [list of strings]: column names in confounds.tsv,
                specifying desired noise regressors for model.
                IF noise_transforms are to be applied to a regressor, add '*' to the name.
                e.g. model_wf.inputs.inputspec.noise_regressors = ['CSF', 'WhiteMatter',
                    'GlobalSignal', 'X*', 'Y*', 'Z*', 'RotX*', 'RotY*', 'RotZ*']
            TR [float]: Scanner TR value in seconds.
            options: dictionary with the following entries
                remove_steadystateoutlier [boolean]: Should always be True. Remove
                    steady state outliers from bold timecourse, specified in fmriprep
                    confounds file.
                ICA_AROMA [boolean]: Use AROMA error components, from fmriprep
                    confounds file.
                poly_trend [integer. Use None to skip]: If given, polynomial trends
                    will be added to run confounds, up to the order of the integer
                    e.g. "0", gives an intercept, "1" gives intercept + linear trend,
                    "2" gives intercept + linear trend + quadratic.
                dct_basis [integer. Use None to skip]: If given, adds a discrete
                    cosine transform, with a length (in seconds) of the integer
                    specified. Adds unit scaled cosine basis functions to
                    Design_Matrix columns, based on spm-style discrete cosine
                    transform for use in high-pass filtering.
                    Does not add intercept/constant.
        '''
        import numpy as np
        import pandas as pd
        from nltools.data import Design_Matrix

        df_cf = pd.DataFrame(pd.read_csv(confound_file, sep='\t', parse_dates=False))
        transfrm_list = []
        for idx, entry in enumerate(noise_regressors): # get entries marked with *, indicating they should be transformed.
            if '*' in entry:
                transfrm_list.append(entry.replace('*', '')) # add entry to transformation list if it has *.
                noise_regressors[idx] = entry.replace('*', '')
        confounds = df_cf[noise_regressors]
        transfrmd_cnfds = df_cf[transfrm_list] # for transforms
        TR_time = pd.Series(np.arange(0.0, TR*transfrmd_cnfds.shape[0], TR)) # time series for derivatives.
        if 'quad' in noise_transforms:
            quad = np.square(transfrmd_cnfds)
            confounds = confounds.join(quad, rsuffix='_quad')
        if 'tderiv' in noise_transforms:
            tderiv = pd.DataFrame(pd.Series(np.gradient(transfrmd_cnfds[col]), TR_time)
                                  for col in transfrmd_cnfds).T
            tderiv.columns = transfrmd_cnfds.columns
            tderiv.index = confounds.index
            confounds = confounds.join(tderiv, rsuffix='_tderiv')
        if 'quadtderiv' in noise_transforms:
            # NOTE(review): assumes 'tderiv' was also requested — `tderiv` is undefined
            # otherwise and this branch would raise NameError; confirm intended usage.
            quadtderiv = np.square(tderiv)
            confounds = confounds.join(quadtderiv, rsuffix='_quadtderiv')
        # Both branches below support the current fmriprep column naming first and the
        # older CamelCase naming as a fallback.
        if options['remove_steadystateoutlier']:
            if not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^non_steady_state_outlier')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^non_steady_state_outlier')]])
            elif not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^NonSteadyStateOutlier')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^NonSteadyStateOutlier')]]) # old syntax
        if options['ICA_AROMA']:
            if not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^aroma_motion')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^aroma_motion')]])
            elif not df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^AROMAAggrComp')]].empty:
                confounds = confounds.join(df_cf[df_cf.columns[df_cf.columns.to_series().str.contains('^AROMAAggrComp')]]) # old syntax
        confounds = Design_Matrix(confounds, sampling_freq=1/TR)
        if isinstance(options['poly_trend'], int):
            confounds = confounds.add_poly(order = options['poly_trend']) # these do not play nice with high pass filters.
        if isinstance(options['dct_basis'], int):
            confounds = confounds.add_dct_basis(duration=options['dct_basis']) # these do not play nice with high pass filters.
        return confounds

    get_confounds = pe.Node(Function(input_names=['confound_file', 'noise_transforms',
                                                  'noise_regressors', 'TR', 'options'],
                                     output_names=['confounds'],
                                     function=get_terms),
                            name='get_confounds')
    # get_confounds.inputs.confound_file = # From get_confile
    # get_confounds.inputs.noise_transforms = # From inputspec
    # get_confounds.inputs.noise_regressors = # From inputspec
    # get_confounds.inputs.TR = # From inputspec
    get_confounds.inputs.options = options

    ################## Create bunch to run FSL first level model.
    def get_subj_info(task_file, design_col, confounds, conditions):
        ''' Makes a Bunch, giving all necessary data about conditions, onsets, and
        durations to FSL first level model. Needs a task file to run.

        Inputs:
            task file [string], path to the subject events.tsv file, as per BIDS format.
            design_col [string], column name within task file, identifying event
                conditions to model.
            confounds [pandas dataframe], pd.df of confounds, gathered from
                get_confounds node.
            conditions [list],
                e.g. ['condition1', 'condition2',
                      ['condition1', 'parametric1', 'no_cent', 'no_norm'],
                      ['condition2', 'paramatric2', 'cent', 'norm']]
                each string entry (e.g. 'condition1') specifies a event condition in
                the design_col column. each list entry includes 4 strings:
                entry 1 is a condition within the design_col column
                entry 2 is a column in the events folder, which will be used for
                    parametric weightings.
                entry 3 is either 'no_cent', or 'cent', indicating whether to center
                    the parametric variable.
                entry 4 is either 'no_norm', or 'norm', indicating whether to
                    normalize the parametric variable.
                Onsets and durations will be taken from corresponding values for
                entry 1; parametric weighting specified by entry 2, scaled/centered
                as specified, then appended to the design matrix.
        '''
        from nipype.interfaces.base import Bunch
        import pandas as pd
        import numpy as np
        from sklearn.preprocessing import scale

        onsets = []
        durations = []
        amplitudes = []
        df = pd.read_csv(task_file, sep='\t', parse_dates=False)
        for idx, cond in enumerate(conditions):
            if isinstance(cond, list):
                # NOTE(review): c/n stay unbound if entries 3/4 are not one of the
                # documented strings — scale() would then raise NameError; confirm
                # inputs are always validated upstream.
                if cond[2] == 'no_cent': # determine whether to center/scale
                    c = False
                elif cond[2] == 'cent':
                    c = True
                if cond[3] == 'no_norm':
                    n = False
                elif cond[3] == 'norm':
                    n = True
                # grab parametric terms.
                onsets.append(list(df[df[design_col] == cond[0]].onset))
                durations.append(list(df[df[design_col] == cond[0]].duration))
                amp_temp = list(scale(df[df[design_col] == cond[0]][cond[1]].tolist(),
                                      with_mean=c, with_std=n)) # scale
                amp_temp = pd.Series(amp_temp, dtype=object).fillna(0).tolist() # fill na
                amplitudes.append(amp_temp) # append
                conditions[idx] = cond[0]+'_'+cond[1] # combine condition/parametric names and replace.
            elif isinstance(cond, str):
                onsets.append(list(df[df[design_col] == cond].onset))
                durations.append(list(df[df[design_col] == cond].duration))
                # dummy code 1's for non-parametric conditions.
                amplitudes.append(list(np.repeat(1, len(df[df[design_col] == cond].onset))))
            else:
                print('cannot identify condition:', cond)
                # return None
        output = Bunch(conditions= conditions,
                       onsets=onsets,
                       durations=durations,
                       amplitudes=amplitudes,
                       tmod=None,
                       pmod=None,
                       regressor_names=confounds.columns.values,
                       regressors=confounds.T.values.tolist()) # movement regressors added here. List of lists.
        return output

    make_bunch = pe.Node(Function(input_names=['task_file', 'design_col', 'confounds', 'conditions'],
                                  output_names=['subject_info'],
                                  function=get_subj_info),
                         name='make_bunch')
    # make_bunch.inputs.task_file = # From get_task
    # make_bunch.inputs.confounds = # From get_confounds
    # make_bunch.inputs.design_col = # From inputspec
    # make_bunch.inputs.conditions = # From inputspec

    def mk_outdir(output_dir, options, proj_name):
        # Create (if needed) and return <output_dir>/<proj_name>/{smooth|nosmooth}.
        import os
        from time import gmtime, strftime
        prefix = proj_name
        if options['smooth']:
            new_out_dir = os.path.join(output_dir, prefix, 'smooth')
        else:
            new_out_dir = os.path.join(output_dir, prefix, 'nosmooth')
        if not os.path.isdir(new_out_dir):
            os.makedirs(new_out_dir)
        return new_out_dir

    make_outdir = pe.Node(Function(input_names=['output_dir', 'options', 'proj_name'],
                                   output_names=['new_out_dir'],
                                   function=mk_outdir),
                          name='make_outdir')
    # make_outdir.inputs.proj_name = from inputspec
    # make_outdir.inputs.output_dir = from inputspec
    make_outdir.inputs.options = options

    ################## Mask functional data.
    from jtnipyutil.util import mask_img
    maskBold = pe.Node(Function(input_names=['img_file', 'mask_file'],
                                output_names=['out_file'],
                                function=mask_img),
                       name='maskBold')
    # maskBold.inputs.img_file # From get_bold, or smooth_wf
    # maskBold.inputs.mask_file # From get_mask

    ################## Despike
    from nipype.interfaces.afni import Despike
    despike = pe.Node(Despike(),
                      name='despike')
    # despike.inputs.in_file = # From Mask
    despike.inputs.outputtype = 'NIFTI_GZ'

    from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
    smooth_wf = create_susan_smooth()
    # smooth_wf.inputs.inputnode.in_files = # from maskBold
    # smooth_wf.inputs.inputnode.fwhm = # from inputspec

    ################## Model Generation.
    import nipype.algorithms.modelgen as model
    specify_model = pe.Node(interface=model.SpecifyModel(), name='specify_model')
    specify_model.inputs.input_units = 'secs'
    # specify_model.functional_runs # From maskBold, despike, or smooth_wf
    # specify_model.subject_info # From make_bunch.outputs.subject_info
    # specify_model.high_pass_filter_cutoff # From inputspec
    # specify_model.time_repetition # From inputspec

    ################## Estimate workflow
    from nipype.workflows.fmri.fsl import estimate # fsl workflow
    modelfit = estimate.create_modelfit_workflow()
    modelfit.base_dir = '.'
    # modelfit.inputs.inputspec.session_info = # From specify_model
    # modelfit.inputs.inputspec.functional_data = # from maskBold
    # modelfit.inputs.inputspec.interscan_interval = # From inputspec
    # modelfit.inputs.inputspec.film_threshold = # From inputspec
    # modelfit.inputs.inputspec.bases = # From inputspec
    # modelfit.inputs.inputspec.model_serial_correlations = # From inputspec
    # modelfit.inputs.inputspec.contrasts = # From inputspec

    if not options['run_contrasts']: # drop contrast part of modelfit if contrasts aren't required.
        modelestimate = modelfit.get_node('modelestimate')
        merge_contrasts = modelfit.get_node('merge_contrasts')
        ztop = modelfit.get_node('ztop')
        outputspec = modelfit.get_node('outputspec')
        # Disconnect before removing nodes so the prebuilt workflow stays consistent.
        modelfit.disconnect([(modelestimate, merge_contrasts, [('zstats', 'in1'),
                                                              ('zfstats', 'in2')]),
                             (merge_contrasts, ztop, [('out', 'in_file')]),
                             (merge_contrasts, outputspec, [('out', 'zfiles')]),
                             (ztop, outputspec, [('out_file', 'pfiles')])
                             ])
        modelfit.remove_nodes([merge_contrasts, ztop])

    ################## DataSink
    from nipype.interfaces.io import DataSink
    import os.path
    sinker = pe.Node(DataSink(), name='sinker')
    # sinker.inputs.substitutions = # From inputspec
    # sinker.inputs.base_directory = # frm make_outdir

    def negate(input):
        # Used to flip model_serial_correlations into FILMGLS's autocorr_noestimate.
        return not input

    def unlist(input):
        # SUSAN smoothing returns a list; downstream nodes expect a single file.
        return input[0]

    lvl1pipe_wf.connect([
        # grab subject/run info
        (inputspec, get_bold, [('subject_id', 'subj_id'),
                               ('bold_template', 'template')]),
        (inputspec, get_mask, [('subject_id', 'subj_id'),
                               ('mask_template', 'template')]),
        (inputspec, get_task, [('subject_id', 'subj_id'),
                               ('task_template', 'template')]),
        (inputspec, get_confile, [('subject_id', 'subj_id'),
                                  ('confound_template', 'template')]),
        (inputspec, get_confounds, [('noise_transforms', 'noise_transforms'),
                                    ('noise_regressors', 'noise_regressors'),
                                    ('TR', 'TR')]),
        (inputspec, make_bunch, [('design_col', 'design_col'),
                                 ('conditions', 'conditions')]),
        (inputspec, make_outdir, [('output_dir', 'output_dir'),
                                  ('proj_name', 'proj_name')]),
        (inputspec, specify_model, [('hpf_cutoff', 'high_pass_filter_cutoff'),
                                    ('TR', 'time_repetition')]),
        (inputspec, modelfit, [('TR', 'inputspec.interscan_interval'),
                               ('FILM_threshold', 'inputspec.film_threshold'),
                               ('bases', 'inputspec.bases'),
                               ('model_serial_correlations', 'inputspec.model_serial_correlations'),
                               (('model_serial_correlations', negate), 'modelestimate.autocorr_noestimate'),
                               ('contrasts', 'inputspec.contrasts')]),
        (get_confile, get_confounds, [('out_file', 'confound_file')]),
        (get_confounds, make_bunch, [('confounds', 'confounds')]),
        (get_task, make_bunch, [('out_file', 'task_file')]),
        (make_bunch, specify_model, [('subject_info', 'subject_info')]),
        (get_mask, maskBold, [('out_file', 'mask_file')]),
        ])

    # Four data paths depending on censoring x smoothing; each ends by feeding
    # specify_model.functional_runs and modelfit.inputspec.functional_data.
    if options['censoring'] == 'despike':
        lvl1pipe_wf.connect([
            (get_bold, despike, [('out_file', 'in_file')])
            ])
        if options['smooth']:
            lvl1pipe_wf.connect([
                (inputspec, smooth_wf, [('fwhm', 'inputnode.fwhm')]),
                (inputspec, get_gmmask, [('subject_id', 'subj_id'),
                                         ('smooth_gm_mask_template', 'template')]),
                (get_gmmask, mod_gmmask, [('out_file', 'in_file')]),
                (inputspec, mod_gmmask, [('gmmask_args', 'args')]),
                (mod_gmmask, fit_mask, [('out_file', 'mask_file')]),
                (get_bold, fit_mask, [('out_file', 'ref_file')]),
                (fit_mask, smooth_wf, [('out_mask', 'inputnode.mask_file')]),
                (fit_mask, sinker, [('out_mask', 'smoothing_mask')]),
                (despike, smooth_wf, [('out_file', 'inputnode.in_files')]),
                (smooth_wf, maskBold, [(('outputnode.smoothed_files', unlist), 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])
        else:
            lvl1pipe_wf.connect([
                (despike, specify_model, [('out_file', 'functional_runs')]),
                (despike, modelfit, [('out_file', 'inputspec.functional_data')]),
                (despike, sinker, [('out_file', 'despike')])
                ])
    else:
        if options['smooth']:
            lvl1pipe_wf.connect([
                (inputspec, smooth_wf, [('fwhm', 'inputnode.fwhm')]),
                (inputspec, get_gmmask, [('subject_id', 'subj_id'),
                                         ('smooth_gm_mask_template', 'template')]),
                (get_gmmask, mod_gmmask, [('out_file', 'in_file')]),
                (inputspec, mod_gmmask, [('gmmask_args', 'args')]),
                (mod_gmmask, fit_mask, [('out_file', 'mask_file')]),
                (get_bold, fit_mask, [('out_file', 'ref_file')]),
                (fit_mask, smooth_wf, [('out_mask', 'inputnode.mask_file')]),
                (fit_mask, sinker, [('out_mask', 'smoothing_mask')]),
                (get_bold, smooth_wf, [('out_file', 'inputnode.in_files')]),
                (smooth_wf, maskBold, [(('outputnode.smoothed_files', unlist), 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])
        else:
            lvl1pipe_wf.connect([
                (get_bold, maskBold, [('out_file', 'img_file')]),
                (maskBold, specify_model, [('out_file', 'functional_runs')]),
                (maskBold, modelfit, [('out_file', 'inputspec.functional_data')])
                ])

    lvl1pipe_wf.connect([
        (specify_model, modelfit, [('session_info', 'inputspec.session_info')]),
        (inputspec, sinker, [('subject_id','container'),
                             ('sinker_subs', 'substitutions')]), # creates folder for each subject.
        (make_outdir, sinker, [('new_out_dir', 'base_directory')]),
        (modelfit, sinker, [('outputspec.parameter_estimates', 'model'),
                            ('outputspec.dof_file','model.@dof'), #.@ puts this in the model folder.
                            ('outputspec.copes','model.@copes'),
                            ('outputspec.varcopes','model.@varcopes'),
                            ('outputspec.zfiles','stats'),
                            ('outputspec.pfiles', 'stats.@pfiles'),
                            ('level1design.ev_files', 'design'),
                            ('level1design.fsf_files', 'design.@fsf'),
                            ('modelgen.con_file', 'design.@confile'),
                            ('modelgen.fcon_file', 'design.@fconfile'),
                            ('modelgen.design_cov', 'design.@covmatriximg'),
                            ('modelgen.design_image', 'design.@designimg'),
                            ('modelgen.design_file', 'design.@designfile'),
                            ('modelestimate.logfile', 'design.@log'),
                            ('modelestimate.sigmasquareds', 'model.@resid_sum'),
                            ('modelestimate.fstats', 'stats.@fstats'),
                            ('modelestimate.thresholdac', 'model.@serial_corr'),
                            ])
        ])

    if options['keep_resid']:
        lvl1pipe_wf.connect([
            (modelfit, sinker, [('modelestimate.residual4d', 'model.@resid')
                                ])
            ])
    return lvl1pipe_wf
NodeHash_1e98cad0.inputs.highpass_sigma = 25 #Wraps command **fslmaths** NodeHash_1e558730 = pe.MapNode(interface=fsl.MeanImage(), name='NodeName_1e558730', iterfield=['in_file']) NodeHash_1e558730.inputs.dimension = 'T' #Wraps command **fslmaths** NodeHash_1fdac460 = pe.MapNode(interface=fsl.BinaryMaths(), name='NodeName_1fdac460', iterfield=['in_file', 'operand_file']) NodeHash_1fdac460.inputs.operation = 'add' #Makes a model specification compatible with spm/fsl designers. NodeHash_214dcae0 = pe.MapNode(interface=modelgen.SpecifyModel(), name='NodeName_214dcae0', iterfield=['functional_runs', 'subject_info']) NodeHash_214dcae0.inputs.high_pass_filter_cutoff = 0 NodeHash_214dcae0.inputs.input_units = 'secs' NodeHash_214dcae0.inputs.time_repetition = 2.0 #Generate FEAT specific files NodeHash_2087a210 = pe.MapNode(interface=fsl.Level1Design(), name='NodeName_2087a210', iterfield=['session_info']) NodeHash_2087a210.inputs.bases = {'dgamma': {'derivs': False}} NodeHash_2087a210.inputs.contrasts = [ ('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1]) ] NodeHash_2087a210.inputs.interscan_interval = 2.0
# NOTE(review): memcalc=MemoryCalculator() is a mutable default argument
# shared across calls; memcalc is unused in this body — consider a None
# default. TODO confirm with callers.
def init_taskbased_wf(analysis=None, memcalc=MemoryCalculator()):
    """
    Create a workflow that fits a first-level GLM to task functional data.

    Builds a nipype workflow: SpecifyModel -> Level1Design -> FEATModel ->
    FILMGLS, plus DOF-volume creation and result-dict packaging.

    :param analysis: Analysis object describing tags and contrasts (asserted
        non-None despite the None default).
    :param memcalc: memory calculator instance (unused in this function).
    :returns: tuple of (workflow, (boldfilevariant,)), where boldfilevariant
        describes the required input file fields and preprocessing variants.
    """
    assert isinstance(analysis, Analysis)
    assert isinstance(analysis.tags, Tags)
    # make bold file variant specification
    # Each tag present on the analysis appends a (key, value) tuple that
    # selects which preprocessed bold file variant to consume downstream.
    boldfilefields = ["bold_file"]
    varianttupls = [("space", analysis.tags.space)]
    if analysis.tags.grand_mean_scaled is not None:
        assert isinstance(analysis.tags.grand_mean_scaled, GrandMeanScaledTag)
        varianttupls.append(analysis.tags.grand_mean_scaled.as_tupl())
    if analysis.tags.band_pass_filtered is not None:
        assert isinstance(analysis.tags.band_pass_filtered, BandPassFilteredTag)
        # only gaussian-weighted filtering is compatible with this model setup
        assert analysis.tags.band_pass_filtered.type == "gaussian"
        varianttupls.append(analysis.tags.band_pass_filtered.as_tupl())
    if analysis.tags.confounds_removed is not None:
        assert isinstance(analysis.tags.confounds_removed, ConfoundsRemovedTag)
        # aroma_motion components are removed from the data upstream...
        confounds_removed_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" in name)
        varianttupls.append(("confounds_removed", confounds_removed_names))
        # ...while all other confounds are passed to the GLM as regressors.
        confounds_extract_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" not in name)
        if len(confounds_extract_names) > 0:
            boldfilefields.append("confounds_file")
            varianttupls.append(("confounds_extract", confounds_extract_names))
    if analysis.tags.smoothed is not None:
        assert isinstance(analysis.tags.smoothed, SmoothedTag)
        varianttupls.append(analysis.tags.smoothed.as_tupl())
    variantdict = dict(varianttupls)

    boldfilevariant = (tuple(boldfilefields), tuple(varianttupls))

    assert analysis.name is not None
    workflow = pe.Workflow(name=analysis.name)

    # inputs are the bold file, the mask file and the confounds file
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            *boldfilefields, "mask_file", "condition_files", "metadata"
        ]),
        name="inputnode",
    )

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(
        interface=ParseConditionFile(),
        name="parseconditionfile",
    )
    workflow.connect(inputnode, "condition_files", parseconditionfile, "in_any")

    # helper passed as a connection transform: pulls the TR out of the
    # metadata dict flowing through the graph
    def get_repetition_time(dic):
        return dic.get("RepetitionTime")

    # first level model specification
    modelspec = pe.Node(
        interface=model.SpecifyModel(input_units="secs", ),
        name="modelspec",
    )
    workflow.connect([
        (
            inputnode,
            modelspec,
            [
                ("bold_file", "functional_runs"),
                (("metadata", get_repetition_time), "time_repetition"),
            ],
        ),
        (parseconditionfile, modelspec, [("subject_info", "subject_info")]),
    ])
    if "band_pass_filtered" in variantdict:
        # match the model's highpass cutoff to the filtering already applied
        modelspec.inputs.high_pass_filter_cutoff = float(
            analysis.tags.band_pass_filtered.high)
    if "confounds_extract" in variantdict:
        workflow.connect([(inputnode, modelspec,
                           [("confounds_file", "realignment_parameters")])])

    # transform contrasts dictionary to nipype list data structure
    # each entry becomes [name, type, [condition names], [condition weights]]
    contrasts = [[
        contrast.name,
        contrast.type.upper(), *map(list, zip(*contrast.values.items()))
    ] for contrast in analysis.contrasts]

    # generate design from first level specification
    level1design = pe.Node(
        interface=fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {
                "derivs": False
            }},
        ),
        name="level1design",
    )
    workflow.connect([
        (
            inputnode,
            level1design,
            [(("metadata", get_repetition_time), "interscan_interval")],
        ),
        (modelspec, level1design, [("session_info", "session_info")]),
    ])

    # generate required input files for FILMGLS from design
    # NOTE(review): iterfield is a MapNode argument; on a plain pe.Node it is
    # presumably ignored — confirm intent.
    modelgen = pe.Node(interface=fsl.FEATModel(),
                       name="modelgen",
                       iterfield=["fsf_file", "ev_files"])
    workflow.connect([(
        level1design,
        modelgen,
        [("fsf_files", "fsf_file"), ("ev_files", "ev_files")],
    )])

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    boldfilecutoff = pe.Node(interface=fsl.ImageStats(op_string="-R"),
                             name="boldfilecutoff")
    workflow.connect([(inputnode, boldfilecutoff, [("bold_file", "in_file")])])

    # actually estimate the first level model
    # NOTE(review): same iterfield-on-pe.Node concern as modelgen above.
    modelestimate = pe.Node(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name="modelestimate",
        iterfield=["design_file", "in_file", "tcon_file"],
    )
    workflow.connect([
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        # use the image minimum (first element of -R output) as threshold
        (boldfilecutoff, modelestimate, [(("out_stat", firstfloat),
                                          "threshold")]),
        (
            modelgen,
            modelestimate,
            [("design_file", "design_file"), ("con_file", "tcon_file")],
        ),
    ])

    # make dof volume
    makedofvolume = pe.MapNode(
        interface=MakeDofVolume(),
        iterfield=["dof_file", "cope_file"],
        name="makedofvolume",
    )
    workflow.connect([
        (
            modelestimate,
            makedofvolume,
            [(("copes", first), "cope_file"), ("dof_file", "dof_file")],
        ),
    ])

    # package per-contrast outputs into result dictionaries
    outputnode = pe.Node(
        interface=MakeResultdicts(keys=[
            "firstlevelanalysisname",
            "firstlevelfeaturename",
            "cope",
            "varcope",
            "zstat",
            "dof_file",
            "mask_file",
        ]),
        name="outputnode",
    )
    outputnode.inputs.firstlevelanalysisname = analysis.name
    # feature names are the contrast names (first element of each contrast)
    outputnode.inputs.firstlevelfeaturename = list(map(first, contrasts))
    workflow.connect([
        (inputnode, outputnode, [("metadata", "basedict"),
                                 ("mask_file", "mask_file")]),
        (
            modelestimate,
            outputnode,
            [
                (("copes", ravel), "cope"),
                (("varcopes", ravel), "varcope"),
                (("zstats", ravel), "zstat"),
            ],
        ),
        (makedofvolume, outputnode, [("out_file", "dof_file")]),
    ])

    return workflow, (boldfilevariant, )
def modelfit_fsl(wf_name='modelfit'):
    """
    Fit 1st level GLM using FSL routines.

    Builds a MapNode-based pipeline (one element per functional run):
    SUSAN smoothing -> highpass filtering -> SpecifyModel -> Level1Design ->
    FEATModel -> FILMGLS -> ContrastMgr, exposing z-stats, copes and
    varcopes on the output node.

    :param wf_name: name of the returned nipype Workflow.
    :returns: the assembled (unconnected-to-data) nipype Workflow.

    Usage (TODO)
    modelfit.inputs.inputspec.fwhm = 12
    modelfit.inputs.inputspec.brain_mask = ['/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz', '/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz']

    modelfit.inputs.inputspec.input_units = 'secs'
    modelfit.inputs.inputspec.in_file = ['/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz', '/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz']
    modelfit.inputs.inputspec.TR = 2
    modelfit.inputs.inputspec.high_pass_filter_cutoff = 100 #sigma in TR
    modelfit.inputs.inputspec.event_files = ['/opt/shared2/nipype-test/testblock/a']

    cont1 = ['whisker', 'T', ['a', 'a'], [1.0, 0.0]]
    cont2 = ['-whisker', 'T', ['a', 'a'], [-1.0, 0.0]]
    cont3 = ['Task','F', [cont1, cont2]]
    contrasts = [cont1]

    modelfit.inputs.inputspec.contrasts = contrasts #TODO: change condition names

    modelfit.inputs.inputspec.bases_function = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True

    #modelfit.write_graph('graph.dot');
    modelfit.write_graph('graph.dot', graph2use='colored');
    x=modelfit.run()
    #x=modelfit.run(plugin='MultiProc', plugin_args={'n_procs': 8})

    server.serve_content(modelfit)
    """

    modelfit = pe.Workflow(name=wf_name)
    """
    Set up a node to define all inputs required for the preprocessing workflow
    """
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=[
            'in_file', 'ev_file', 'confounders', 'contrasts',
            'high_pass_filter_cutoff', 'fwhm', 'interscan_interval', 'TR',
            'input_units', 'bases_function', 'model_serial_correlations',
            'brain_mask'
        ],
        mandatory_inputs=True),
        name='inputspec')

    #TODO: eliminate brain mask
    #inputnode.iterables=[('high_pass_filter_cutoff', [30, 60, 90, 120, 500])]

    """
    Set up a node to define outputs for the preprocessing workflow
    """
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['zstats', 'zfstats', 'copes', 'varcopes'],
        mandatory_inputs=True),
        name='outputspec')

    # collect subject info
    # get_subject_info is defined elsewhere in this file; builds a Bunch per
    # run from the event file and per-run confounders.
    getsubjectinfo = pe.MapNode(util.Function(
        input_names=['ev_file', 'confounders'],
        output_names=['subject_info'],
        function=get_subject_info),
        name='getsubjectinfo',
        iterfield=['confounders'])

    # nipype.algorithms.modelgen.SpecifyModel to generate design information.
    modelspec = pe.MapNode(interface=model.SpecifyModel(),
                           name="modelspec",
                           iterfield=['subject_info'])

    # smooth #TODO: move into preproc pipeline
    smooth = preproc.create_susan_smooth("smooth")
    #smooth.get_node( "smooth").iterables=[('fwhm', [6., 8., 10., 12., 14., 16.])]

    # converts the cutoff (in seconds/TRs) into an fslmaths -bptf op string;
    # highpass_operand is defined elsewhere in this file
    toSigma = pe.Node(interface=util.Function(
        input_names=['high_pass_filter_cutoff', 'TR'],
        output_names=['high_pass_filter_opstring'],
        function=highpass_operand),
        name='toSigma')

    # temporal highpass filter; op_string is filled in from toSigma
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt',
                                                   op_string=''),
                          iterfield=['in_file'],
                          name='highpass')

    # Use nipype.interfaces.fsl.Level1Design to generate a run specific fsf
    # file for analysis
    # NOTE(review): iterfield given as a bare string rather than a list —
    # presumably accepted by MapNode, but inconsistent with the other nodes.
    level1design = pe.MapNode(interface=fsl.Level1Design(),
                              name="level1design",
                              iterfield='session_info')

    # Use nipype.interfaces.fsl.FEATModel to generate a run specific mat file
    # for use by FILMGLS
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])

    # Use nipype.interfaces.fsl.FILMGLS to estimate a model specified by a
    # mat file and a functional run
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True,
                              mask_size=5,
                              threshold=200),
        name='modelestimate',
        #iterfield=['design_file', 'in_file'])
        iterfield=['in_file', 'design_file'])

    # Use nipype.interfaces.fsl.ContrastMgr to generate contrast estimates
    conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                             name='conestimate',
                             iterfield=[
                                 'param_estimates', 'sigmasquareds',
                                 'corrections', 'dof_file', 'tcon_file'
                             ])

    modelfit.connect([
        (
            inputnode, smooth,
            [
                ('in_file', 'inputnode.in_files'),
                ('fwhm', 'inputnode.fwhm'),  # in iterable
                ('brain_mask', 'inputnode.mask_file')
            ]),
        (smooth, highpass,
         [('outputnode.smoothed_files', 'in_file')]),
        (inputnode, toSigma, [('high_pass_filter_cutoff',
                               'high_pass_filter_cutoff')]),
        (inputnode, toSigma, [('TR', 'TR')]),
        (toSigma, highpass, [('high_pass_filter_opstring', 'op_string')]),
        (inputnode, getsubjectinfo, [('ev_file', 'ev_file'),
                                     ('confounders', 'confounders')]),
        (getsubjectinfo, modelspec, [('subject_info', 'subject_info')]),
        # the filtered data feeds both the model spec and the estimator
        (highpass, modelspec, [('out_file', 'functional_runs')]),
        (highpass, modelestimate, [('out_file', 'in_file')]),
        (inputnode, modelspec, [
            ('input_units', 'input_units'),
            ('TR', 'time_repetition'),
            ('high_pass_filter_cutoff', 'high_pass_filter_cutoff'),
        ]),
        (inputnode, level1design, [('TR', 'interscan_interval'),
                                   ('model_serial_correlations',
                                    'model_serial_correlations'),
                                   ('bases_function', 'bases'),
                                   ('contrasts', 'contrasts')]),
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate, [('param_estimates', 'param_estimates'),
                                      ('sigmasquareds', 'sigmasquareds'),
                                      ('corrections', 'corrections'),
                                      ('dof_file', 'dof_file')]),
        (conestimate, outputnode, [('zstats', 'zstats'),
                                   ('zfstats', 'zfstats'),
                                   ('copes', 'copes'),
                                   ('varcopes', 'varcopes')])
    ])
    return modelfit
# NOTE(review): memcalc=MemoryCalculator() is a mutable default argument
# shared across calls — consider defaulting to None. TODO confirm.
def init_taskbased_wf(
    workdir=None,
    feature=None,
    condition_files=None,
    condition_units=None,
    memcalc=MemoryCalculator(),
):
    """
    Create a workflow that fits a first-level GLM to task functional data.

    Pipeline: ParseConditionFile + FillNA -> SpecifyModel -> Level1Design ->
    FEATModel -> FILMGLS, with design/contrast matrices and statistical maps
    collected into result dicts and written via ResultdictDatasink.

    :param workdir: base directory for the result datasink.
    :param feature: feature object providing name, conditions, contrasts and
        optionally high_pass_filter_cutoff; may be None.
    :param condition_files: optional condition file inputs to preset.
    :param condition_units: optional condition time units ('secs'/'scans').
    :param memcalc: memory calculator instance (unused in this body).
    :returns: the assembled nipype Workflow.
    """
    if feature is not None:
        name = f"{formatlikebids(feature.name)}_wf"
    else:
        name = "taskbased_wf"
    workflow = pe.Workflow(name=name)

    # inputs: tags/vals/metadata for result bookkeeping, plus the bold image,
    # mask, TR, selected confounds and condition information
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "tags",
                "vals",
                "metadata",
                "bold",
                "mask",
                "repetition_time",
                "confounds_selected",
                "condition_names",
                "condition_files",
                "condition_units",
            ]
        ),
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]), name="outputnode")

    if feature is not None:
        inputnode.inputs.condition_names = feature.conditions

    if condition_files is not None:
        inputnode.inputs.condition_files = condition_files

    if condition_units is not None:
        inputnode.inputs.condition_units = condition_units

    # result dicts: (a) design/contrast matrices, (b) statistical maps
    make_resultdicts_a = pe.Node(
        MakeResultdicts(tagkeys=["feature"], imagekeys=["design_matrix", "contrast_matrix"]),
        name="make_resultdicts_a",
    )
    if feature is not None:
        make_resultdicts_a.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_a, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_a, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_a, "metadata")
    make_resultdicts_b = pe.Node(
        MakeResultdicts(
            tagkeys=["feature", "taskcontrast"],
            imagekeys=["effect", "variance", "z", "dof", "mask"],
            metadatakeys=["sources"],
        ),
        name="make_resultdicts_b",
    )
    if feature is not None:
        make_resultdicts_b.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_b, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_b, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_b, "metadata")
    workflow.connect(inputnode, "mask", make_resultdicts_b, "mask")

    # only the statistical-map dicts are exposed on the output node
    workflow.connect(make_resultdicts_b, "resultdicts", outputnode, "resultdicts")

    # both result-dict streams are written to disk
    merge_resultdicts = pe.Node(niu.Merge(2), name="merge_resultdicts")
    workflow.connect(make_resultdicts_a, "resultdicts", merge_resultdicts, "in1")
    workflow.connect(make_resultdicts_b, "resultdicts", merge_resultdicts, "in2")
    resultdict_datasink = pe.Node(
        ResultdictDatasink(base_directory=workdir), name="resultdict_datasink"
    )
    workflow.connect(merge_resultdicts, "out", resultdict_datasink, "indicts")

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(ParseConditionFile(), name="parseconditionfile")
    workflow.connect(inputnode, "condition_names", parseconditionfile, "condition_names")
    workflow.connect(inputnode, "condition_files", parseconditionfile, "in_any")

    # replace NaNs in the selected confounds table before use as regressors
    fillna = pe.Node(FillNA(), name="fillna")
    workflow.connect(inputnode, "confounds_selected", fillna, "in_tsv")

    # first level model specification
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    if hasattr(feature, "high_pass_filter_cutoff"):
        modelspec.inputs.high_pass_filter_cutoff = feature.high_pass_filter_cutoff
    else:
        # np.inf effectively disables the highpass filter
        modelspec.inputs.high_pass_filter_cutoff = np.inf
    workflow.connect(inputnode, "bold", modelspec, "functional_runs")
    workflow.connect(inputnode, "condition_units", modelspec, "input_units")
    workflow.connect(inputnode, "repetition_time", modelspec, "time_repetition")
    workflow.connect(fillna, "out_no_header", modelspec, "realignment_parameters")
    workflow.connect(parseconditionfile, "subject_info", modelspec, "subject_info")

    # transform contrasts dictionary to nipype list data structure
    # each entry: [name, type, condition names, weights (0.0 when absent)]
    contrasts = []
    if feature is not None:
        condition_names = feature.conditions
        for contrast in feature.contrasts:
            contrast_values = [contrast["values"].get(c, 0.0) for c in condition_names]
            contrasts.append(
                [contrast["name"], contrast["type"].upper(), condition_names, contrast_values]
            )
    contrast_names = list(map(firststr, contrasts))
    make_resultdicts_b.inputs.taskcontrast = contrast_names

    # generate design from first level specification
    level1design = pe.Node(
        fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {"derivs": False}},
        ),
        name="level1design",
    )
    workflow.connect(inputnode, "repetition_time", level1design, "interscan_interval")
    workflow.connect(modelspec, "session_info", level1design, "session_info")

    # generate required input files for FILMGLS from design
    modelgen = pe.Node(fsl.FEATModel(), name="modelgen")
    workflow.connect([(level1design, modelgen, [(("fsf_files", firststr), "fsf_file")])])
    workflow.connect([(level1design, modelgen, [(("ev_files", ravel), "ev_files")])])

    # calculate range of image values to determine cutoff value
    stats = pe.Node(fsl.ImageStats(op_string="-R"), name="stats")
    workflow.connect(inputnode, "bold", stats, "in_file")
    # firstfloat extracts the minimum (first element of the -R output pair)
    cutoff = pe.Node(
        niu.Function(input_names=["obj"], output_names=["min_val"], function=firstfloat),
        name="cutoff",
    )
    workflow.connect(stats, "out_stat", cutoff, "obj")

    # actually estimate the first level model
    modelestimate = pe.Node(
        fsl.FILMGLS(smooth_autocorr=True, mask_size=5), name="modelestimate"
    )
    workflow.connect(inputnode, "bold", modelestimate, "in_file")
    workflow.connect(cutoff, "min_val", modelestimate, "threshold")
    workflow.connect(modelgen, "design_file", modelestimate, "design_file")
    workflow.connect(modelgen, "con_file", modelestimate, "tcon_file")

    # make dof volume
    makedofvolume = pe.Node(
        MakeDofVolume(), iterfield=["dof_file", "copes"], name="makedofvolume"
    )
    workflow.connect(modelestimate, "copes", makedofvolume, "copes")
    workflow.connect(modelestimate, "dof_file", makedofvolume, "dof_file")

    workflow.connect(modelestimate, "copes", make_resultdicts_b, "effect")
    workflow.connect(modelestimate, "varcopes", make_resultdicts_b, "variance")
    workflow.connect(modelestimate, "zstats", make_resultdicts_b, "z")
    workflow.connect(makedofvolume, "out_file", make_resultdicts_b, "dof")

    # column names for the exported design matrix: conditions + confounds
    mergecolumnnames = pe.Node(niu.Merge(2), name="mergecolumnnames")
    # NOTE(review): condition_names is only bound inside the
    # `if feature is not None` branch above — this line presumably raises
    # NameError when feature is None; confirm intended usage.
    mergecolumnnames.inputs.in1 = condition_names
    workflow.connect(fillna, "column_names", mergecolumnnames, "in2")

    # convert FSL vest-format design/contrast files into TSVs for reporting
    design_unvest = pe.Node(Unvest(), name="design_unvest")
    workflow.connect(modelgen, "design_file", design_unvest, "in_vest")
    design_tsv = pe.Node(MergeColumns(1), name="design_tsv")
    workflow.connect(design_unvest, "out_no_header", design_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", design_tsv, "column_names1")

    contrast_unvest = pe.Node(Unvest(), name="contrast_unvest")
    workflow.connect(modelgen, "con_file", contrast_unvest, "in_vest")
    contrast_tsv = pe.Node(MergeColumns(1), name="contrast_tsv")
    contrast_tsv.inputs.row_index = contrast_names
    workflow.connect(contrast_unvest, "out_no_header", contrast_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", contrast_tsv, "column_names1")

    workflow.connect(design_tsv, "out_with_header", make_resultdicts_a, "design_matrix")
    workflow.connect(contrast_tsv, "out_with_header", make_resultdicts_a, "contrast_matrix")

    return workflow
def L1PIPE():
    """
    Interactive first-level FSL pipeline.

    Prompts the user (drag-and-drop paths plus TR and highpass cutoff) and
    runs SpecifyModel -> Level1Design -> FEATModel -> FILMGLS, plotting the
    resulting z-stat maps. All inputs come from stdin; nothing is returned.

    NOTE(review): uses raw_input, so this is Python 2 code; port to input()
    for Python 3.
    """
    # ---1) Import modules
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.algorithms.modelgen as model
    import glob
    from nipype import Function
    import matplotlib
    import nipype.interfaces.utility as util
    import os

    #--- 2) Specify model node
    specify_model = pe.Node(interface=model.SpecifyModel(), name="SPECIFY_MODEL")
    specify_model.inputs.input_units = 'secs'
    # dragging a file into the terminal may add surrounding quotes — strip them
    runs=raw_input('Please drag in the pre-processsed functional data\n')
    runs2= runs.strip('\'"')
    # the workflow's base_dir is the directory containing the functional data
    NIFTIDIR=os.path.split(runs2)[0]
    specify_model.inputs.functional_runs = [runs2]
    specify_model.inputs.time_repetition = float(raw_input('Enter the TR (s)\n'))
    specify_model.inputs.high_pass_filter_cutoff = float(raw_input('Enter the High pass filter cutoff (s)\n'))
    EVENTFILES=raw_input('Please drag in the directory of 3 column event files')
    EVENTFILES2=EVENTFILES.strip('\'"')
    # sorted so that event files map to conditions in a deterministic order
    EVENTFILESLIST=glob.glob(EVENTFILES2 + '/*')
    specify_model.inputs.event_files=sorted(EVENTFILESLIST)

    #--- 3) Level 1 design node.
    Designer=pe.Node(interface=fsl.Level1Design(),name='DESIGN')
    Designer.inputs.interscan_interval = float(specify_model.inputs.time_repetition)
    Designer.inputs.bases = {'dgamma':{'derivs': False}}
    # prewhitening disabled (bool(0) == False)
    Designer.inputs.model_serial_correlations=bool(0)

    #--- 4) Make some contrasts
    # condition names must match the (sorted) event file basenames above
    cont1=('Task', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
    cont2=('Up', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, 0, 0, 1, 0, 0, 1])
    cont3=('SC', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, 1, 1, 0, 0, 0, 0])
    cont4=('UpvSC', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, -1, -1, 1, 0, 0, 1])
    Designer.inputs.contrasts=[cont1, cont2, cont3, cont4]

    #--- 5) FSL model node
    Model=pe.Node(interface=fsl.FEATModel(),name='FEATMODEL')

    #--- 6) FILM GSL node
    fgls=pe.Node(interface=fsl.FILMGLS(),name='FILM_GLS')
    fgls.inputs.in_file=runs2

    #--- 7) outputnode for the design image (gets binned otherwise)
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['im','cope','varcope','dof','resid','params','sigmas']),name='outputnode')

    #--- 8) Plotting node
    # runs inside a nipype Function node, hence the local imports
    def plot(in_file):
        from nilearn import image
        from nilearn import plotting
        import matplotlib
        display=plotting.plot_stat_map(stat_map_img = in_file, display_mode='z', cut_coords=10, threshold=float(0))
        matplotlib.pyplot.show()

    plotter=pe.MapNode(Function(input_names=['in_file'],output_names='display',function=plot),iterfield=['in_file'],name='PLOTTER')

    workflow = pe.Workflow(name='L1PIPE')

    workflow.connect(specify_model,'session_info',Designer,'session_info')
    workflow.connect(Designer,'fsf_files',Model,'fsf_file')
    workflow.connect(Designer,'ev_files',Model,'ev_files')
    workflow.connect(Model,'design_file',fgls,'design_file')
    workflow.connect(Model,'con_file',fgls,'tcon_file')
    workflow.connect(Model,'design_image',outputnode,'im')

    # Feed the z stats to the plotter.
    workflow.connect(fgls,'zstats',plotter,'in_file')
    workflow.connect(fgls,'copes',outputnode,'cope')
    workflow.connect(fgls,'varcopes',outputnode,'varcope')
    workflow.connect(fgls,'dof_file',outputnode,'dof')
    workflow.connect(fgls,'residual4d',outputnode,'resid')
    workflow.connect(fgls,'param_estimates',outputnode,'params')
    workflow.connect(fgls,'sigmasquareds',outputnode,'sigmas')

    workflow.base_dir = NIFTIDIR
    workflow.write_graph(graph2use='exec')
    workflow.run()
def fsl_run_level_wf(
    model,
    step,
    bids_dir,
    output_dir,
    work_dir,
    subject_id,
    database_path,
    smoothing_fwhm=None,
    smoothing_level=None,
    smoothing_type=None,
    use_rapidart=False,
    detrend_poly=None,
    align_volumes=None,
    smooth_autocorrelations=False,
    despike=False,
    name="fsl_run_level_wf",
):
    """Generate run level workflow for a given model.

    Assembles (per run, via MapNodes): optional despiking/realignment,
    optional SUSAN smoothing and masking, SpecifyModel -> Level1Design ->
    FEATModel -> FILMGLS, p-value computation, matrix plotting and BIDS-style
    datasinking of contrast maps.

    :param model: BIDS statistical model dict (uses "Name" and "Input").
    :param step: the run-level step dict of the model (uses "Level").
    :param bids_dir, output_dir, work_dir: directory paths.
    :param subject_id: subject to select from the BIDS database.
    :param database_path: path to the pybids database.
    :param smoothing_fwhm/level/type: SUSAN smoothing configuration.
    :param use_rapidart: insert RapidArt artifact detection.
    :param detrend_poly: polynomial detrend order; also triggers design
        matrix correction.
    :param align_volumes: realign runs to a reference volume.
    :param smooth_autocorrelations: enable FILM prewhitening.
    :param despike: run AFNI 3dDespike before modeling.
    :returns: the assembled nipype Workflow.
    """
    bids_dir = Path(bids_dir)
    work_dir = Path(work_dir)
    workflow = pe.Workflow(name=name)

    level = step["Level"]

    dimensionality = 3  # Nipype FSL.SUSAN Default
    if smoothing_type == "inp":
        dimensionality = 2

    workflow.__desc__ = ""
    (work_dir / model["Name"]).mkdir(exist_ok=True)

    # entity filters used to query the BIDS database, always restricted to
    # the requested subject
    include_entities = {}
    if "Input" in model:
        if "Include" in model["Input"]:
            include_entities = model["Input"]["Include"]
    include_entities.update({"subject": subject_id})

    getter = pe.Node(
        BIDSGet(
            database_path=database_path,
            fixed_entities=include_entities,
            align_volumes=align_volumes,
        ),
        name="func_select",
    )

    # extracts run-level regressors/contrasts/metadata per functional run
    get_info = pe.MapNode(
        GetRunModelInfo(model=step, detrend_poly=detrend_poly),
        iterfield=[
            "metadata_file", "regressor_file", "events_file", "entities"
        ],
        name=f"get_{level}_info",
    )

    despiker = pe.MapNode(
        afni.Despike(outputtype="NIFTI_GZ"),
        iterfield=["in_file"],
        name="despiker",
    )

    realign_runs = pe.MapNode(
        fsl.MCFLIRT(output_type="NIFTI_GZ", interpolation="sinc"),
        iterfield=["in_file", "ref_file"],
        name="func_realign",
    )

    # pass-through node that decouples the (optional) despike/realign branch
    # from everything downstream
    wrangle_volumes = pe.MapNode(
        IdentityInterface(fields=["functional_file"]),
        iterfield=["functional_file"],
        name="wrangle_volumes",
    )

    # cutoff of -1.0 disables SpecifyModel's internal highpass handling
    specify_model = pe.MapNode(
        modelgen.SpecifyModel(high_pass_filter_cutoff=-1.0, input_units="secs"),
        iterfield=["functional_runs", "subject_info", "time_repetition"],
        name=f"model_{level}_specify",
    )

    # pass-through bundle of everything the design/estimation nodes need,
    # keeping the per-run iteration fields aligned
    fit_model = pe.MapNode(
        IdentityInterface(
            fields=[
                "session_info", "interscan_interval", "contrasts",
                "functional_data"
            ],
            mandatory_inputs=True,
        ),
        iterfield=[
            "functional_data", "session_info", "interscan_interval",
            "contrasts"
        ],
        name=f"model_{level}_fit",
    )

    first_level_design = pe.MapNode(
        fsl.Level1Design(
            bases={"dgamma": {
                "derivs": False
            }},
            model_serial_correlations=False,
        ),
        iterfield=["session_info", "interscan_interval", "contrasts"],
        name=f"model_{level}_design",
    )

    generate_model = pe.MapNode(
        fsl.FEATModel(output_type="NIFTI_GZ"),
        iterfield=["fsf_file", "ev_files"],
        name=f"model_{level}_generate",
    )

    # prewhitening off by default; switched on below when
    # smooth_autocorrelations is requested
    estimate_model = pe.MapNode(
        fsl.FILMGLS(
            threshold=0.0,  # smooth_autocorr=True
            output_type="NIFTI_GZ",
            results_dir="results",
            smooth_autocorr=False,
            autocorr_noestimate=True,
        ),
        iterfield=["design_file", "in_file", "tcon_file"],
        name=f"model_{level}_estimate",
    )

    if smooth_autocorrelations:
        first_level_design.inputs.model_serial_correlations = True
        estimate_model.inputs.smooth_autocorr = True
        estimate_model.inputs.autocorr_noestimate = False

    # convert z-stats to p-value maps (fslmaths -ztop)
    calculate_p = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-ztop", suffix="_pval"),
        iterfield=["in_file"],
        name=f"model_{level}_caculate_p",
    )

    image_pattern = ("[sub-{subject}/][ses-{session}/]"
                     "[sub-{subject}_][ses-{session}_]"
                     "task-{task}_[acq-{acquisition}_]"
                     "[rec-{reconstruction}_][run-{run}_]"
                     "[echo-{echo}_][space-{space}_]contrast-{contrast}_"
                     "stat-{stat<effect|variance|z|p|t|F>}_statmap.nii.gz")

    run_rapidart = pe.MapNode(
        ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            zintensity_threshold=3,
            norm_threshold=1,
            bound_by_brainmask=True,
            mask_type="file",
            parameter_source="FSL",
        ),
        iterfield=["realignment_parameters", "realigned_files", "mask_file"],
        name="rapidart_run",
    )

    # folds RapidArt outlier regressors back into the run info
    reshape_rapidart = pe.MapNode(
        Function(
            input_names=[
                "run_info", "functional_file", "outlier_file",
                "contrast_entities"
            ],
            output_names=["run_info", "contrast_entities"],
            function=utils.reshape_ra,
        ),
        iterfield=[
            "run_info", "functional_file", "outlier_file", "contrast_entities"
        ],
        name="reshape_rapidart",
    )

    # mean and median images feed SUSAN's brightness threshold / usans
    mean_img = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-Tmean", suffix="_mean"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_avgimg",
    )

    median_img = pe.MapNode(
        fsl.ImageStats(output_type="NIFTI_GZ", op_string="-k %s -p 50"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_medimg",
    )

    merge = pe.Node(Merge(2, axis="hstack"), name="smooth_merge")

    run_susan = pe.MapNode(
        fsl.SUSAN(output_type="NIFTI_GZ"),
        iterfield=["in_file", "brightness_threshold", "usans"],
        name="smooth_susan",
    )

    mask_functional = pe.MapNode(ApplyMask(),
                                 iterfield=["in_file", "mask_file"],
                                 name="mask_functional")

    # Exists solely to correct undesirable behavior of FSL
    # that results in loss of constant columns
    correct_matrices = pe.MapNode(
        Function(
            input_names=["design_matrix"],
            output_names=["design_matrix"],
            function=utils.correct_matrix,
        ),
        iterfield=["design_matrix"],
        run_without_submitting=True,
        name=f"correct_{level}_matrices",
    )

    # flattens per-run lists of maps into single lists with aligned metadata
    collate = pe.Node(
        MergeAll(
            fields=[
                "effect_maps",
                "variance_maps",
                "zscore_maps",
                "pvalue_maps",
                "tstat_maps",
                "contrast_metadata",
            ],
            check_lengths=True,
        ),
        name=f"collate_{level}",
    )

    collate_outputs = pe.Node(
        CollateWithMetadata(
            fields=[
                "effect_maps", "variance_maps", "zscore_maps", "pvalue_maps",
                "tstat_maps"
            ],
            field_to_metadata_map={
                "effect_maps": {
                    "stat": "effect"
                },
                "variance_maps": {
                    "stat": "variance"
                },
                "zscore_maps": {
                    "stat": "z"
                },
                "pvalue_maps": {
                    "stat": "p"
                },
                "tstat_maps": {
                    "stat": "t"
                },
            },
        ),
        name=f"collate_{level}_outputs",
    )

    plot_matrices = pe.MapNode(
        PlotMatrices(output_dir=output_dir, database_path=database_path),
        iterfield=["mat_file", "con_file", "entities", "run_info"],
        run_without_submitting=True,
        name=f"plot_{level}_matrices",
    )

    ds_contrast_maps = pe.MapNode(
        BIDSDataSink(base_directory=output_dir, path_patterns=image_pattern),
        iterfield=["entities", "in_file"],
        run_without_submitting=True,
        name=f"ds_{level}_contrast_maps",
    )

    wrangle_outputs = pe.Node(
        IdentityInterface(fields=["contrast_metadata", "contrast_maps"]),
        name=f"wrangle_{level}_outputs",
    )

    # Setup connections among nodes
    workflow.connect([(
        getter,
        get_info,
        [
            ("metadata_files", "metadata_file"),
            ("events_files", "events_file"),
            ("regressor_files", "regressor_file"),
            ("entities", "entities"),
        ],
    )])

    # four mutually-exclusive preprocessing routes into wrangle_volumes
    if align_volumes and despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, realign_runs, [("out_file", "in_file")]),
            (getter, realign_runs, [("reference_files", "ref_file")]),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif align_volumes and not despike:
        workflow.connect([
            (
                getter,
                realign_runs,
                [("functional_files", "in_file"),
                 ("reference_files", "ref_file")],
            ),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, wrangle_volumes, [("out_file", "functional_file")]),
        ])
    else:
        workflow.connect([(getter, wrangle_volumes, [("functional_files",
                                                      "functional_file")])])

    # with RapidArt, run info (and contrast entities) are routed through
    # reshape_rapidart so outlier regressors are included in the design
    if use_rapidart:
        workflow.connect([
            (get_info, run_rapidart, [("motion_parameters",
                                       "realignment_parameters")]),
            (getter, run_rapidart, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                run_rapidart,
                [("functional_file", "realigned_files")],
            ),
            (
                run_rapidart,
                reshape_rapidart,
                [("outlier_files", "outlier_file")],
            ),
            (
                get_info,
                reshape_rapidart,
                [("run_info", "run_info"),
                 ("contrast_entities", "contrast_entities")],
            ),
            (wrangle_volumes, reshape_rapidart, [("functional_file",
                                                  "functional_file")]),
            (
                reshape_rapidart,
                specify_model,
                [("run_info", "subject_info")],
            ),
            (reshape_rapidart, plot_matrices, [("run_info", "run_info")]),
            (reshape_rapidart, collate, [("contrast_entities",
                                          "contrast_metadata")]),
        ])
    else:
        workflow.connect([
            (get_info, specify_model, [("run_info", "subject_info")]),
            (get_info, plot_matrices, [("run_info", "run_info")]),
            (
                get_info,
                collate,
                [("contrast_entities", "contrast_metadata")],
            ),
        ])

    if smoothing_level == "l1" or smoothing_level == "run":
        run_susan.inputs.fwhm = smoothing_fwhm
        run_susan.inputs.dimension = dimensionality
        estimate_model.inputs.mask_size = smoothing_fwhm
        workflow.connect([
            (wrangle_volumes, mean_img, [("functional_file", "in_file")]),
            (
                wrangle_volumes,
                median_img,
                [("functional_file", "in_file")],
            ),
            (getter, mean_img, [("mask_files", "mask_file")]),
            (getter, median_img, [("mask_files", "mask_file")]),
            (mean_img, merge, [("out_file", "in1")]),
            (median_img, merge, [("out_stat", "in2")]),
            (wrangle_volumes, run_susan, [("functional_file", "in_file")]),
            (
                median_img,
                run_susan,
                [(
                    ("out_stat", utils.get_btthresh),
                    "brightness_threshold",
                )],
            ),
            (merge, run_susan, [(("out", utils.get_usans), "usans")]),
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (run_susan, mask_functional, [("smoothed_file", "in_file")]),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])
    else:
        # no smoothing: mask the (possibly despiked/realigned) data directly
        workflow.connect([
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                mask_functional,
                [("functional_file", "in_file")],
            ),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])

    workflow.connect([
        (
            get_info,
            specify_model,
            [("repetition_time", "time_repetition")],
        ),
        (specify_model, fit_model, [("session_info", "session_info")]),
        (
            get_info,
            fit_model,
            [("repetition_time", "interscan_interval"),
             ("run_contrasts", "contrasts")],
        ),
        (
            fit_model,
            first_level_design,
            [
                ("interscan_interval", "interscan_interval"),
                ("session_info", "session_info"),
                ("contrasts", "contrasts"),
            ],
        ),
        (first_level_design, generate_model, [("fsf_files", "fsf_file")]),
        (first_level_design, generate_model, [("ev_files", "ev_files")]),
    ])

    # with polynomial detrending, design matrices pass through the
    # constant-column correction before plotting/estimation
    if detrend_poly:
        workflow.connect([
            (
                generate_model,
                correct_matrices,
                [("design_file", "design_matrix")],
            ),
            (
                correct_matrices,
                plot_matrices,
                [("design_matrix", "mat_file")],
            ),
            (
                correct_matrices,
                estimate_model,
                [("design_matrix", "design_file")],
            ),
        ])
    else:
        workflow.connect([
            (generate_model, plot_matrices, [("design_file", "mat_file")]),
            (
                generate_model,
                estimate_model,
                [("design_file", "design_file")],
            ),
        ])

    workflow.connect([
        (getter, plot_matrices, [("entities", "entities")]),
        (generate_model, plot_matrices, [("con_file", "con_file")]),
        (fit_model, estimate_model, [("functional_data", "in_file")]),
        (generate_model, estimate_model, [("con_file", "tcon_file")]),
        (
            estimate_model,
            calculate_p,
            [(("zstats", utils.flatten), "in_file")],
        ),
        (
            estimate_model,
            collate,
            [
                ("copes", "effect_maps"),
                ("varcopes", "variance_maps"),
                ("zstats", "zscore_maps"),
                ("tstats", "tstat_maps"),
            ],
        ),
        (calculate_p, collate, [("out_file", "pvalue_maps")]),
        (
            collate,
            collate_outputs,
            [
                ("effect_maps", "effect_maps"),
                ("variance_maps", "variance_maps"),
                ("zscore_maps", "zscore_maps"),
                ("pvalue_maps", "pvalue_maps"),
                ("tstat_maps", "tstat_maps"),
                ("contrast_metadata", "metadata"),
            ],
        ),
        (
            collate_outputs,
            ds_contrast_maps,
            [("out", "in_file"), ("metadata", "entities")],
        ),
        (
            collate_outputs,
            wrangle_outputs,
            [("metadata", "contrast_metadata"), ("out", "contrast_maps")],
        ),
    ])

    return workflow
mp_trimmer = MapNode(Function(input_names=['mp', 'ftp'], output_names=['trimmed'], function=motionpar_trimmer), name='Motion_Parameter_Trimmer', iterfield=['mp', 'ftp']) analysis.connect(data, 'mp', mp_trimmer, 'mp') analysis.connect(data, 'func_trim_point', mp_trimmer, 'ftp') analysis.connect(mp_trimmer, 'trimmed', datasink, 'Trimmed_MotionPars') #Now we will model the task using FEATModel to convolve the task regressors #with a double gamma hemodynamic response function target_modelspec = MapNode( interface=model.SpecifyModel(high_pass_filter_cutoff=128.00, input_units='secs', time_repetition=0.400), name="Target_ModelSpec", iterfield=[ 'event_files', 'functional_runs', 'realignment_parameters' ]) analysis.connect(data, 'ev_target', target_modelspec, 'event_files') analysis.connect(question_trimmer, ('roi_file', nest_list), target_modelspec, 'functional_runs') analysis.connect(mp_trimmer, ('trimmed', nest_list), target_modelspec, 'realignment_parameters') target_level1design = MapNode( interface=fsl.Level1Design(bases={'dgamma': {
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None):
    """Analyzes an open fmri dataset

    Builds and returns (does not run) a nipype meta-workflow that chains
    preprocessing, first-level model fitting, fixed-effects combination
    across runs, and registration to MNI space, then sinks results.

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    subject : str or None
        If given, restrict the analysis to this one subject; otherwise
        iterate over every 'sub*' directory found under data_dir.

    model_id : int or None
        OpenfMRI model number (used to locate onsets and contrasts).

    task_id : int or None
        OpenfMRI task number.

    output_dir : str or None
        Base directory handed to the DataSink for final outputs.
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    # Subject directories are discovered from the filesystem layout.
    subjects = sorted([path.split(os.path.sep)[-1]
                       for path in glob(os.path.join(data_dir, 'sub*'))])

    # Identity node that iterates over subject/model/task combinations.
    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')

    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]
    else:
        # subjects.index(subject) raises ValueError for an unknown subject,
        # which surfaces typos early instead of silently analyzing nothing.
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'task_id', 'model_id'],
                                         outfields=['anat', 'bold', 'behav',
                                                    'contrasts']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    # %-templates are filled from the infields listed in template_args below.
    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                        'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                        'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                  'run%03d/cond*.txt'),
                                        'contrasts': ('models/model%03d/'
                                                      'task_contrasts.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id', 'task_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'task_id', 'run_id']],
                                       'contrasts': [['model_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        # Convert a cutoff in seconds to the half-period-in-TRs value
        # expected by the preprocessing highpass input.
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        # Runs as a nipype Function node, hence the local import.
        import numpy as np
        contrast_def = np.genfromtxt(contrast_file, dtype=object)
        if len(contrast_def.shape) == 1:
            contrast_def = contrast_def[None, :]
        contrasts = []
        for row in contrast_def:
            # NOTE(review): with dtype=object, genfromtxt yields bytes on
            # Python 3, so comparing row[0] to a str may never match —
            # verify on the targeted Python version.
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1)
                                 for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    # Outlier/artifact detection on the realigned series (per run).
    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(files):
        # Transpose [run][cope] nesting into [cope][run] so that the
        # fixed-effects flow combines each contrast across runs.
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        # Number of runs feeding the L2 model.
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration,
               'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration,
               'inputspec.anatomical_image')
    # Normalize into the standard FSL 2mm MNI template space.
    registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')

    def merge_files(copes, varcopes):
        # Concatenate copes + varcopes into one list so a single
        # registration pass can warp all of them; 'splits' records the
        # boundary so split_files can undo the merge afterwards.
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ])])
    wf.connect(mergefunc, 'out_files', registration,
               'inputspec.source_files')

    def split_files(in_files, splits):
        # NOTE(review): uses splits[1] (the varcope count) as the cut
        # point; this is only equivalent to splits[0] when the two lists
        # have equal length — confirm that assumption holds upstream.
        copes = in_files[:splits[1]]
        varcopes = in_files[splits[1]:]
        return copes, varcopes

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, model_id, task_id):
        # Build DataSink filename substitutions mapping nipype's internal
        # node/iteration directory names to readable output names.
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
                     'mean'))
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp_warp.' % i,
                         'cope%02d.' % (i + 1)))
            # varcope warps are indexed after all cope warps, hence the
            # len(conds) offset.
            subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ])])
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')

    """
    Set processing parameters
    """

    hpcutoff = 120.
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
def create_workflow(contrasts, out_label, contrasts_name, hrf, fwhm,
                    HighPass, RegSpace, motion_outliers_type):
    # Builds (does not run) the first-level nipype workflow: trims event
    # logs to the runs that have functional data, specifies the model,
    # fits it with the FSL modelfit flow, and sinks level-1 outputs.
    # NOTE(review): out_label is accepted but never referenced in this
    # body — confirm whether it is still needed by callers.
    level1_workflow = pe.Workflow(name='level1flow')
    level1_workflow.base_dir = os.path.abspath(
        './workingdirs/level1flow/' + contrasts_name + '/' + RegSpace)

    # ===================================================================
    #  _____                   _
    # |_   _|                 | |
    #   | |  _ __  _ __  _   _| |_
    #   | | | '_ \| '_ \| | | | __|
    #  _| |_| | | | |_) | |_| | |_
    # |_____|_| |_| .__/ \__,_|\__|
    #             | |
    #             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'fwhm',  # smoothing
            'highpass',
            'funcs',
            'event_log',
            'motion_parameters',
            'motion_outlier_files',
            'ref_func',
            'ref_funcmask',
        ]),
        name="inputspec")

    def remove_runs_missing_funcs(in_files, in_funcs):
        # Keep only the event files whose (sub, ses, run) triple has a
        # matching functional image. Runs inside a Function node, hence
        # the local imports.
        import os
        import re

        # NOTE(review): these asserts make the isinstance branches just
        # below unreachable — dead code to be confirmed/cleaned up.
        assert not isinstance(in_files, str), "in_files must be list"
        assert not isinstance(in_funcs, str), "in_funcs must be list"

        if isinstance(in_files, str):
            in_files = [in_files]
        if isinstance(in_funcs, str):
            in_funcs = [in_funcs]

        has_func = set()
        for f in in_funcs:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError('Could not process "sub-*_", "ses-*_", " \
"or "run-*_" from func "%s"' % f)
            has_func.add((sub, ses, run))

        files = []
        for f in in_files:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError('Could not process "sub-*_", "ses-*_", " \
"or "run-*_" from event file "%s"' % f)
            if (sub, ses, run) in has_func:
                files.append(f)
        return files

    input_events = pe.Node(
        interface=niu.Function(input_names=['in_files', 'in_funcs'],
                               output_names=['out_files'],
                               function=remove_runs_missing_funcs),
        name='input_events',
    )

    level1_workflow.connect([
        (inputnode, input_events, [
            ('funcs', 'in_funcs'),
            ('event_log', 'in_files'),
        ]),
    ])

    # -------------------------------------------------------------------
    #          /~_ _  _  _  _. _   _ . _  _ |. _  _
    #          \_/(/_| |(/_| |(_  |_)||_)(/_||| |(/_
    #                             |   |
    # -------------------------------------------------------------------
    """
    Preliminaries
    -------------
    Setup any package specific configuration. The output file format for FSL
    routines is being set to compressed NIFTI.
    """
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    modelfit = fslflows.create_modelfit_workflow()

    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")

    """
    Set up first-level workflow
    ---------------------------
    """

    def sort_copes(files):
        """ Sort by copes and the runs, ie.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        # NOTE(review): `files[0] is not str` is an identity check against
        # the str type itself, not a type check — always True in practice.
        assert files[0] is not str
        numcopes = len(files[0])
        assert numcopes > 1
        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        # Number of runs feeding the L2 model.
        return len(files)

    # ===================================================================
    #   ____        _               _
    #  / __ \      | |             | |
    # | |  | |_   _| |_ _ __  _   _| |_
    # | |  | | | | | __| '_ \| | | | __|
    # | |__| | |_| | |_| |_) | |_| | |_
    #  \____/ \__,_|\__| .__/ \__,_|\__|
    #                  | |
    #                  |_|
    # ===================================================================

    # --- LEV1 ---
    # Datasink
    outputfiles_lev1 = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container=('derivatives/modelfit/' + contrasts_name + '/' +
                   RegSpace + '/level1/mo-' + motion_outliers_type),
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles_lev1.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('/_mc_method_afni3dAllinSlices/', '/'),
    ]
    # Put result into a BIDS-like format
    outputfiles_lev1.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'_refsub([a-zA-Z0-9]+)', r''),
    ]
    level1_workflow.connect([
        (modelfit, outputfiles_lev1, [
            (('outputspec.copes', sort_copes), 'copes'),
            ('outputspec.dof_file', 'dof_files'),
            (('outputspec.varcopes', sort_copes), 'varcopes'),
        ]),
    ])

    # -------------------------------------------------------------------
    # (~   _ _  _ . _ _  _  _ _|_  _ _  _  _. |`. _
    # (_><|_)(/_| || | |(/_| | | _\|_)(/_(_|~|~|(_
    #     |                        |
    # -------------------------------------------------------------------
    """
    Use the get_node function to retrieve an internal node by name. Then set
    iterables on this node to perform two different extents of smoothing.
    """
    featinput = level1_workflow.get_node('modelfit.inputspec')
    featinput.inputs.fwhm = fwhm

    hpcutoff_s = HighPass  # FWHM in seconds
    TR = 2.5
    # NOTE(review): hard-codes 2.5 instead of reusing TR — keep in sync.
    hpcutoff_nvol = hpcutoff_s / 2.5  # FWHM in volumns

    # Use Python3 for processing. See code/requirements.txt for pip packages.
    featinput.inputs.highpass = hpcutoff_nvol / 2.355  # Gaussian: σ in vols

    """
    Setup a function that returns subject-specific information about the
    experimental paradigm. This is used by the
    :class:`nipype.modelgen.SpecifyModel` to create the information necessary
    to generate an SPM design matrix. In this tutorial, the same paradigm was
    used for every participant. Other examples of this function are available
    in the `doc/examples` folder. Note: Python knowledge required here.
    """
    # from timeevents.curvetracing import calc_curvetracing_events
    from timeevents import process_time_events

    timeevents = pe.MapNode(
        interface=process_time_events,  # calc_curvetracing_events,
        iterfield=('event_log', 'in_nvols', 'TR'),
        name='timeevents')

    def get_nvols(funcs):
        # Read the number of volumes (4th dim) of each functional image.
        import nibabel as nib
        nvols = []
        if isinstance(funcs, str):
            funcs = [funcs]
        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header  # NOTE(review): unused local
            try:
                nvols.append(func_img.get_data().shape[3])
            except IndexError as e:
                # if shape only has 3 dimensions, then it is only 1 volume
                nvols.append(1)
        return (nvols)

    def get_TR(funcs):
        # Read the TR (4th zoom) of each functional image from its header.
        import nibabel as nib
        TRs = []
        if isinstance(funcs, str):
            funcs = [funcs]
        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header
            try:
                TR = round(header.get_zooms()[3], 5)
            except IndexError as e:
                TR = 2.5
                print("Warning: %s did not have TR defined in the header. "
                      "Using default TR of %0.2f" % (func, TR))
            # Sanity check: guards against a header storing TR in ms or 0.
            assert TR > 1
            TRs.append(TR)
        return (TRs)

    level1_workflow.connect([
        (inputnode, timeevents, [
            (('funcs', get_nvols), 'in_nvols'),
            (('funcs', get_TR), 'TR'),
        ]),
        (input_events, timeevents, [('out_files', 'event_log')]),
        (inputnode, modelspec, [('motion_parameters', 'realignment_parameters')
                                ]),
        (modelspec, modelfit, [('session_info', 'inputspec.session_info')]),
    ])

    # Ignore volumes after last good response
    filter_outliers = pe.MapNode(interface=FilterNumsTask(),
                                 name='filter_outliers',
                                 iterfield=('in_file', 'max_number'))

    level1_workflow.connect([
        (inputnode, filter_outliers, [('motion_outlier_files', 'in_file')]),
        (filter_outliers, modelspec, [('out_file', 'outlier_files')]),
        (timeevents, filter_outliers, [('out_nvols', 'max_number')]),
    ])

    def evt_info(cond_events):
        # Convert per-run condition-event tables into the Bunch structure
        # SpecifyModel expects (conditions/onsets/durations/amplitudes).
        output = []
        # for each run
        for ev in cond_events:
            from nipype.interfaces.base import Bunch
            from copy import deepcopy
            names = []
            for name in ev.keys():
                if ev[name].shape[0] > 0:
                    names.append(name)
            # NOTE(review): the three comprehensions below are unused —
            # the Bunch recomputes the same lists (names is already
            # filtered to non-empty conditions, so results are identical).
            onsets = [
                deepcopy(ev[name].time) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            durations = [
                deepcopy(ev[name].dur) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            amplitudes = [
                deepcopy(ev[name].amplitude) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            run_results = Bunch(
                conditions=names,
                onsets=[deepcopy(ev[name].time) for name in names],
                durations=[deepcopy(ev[name].dur) for name in names],
                amplitudes=[deepcopy(ev[name].amplitude) for name in names])
            output.append(run_results)
        return output

    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = TR  # to-do: specify per func
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff_s

    # Find out which HRF function we want to use
    if hrf == 'fsl_doublegamma':
        # this is the default
        modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    else:
        # use a custom hrf as defined in arguments
        # NOTE(review): hrf[1:] assumes a leading '.'-style relative path
        # prefix — confirm the expected argument format.
        currpath = os.getcwd()
        hrftxt = currpath + hrf[1:]
        modelfit.inputs.inputspec.bases = {'custom': {'bfcustompath': hrftxt}}

    modelfit.inputs.inputspec.interscan_interval = TR
    modelfit.inputs.inputspec.contrasts = contrasts
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    modelfit.config['execution'] = dict(crashdump_dir=os.path.abspath('.'))

    # Ignore volumes after subject has finished working for the run
    beh_roi = pe.MapNode(fsl.ExtractROI(t_min=0),
                         name='beh_roi',
                         iterfield=['in_file', 't_size'])

    level1_workflow.connect([
        (timeevents, modelspec, [
            (('out_events', evt_info), 'subject_info'),
        ]),
        (inputnode, beh_roi, [
            ('funcs', 'in_file'),
        ]),
        (timeevents, beh_roi, [
            ('out_nvols', 't_size'),
        ]),
        (beh_roi, modelspec, [
            ('roi_file', 'functional_runs'),
        ]),
        (beh_roi, modelfit, [
            ('roi_file', 'inputspec.functional_data'),
        ]),
        (beh_roi, outputfiles_lev1, [
            ('roi_file', 'roi_file'),
        ]),
    ])
    return (level1_workflow)
def model_fitting(source_img, prepped_img, subject_info, aroma, task, args,
                  mask_file, run_number):
    """Fit a first-level GLM for one task run and return a thresholded map.

    Builds and runs a nipype model-fitting workflow (SpecifyModel ->
    Level1Design -> FEATModel -> FILMGLS), FDR-corrects the resulting
    z-map, runs an FSL cluster analysis, and resamples the thresholded
    map to the template grid with AFNI.

    Parameters
    ----------
    source_img : BIDS image object exposing ``.entities['RepetitionTime']``
    prepped_img : path to the preprocessed BOLD image
    subject_info : subject/run event information for SpecifyModel
    aroma : whether ICA-AROMA-cleaned input is used (forwarded to preprocess)
    task : str task label; also used to build output filenames
    args : namespace with ``outputdir``, ``fwhm``, ``cthresh``, ``alpha``
    mask_file : brain mask used for thresholding
    run_number : int; 0 means "single/unnumbered run" (no suffix),
        values > 0 produce a ``_run-0<n+1>`` directory.
        NOTE(review): the off-by-one-looking ``run_number + 1`` mirrors the
        original behavior — confirm the caller passes a zero-based index.

    Returns
    -------
    str : path to the FDR-thresholded, template-resampled z-map.
    """
    # Get the necessary parameters
    outputdir = args.outputdir
    fwhm = args.fwhm
    cthresh = args.cthresh
    alpha = args.alpha

    # Make a task directory in the output folder
    if run_number > 0:
        taskdir = os.path.join(outputdir,
                               task + "_run-0" + str(run_number + 1))
    else:
        taskdir = os.path.join(outputdir, task)

    if not os.path.exists(taskdir):
        os.mkdir(taskdir)
    os.mkdir(os.path.join(taskdir, 'stats'))
    os.mkdir(os.path.join(taskdir, 'figs'))

    processed_image = preprocess(aroma, fwhm, prepped_img, mask_file,
                                 taskdir, task)

    # set up contrasts: a single task-vs-baseline t-contrast
    task_vs_baseline = [
        task + " vs baseline", 'T', [task, 'baseline'], [1, -1]
    ]
    contrasts = [task_vs_baseline]

    """
    Model fitting workflow

    Inputs::
        inputspec.session_info : info generated by modelgen.SpecifyModel
        inputspec.interscan_interval : interscan interval
        inputspec.contrasts : list of contrasts
        inputspec.film_threshold : image threshold for FILM estimation
        inputspec.model_serial_correlations
        inputspec.bases

    Outputs::
        outputspec.copes
        outputspec.varcopes
        outputspec.dof_file
        outputspec.zfiles
        outputspec.parameter_estimates
    """
    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)

    # generate design info
    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]), name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name='modelestimate',
        iterfield=['design_file', 'in_file', 'tcon_file'])
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=['in1'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'zfiles', 'parameter_estimates'
    ]), name='outputspec')

    # Wire design generation and estimation together.
    modelfit.connect([
        (modelspec, inputspec, [('session_info', 'session_info')]),
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'),
          ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (merge_contrasts, outputspec, [('out', 'zfiles')]),
        (modelestimate, outputspec,
         [('param_estimates', 'parameter_estimates'),
          ('dof_file', 'dof_file')]),
    ])

    modelfit.connect([
        (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                   ('fcon_file', 'fcon_file')]),
        (modelestimate, merge_contrasts, [('zstats', 'in1'),
                                          ('zfstats', 'in2')]),
        (modelestimate, outputspec, [('copes', 'copes'),
                                     ('varcopes', 'varcopes')]),
    ])

    # Define inputs to workflow
    modelspec.inputs.functional_runs = processed_image
    inputspec.inputs.functional_data = processed_image
    modelspec.inputs.subject_info = subject_info
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.film_threshold = 10.0
    inputspec.inputs.interscan_interval = source_img.entities['RepetitionTime']
    inputspec.inputs.bases = {
        'gamma': {
            'gammasigma': 3,
            'gammadelay': 6,
            'derivs': True
        }
    }
    inputspec.inputs.contrasts = contrasts

    # Run the model-fitting pipeline. Main outputs are a feat directory
    # (w/ functional img) and a design.mat file
    res = modelfit.run()

    # outputs
    # FIX: use a context manager so the file handle is always closed
    # (the original left output_txt open for the process lifetime).
    with open(os.path.join(taskdir, task + '_outputs.txt'), 'w') as output_txt:
        print_outputs(output_txt, res)

    # The third node, FILM's, first element (i.e. only element) of its
    # 'zstats' output.
    # NOTE(review): relies on the iteration order of res.nodes placing
    # modelestimate third — fragile if the workflow topology changes.
    z_img = list(res.nodes)[2].result.outputs.zstats[0]

    # Use False Discovery Rate theory to correct for multiple comparisons
    fdr_thresh_img, fdr_threshold = thresholding.map_threshold(
        stat_img=z_img,
        mask_img=mask_file,
        alpha=alpha,
        height_control='fdr',
        cluster_threshold=cthresh)
    print("Thresholding at FDR corrected threshold of " + str(fdr_threshold))
    fdr_thresh_img_path = os.path.join(taskdir,
                                       task + '_fdr_thresholded_z.nii.gz')
    nibabel.save(fdr_thresh_img, fdr_thresh_img_path)

    # Do a cluster analysis using the FDR corrected threshold on the
    # original z_img
    print("Performing cluster analysis.")
    cl = fsl.Cluster(in_file=z_img, threshold=fdr_threshold)
    cluster_file = os.path.join(taskdir, 'stats', task + "_cluster_stats.txt")
    cluster_analysis(cluster_file, cl)

    # Resample the result image with AFNI
    resample_fdr_thresh_img_path = os.path.join(
        taskdir, task + '_fdr_thresholded_z_resample.nii.gz')
    print("Resampling thresholded image to MNI space")
    resample = afni.Resample(master=template,
                             out_file=resample_fdr_thresh_img_path,
                             in_file=fdr_thresh_img_path)
    resample.run()
    os.remove(fdr_thresh_img_path)

    print("Image to be returned: " + resample_fdr_thresh_img_path)
    return resample_fdr_thresh_img_path
#Wraps command **bet** NodeHash_3443a20 = pe.Node(interface=fsl.BET(), name='NodeName_3443a20') NodeHash_3443a20.inputs.frac = 0.3 NodeHash_3443a20.inputs.mask = True NodeHash_3443a20.inputs.no_output = True #Wraps command **fslmaths** NodeHash_43b01b0 = pe.Node(interface=fsl.ApplyMask(), name='NodeName_43b01b0') #Custom interface wrapping function Tsv2subjectinfo NodeHash_3042f20 = pe.Node(interface=firstlevelhelpers.Tsv2subjectinfo, name='NodeName_3042f20') #Makes a model specification compatible with spm/fsl designers. NodeHash_6bef320 = pe.Node(interface=modelgen.SpecifyModel(), name='NodeName_6bef320') NodeHash_6bef320.inputs.high_pass_filter_cutoff = 0 NodeHash_6bef320.inputs.input_units = 'secs' NodeHash_6bef320.inputs.time_repetition = 2.0 #Generate FEAT specific files NodeHash_8241250 = pe.Node(interface=fsl.Level1Design(), name='NodeName_8241250') NodeHash_8241250.inputs.bases = {'dgamma': {'derivs': False}} NodeHash_8241250.inputs.contrasts = [ ('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1]) ] NodeHash_8241250.inputs.interscan_interval = 2.0 NodeHash_8241250.inputs.model_serial_correlations = True
def create_workflow(contrasts, combine_runs=True):
    # Builds (does not run) the first-level nipype workflow. When
    # combine_runs is True a fixed-effects flow combines copes/varcopes
    # across runs; otherwise only level-1 outputs are produced.
    level1_workflow = pe.Workflow(name='level1flow')

    # ===================================================================
    #  _____                   _
    # |_   _|                 | |
    #   | |  _ __  _ __  _   _| |_
    #   | | | '_ \| '_ \| | | | __|
    #  _| |_| | | | |_) | |_| | |_
    # |_____|_| |_| .__/ \__,_|\__|
    #             | |
    #             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            #'funcmasks',
            'fwhm',  # smoothing
            'highpass',
            'funcs',
            'event_log',
            'motion_parameters',
            'motion_outlier_files',
            'ref_func',
            'ref_funcmask',
        ]),
        name="inputspec")

    def remove_runs_missing_funcs(in_files, in_funcs):
        # Keep only the event files whose (sub, ses, run) triple has a
        # matching functional image. Runs inside a Function node, hence
        # the local imports.
        import os
        # import pdb
        import re

        # if input.synchronize = True, then in_files and in_funcs will
        # be single strings
        # NOTE(review): these asserts make the isinstance branches just
        # below unreachable — dead code to be confirmed/cleaned up.
        assert not isinstance(in_files, str), "in_files must be list"
        assert not isinstance(in_funcs, str), "in_funcs must be list"

        if isinstance(in_files, str):
            in_files = [in_files]
        if isinstance(in_funcs, str):
            in_funcs = [in_funcs]

        has_func = set()
        for f in in_funcs:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from func "%s"'
                    % f)
            has_func.add((sub, ses, run))

        files = []
        for f in in_files:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from event file "%s"'
                    % f)
            if (sub, ses, run) in has_func:
                files.append(f)
        return files

    input_events = pe.Node(
        interface=niu.Function(input_names=['in_files', 'in_funcs'],
                               output_names=['out_files'],
                               function=remove_runs_missing_funcs),
        name='input_events',
    )

    level1_workflow.connect([
        (inputnode, input_events, [
            ('funcs', 'in_funcs'),
            ('event_log', 'in_files'),
        ]),
    ])

    # -------------------------------------------------------------------
    #          /~_ _  _  _  _. _   _ . _  _ |. _  _
    #          \_/(/_| |(/_| |(_  |_)||_)(/_||| |(/_
    #                             |   |
    # -------------------------------------------------------------------
    """
    Preliminaries
    -------------
    Setup any package specific configuration. The output file format for FSL
    routines is being set to compressed NIFTI.
    """
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    modelfit = fslflows.create_modelfit_workflow()

    if combine_runs:
        fixed_fx = fslflows.create_fixed_effects_flow()
    else:
        fixed_fx = None

    """
    Artifact detection is done in preprocessing workflow.
    """
    """
    Add model specification nodes between the preprocessing and modelfitting
    workflows.
    """
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")

    """
    Set up first-level workflow
    ---------------------------
    """

    def sort_copes(files):
        """ Sort by copes and the runs, ie.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        # NOTE(review): `files[0] is not str` is an identity check against
        # the str type itself, not a type check — always True in practice.
        assert files[0] is not str
        numcopes = len(files[0])
        assert numcopes > 1
        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        # Number of runs feeding the L2 model.
        return len(files)

    if fixed_fx is not None:
        level1_workflow.connect([
            (inputnode, fixed_fx, [('ref_funcmask', 'flameo.mask_file')
                                   ]),  # To-do: use reference mask!!!
            (modelfit, fixed_fx, [
                (('outputspec.copes', sort_copes), 'inputspec.copes'),
                ('outputspec.dof_file', 'inputspec.dof_files'),
                (('outputspec.varcopes', sort_copes), 'inputspec.varcopes'),
                (('outputspec.copes', num_copes), 'l2model.num_copes'),
            ])
        ])

    # -------------------------------------------------------------------
    #          /~\    _|_ _   _|_
    #          \_/|_|| |_|_)|_||
    #                     |
    # -------------------------------------------------------------------
    # Datasink
    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/modelfit',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        # ('/mask/', '/'),
        # ('_preproc_flirt_thresh.nii.gz', '_transformedmask.nii.gz'),
        # ('_preproc_volreg_unwarped.nii.gz', '_preproc.nii.gz'),
        # ('_preproc_flirt_unwarped.nii.gz', '_preproc-mask.nii.gz'),
        # ('/_mc_method_afni3dvolreg/', '/'),
        # ('/funcs/', '/'),
        # ('/funcmasks/', '/'),
        # ('preproc_volreg.nii.gz', 'preproc.nii.gz'),
        ('/_mc_method_afni3dAllinSlices/', '/'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        # (r'/_addmean[0-9]+/', r'/func/'),
        # (r'/_dilatemask[0-9]+/', r'/func/'),
        # (r'/_funcbrain[0-9]+/', r'/func/'),
        # (r'/_maskfunc[0-9]+/', r'/func/'),
        # (r'/_mc[0-9]+/', r'/func/'),
        # (r'/_meanfunc[0-9]+/', r'/func/'),
        # (r'/_outliers[0-9]+/', r'/func/'),
        # (r'/_undistort_masks[0-9]+/', r'/func/'),
        # (r'/_undistort[0-9]+/', r'/func/'),
    ]
    level1_workflow.connect([
        (modelfit, outputfiles, [
            (('outputspec.copes', sort_copes), 'copes'),
            ('outputspec.dof_file', 'dof_files'),
            (('outputspec.varcopes', sort_copes), 'varcopes'),
        ]),
    ])
    if fixed_fx is not None:
        level1_workflow.connect([
            (fixed_fx, outputfiles, [
                ('outputspec.res4d', 'fx.res4d'),
                ('outputspec.copes', 'fx.copes'),
                ('outputspec.varcopes', 'fx.varcopes'),
                ('outputspec.zstats', 'fx.zstats'),
                ('outputspec.tstats', 'fx.tstats'),
            ]),
        ])

    # -------------------------------------------------------------------
    # (~   _ _  _ . _ _  _  _ _|_  _ _  _  _. |`. _
    # (_><|_)(/_| || | |(/_| | | _\|_)(/_(_|~|~|(_
    #     |                        |
    # -------------------------------------------------------------------
    # """
    # Experiment specific components
    # ------------------------------
    # """

    """
    Use the get_node function to retrieve an internal node by name. Then set
    the iterables on this node to perform two different extents of smoothing.
    """
    featinput = level1_workflow.get_node('modelfit.inputspec')
    # featinput.iterables = ('fwhm', [5., 10.])
    featinput.inputs.fwhm = 2.0

    hpcutoff_s = 50.  # FWHM in seconds
    TR = 2.5
    # NOTE(review): hard-codes 2.5 instead of reusing TR — keep in sync.
    hpcutoff_nvol = hpcutoff_s / 2.5  # FWHM in volumns

    # Use Python3 for processing. See code/requirements.txt for pip packages.
    featinput.inputs.highpass = hpcutoff_nvol / 2.355  # Gaussian: σ in volumes - (REMEMBER to run with Python 3)

    """
    Setup a function that returns subject-specific information about the
    experimental paradigm. This is used by the
    :class:`nipype.modelgen.SpecifyModel` to create the information necessary
    to generate an SPM design matrix. In this tutorial, the same paradigm was
    used for every participant. Other examples of this function are available
    in the `doc/examples` folder. Note: Python knowledge required here.
    """
    # from timeevents.curvetracing import calc_curvetracing_events
    from timeevents import process_time_events

    timeevents = pe.MapNode(
        interface=process_time_events,  # calc_curvetracing_events,
        iterfield=('event_log', 'in_nvols', 'TR'),
        name='timeevents')

    def get_nvols(funcs):
        # Read the number of volumes (4th dim) of each functional image.
        import nibabel as nib
        nvols = []
        if isinstance(funcs, str):
            funcs = [funcs]
        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header  # NOTE(review): unused local
            try:
                nvols.append(func_img.get_data().shape[3])
            except IndexError as e:
                # if shape only has 3 dimensions, then it is only 1 volume
                nvols.append(1)
        return (nvols)

    def get_TR(funcs):
        # Read the TR (4th zoom) of each functional image from its header.
        import nibabel as nib
        TRs = []
        if isinstance(funcs, str):
            funcs = [funcs]
        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header
            try:
                TR = round(header.get_zooms()[3], 5)
            except IndexError as e:
                TR = 2.5
                print("Warning: %s did not have TR defined in the header. "
                      "Using default TR of %0.2f" % (func, TR))
            # Sanity check: guards against a header storing TR in ms or 0.
            assert TR > 1
            TRs.append(TR)
        return (TRs)

    level1_workflow.connect([
        (inputnode, timeevents, [
            (('funcs', get_nvols), 'in_nvols'),
            (('funcs', get_TR), 'TR'),
        ]),
        (input_events, timeevents, [('out_files', 'event_log')]),
        (inputnode, modelspec, [('motion_parameters', 'realignment_parameters')
                                ]),
        (modelspec, modelfit, [('session_info', 'inputspec.session_info')]),
    ])

    # Ignore volumes after last good response
    filter_outliers = pe.MapNode(interface=FilterNumsTask(),
                                 name='filter_outliers',
                                 iterfield=('in_file', 'max_number'))

    level1_workflow.connect([
        (inputnode, filter_outliers, [('motion_outlier_files', 'in_file')]),
        (filter_outliers, modelspec, [('out_file', 'outlier_files')]),
        (timeevents, filter_outliers, [('out_nvols', 'max_number')]),
    ])

    def evt_info(cond_events):
        # Convert per-run condition-event tables into the Bunch structure
        # SpecifyModel expects (conditions/onsets/durations/amplitudes).
        output = []
        # for each run
        for ev in cond_events:
            from nipype.interfaces.base import Bunch
            from copy import deepcopy
            names = []
            for name in ev.keys():
                if ev[name].shape[0] > 0:
                    names.append(name)
            # NOTE(review): the three comprehensions below are unused —
            # the Bunch recomputes the same lists (names is already
            # filtered to non-empty conditions, so results are identical).
            onsets = [
                deepcopy(ev[name].time) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            durations = [
                deepcopy(ev[name].dur) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            amplitudes = [
                deepcopy(ev[name].amplitude) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            run_results = Bunch(
                conditions=names,
                onsets=[deepcopy(ev[name].time) for name in names],
                durations=[deepcopy(ev[name].dur) for name in names],
                amplitudes=[deepcopy(ev[name].amplitude) for name in names])
            output.append(run_results)
        return output

    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = TR  # to-do: specify per func
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff_s

    modelfit.inputs.inputspec.interscan_interval = TR  # to-do: specify per func
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.contrasts = contrasts
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    # level1_workflow.base_dir = os.path.abspath('./workingdirs/level1flow')
    modelfit.config['execution'] = dict(crashdump_dir=os.path.abspath('.'))

    # Ignore volumes after subject has finished working for the run
    beh_roi = pe.MapNode(fsl.ExtractROI(t_min=0),
                         name='beh_roi',
                         iterfield=['in_file', 't_size'])

    level1_workflow.connect([
        (timeevents, modelspec, [
            (('out_events', evt_info), 'subject_info'),
        ]),
        (inputnode, beh_roi, [
            ('funcs', 'in_file'),
        ]),
        (timeevents, beh_roi, [
            ('out_nvols', 't_size'),
        ]),
        (beh_roi, modelspec, [
            ('roi_file', 'functional_runs'),
        ]),
        (beh_roi, modelfit, [
            ('roi_file', 'inputspec.functional_data'),
        ]),
        (beh_roi, outputfiles, [
            ('roi_file', 'roi_file'),
        ]),
        # (inputnode, datasource, [('in_data', 'base_directory')]),
        # (infosource, datasource, [('subject_id', 'subject_id')]),
        # (infosource, modelspec, [(('subject_id', subjectinfo), 'subject_info')]),
        # (datasource, preproc, [('func', 'inputspec.func')]),
    ])
    return (level1_workflow)
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             work_dir=None):
    """Analyzes an open fmri dataset

    Builds and runs (MultiProc, 2 workers) a nipype meta-workflow that chains
    FEAT-style preprocessing, per-run model fitting, and a fixed-effects
    combination across runs, then sinks the results.

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    subject : str, optional
        Single subject directory name to analyze; all ``sub*`` subjects
        when None.

    model_id : int, optional
        Model number used to locate onsets and contrast definitions.

    work_dir : str
        Nipype working directory (defaults to cwd)
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    # Subject directory names, e.g. 'sub001', discovered under data_dir.
    subjects = [path.split(os.path.sep)[-1]
                for path in glob(os.path.join(data_dir, 'sub*'))]

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id']),
                         name='infosource')
    # Iterate over all subjects, or restrict to the one requested
    # (subjects.index raises ValueError when it is unknown).
    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id])]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id])]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'model_id'],
                                         outfields=['anat', 'bold', 'behav']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    # NOTE(review): task001 is hard-coded in these templates even though
    # subjinfo.inputs.task_id is set below — confirm they should agree.
    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                        'bold': '%s/BOLD/task001_r*/bold.nii.gz',
                                        'behav': ('%s/model/model%03d/onsets/task001_'
                                                  'run%03d/cond*.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'run_id']]}
    # NOTE(review): recent nipype DataGrabber calls this input
    # `sort_filelist` — verify `sorted` is honored by the version in use.
    datasource.inputs.sorted = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        # Convert the cutoff in seconds to the value expected by the
        # preprocessing workflow's highpass input (cutoff / (2 * TR);
        # presumably volumes for FSL's temporal filter — TODO confirm units).
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(base_dir, model_id, conds):
        # Read task_contrasts.txt: first column is the contrast name, the
        # remaining columns are weights over cond000..condNNN.
        # NOTE(review): with dtype=object, np.genfromtxt yields bytes for
        # row[0] under Python 3 — confirm downstream consumers accept that.
        import numpy as np
        import os
        contrast_file = os.path.join(base_dir, 'models', 'model%03d' % model_id,
                                     'task_contrasts.txt')
        contrast_def = np.genfromtxt(contrast_file, dtype=object)
        contrasts = []
        for row in contrast_def:
            con = [row[0], 'T', ['cond%03d' % i for i in range(len(conds))],
                   row[1:].astype(float).tolist()]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['base_dir', 'model_id',
                                                    'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')
    contrastgen.inputs.base_dir = data_dir

    # Rapidart outlier detection over the realigned runs (composite motion
    # norm > 1 or z-intensity > 3 flags a volume).
    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(infosource, 'model_id', contrastgen, 'model_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(files):
        # Transpose a runs-by-contrasts nested list into
        # contrasts-by-runs, so each fixed-effects input groups one
        # contrast across all runs.
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        # Number of runs feeding the L2 model.
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds):
        # Datasink filename substitutions: strip the subject iterable prefix
        # and rename per-contrast flameo outputs to cope01., varcope01., etc.
        subs = [('_subject_id_%s/' % subject_id, '')]
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(subjinfo, 'conds', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])

    """
    Set processing parameters
    """

    hpcutoff = 120.
    subjinfo.inputs.task_id = 1
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    if work_dir is None:
        work_dir = os.path.join(os.getcwd(), 'working')
    wf.base_dir = work_dir
    datasink.inputs.base_directory = os.path.join(work_dir, 'output')
    wf.config['execution'] = dict(crashdump_dir=os.path.join(work_dir,
                                                             'crashdumps'),
                                  stop_on_first_crash=True)
    wf.run('MultiProc', plugin_args={'n_procs': 2})
""" Add artifact detection and model specification nodes between the preprocessing and modelfitting workflows. """ art = pe.MapNode( ra.ArtifactDetect(use_differences=[True, False], use_norm=True, norm_threshold=1, zintensity_threshold=3, parameter_source='FSL', mask_type='file'), iterfield=['realigned_files', 'realignment_parameters', 'mask_file'], name="art") modelspec = pe.Node(model.SpecifyModel(), name="modelspec") level1_workflow.connect([ (preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'), ('outputspec.realigned_files', 'realigned_files'), ('outputspec.mask', 'mask_file')]), (preproc, modelspec, [('outputspec.highpassed_files', 'functional_runs'), ('outputspec.motion_parameters', 'realignment_parameters')]), (art, modelspec, [('outlier_files', 'outlier_files')]), (modelspec, modelfit, [('session_info', 'inputspec.session_info')]), (preproc, modelfit, [('outputspec.highpassed_files', 'inputspec.functional_data')]) ]) """ Set up first-level workflow
# ====================================================================== # DEFINE NODE: SPECIFY SPM MODEL (GENERATE SPM-SPECIFIC MODEL) # ====================================================================== # function: makes a model specification compatible with spm designers # adds SPM specific options to SpecifyModel # SpecifyModel - Generates SPM-specific Model modelspec = Node(SpecifySPMModel(concatenate_runs=False, input_units='secs', output_units='secs', time_repetition=TR, high_pass_filter_cutoff=128), name="modelspec") l1model = Node(model.SpecifyModel(input_units='secs', time_repetition=TR, high_pass_filter_cutoff=128), name="l1model") # input: concatenate runs to a single session (boolean, default: False): # l1model.inputs.concatenate_runs = False # input: units of event onsets and durations (secs or scans): # l1model.inputs.input_units = 'secs' # input: units of design event onsets and durations (secs or scans): # l1model.inputs.output_units = 'secs' # input: time of repetition (a float): # l1model.inputs.time_repetition = TR # high-pass filter cutoff in secs (a float, default = 128 secs): # l1model.inputs.high_pass_filter_cutoff = 128 # ====================================================================== # DEFINE NODE: LEVEL 1 DESIGN (GENERATE AN SPM DESIGN MATRIX)
preprocessing workflow (see Figure TODO)."""

preprocessing = pe.Workflow(name="preprocessing")
preprocessing.connect(realign, "realigned_files", smooth, "in_files")

"""Creating a modelling workflow which will define the design, estimate
model and contrasts follows the same suite. We will again use SPM
implementations. NiPyPe, however, adds extra abstraction layer to model
definition which allows using the same definition for many model
estimation implemantations (for example one from FSL or nippy).
Therefore we will need four nodes: SpecifyModel (NiPyPe specific
abstraction layer), Level1Design (SPM design definition), ModelEstimate,
and ContrastEstimate. The connected modelling Workflow can be seen on
Figure TODO. Model specification supports block, event and sparse
designs. Contrasts provided to ContrastEstimate are defined using the
same names of regressors as defined in the SpecifyModel."""

specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model")
specify_model.inputs.input_units = 'secs'
specify_model.inputs.time_repetition = 3.
specify_model.inputs.high_pass_filter_cutoff = 120
# Hard-coded alternating block design: 'Task-Odd' blocks start at
# 15, 75, 135, 195 s and 'Task-Even' at 45, 105, 165, 225 s, each lasting
# 15 s; the same design is replicated for 4 runs (hence "* 4").
specify_model.inputs.subject_info = [
    Bunch(
        conditions=['Task-Odd', 'Task-Even'],
        onsets=[list(range(15, 240, 60)), list(range(45, 240, 60))],
        durations=[[15], [15]])
] * 4

# SPM first-level design: canonical HRF with no derivatives; interscan
# interval is tied to the TR given to specify_model above so the two
# cannot drift apart.
level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
level1design.inputs.timing_units = 'secs'
level1design.inputs.interscan_interval = specify_model.inputs.time_repetition
# Build a {session: {run: path}} map of fmriprep-preprocessed BOLD files
# (MNI152NLin2009cAsym space) for this subject.
for n, session in enumerate(sessions):
    fMRI[session] = {}
    for run in range(runs[n]):
        fMRI[session][run] = os.path.join(
            base_dir,
            'fmriprep/sub-{0}/ses-{1}/func/sub-{0}_ses-{1}_task-mos_run-0{2}_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
            .format(subject, session, run + 1))

# Memory-cached ExtractROI over the full time series (t_min=0, t_size=-1,
# i.e. no volumes are actually dropped — presumably kept as a hook for
# skipping dummy scans; TODO confirm).
# NOTE(review): this_session / this_run are not defined in this snippet —
# presumably set by an enclosing loop; verify against the caller.
skip = mem.cache(fsl.ExtractROI)
skip_results = skip(in_file=fMRI[this_session][this_run],
                    t_min=0, t_size=-1)

# Set up model
# Run SpecifyModel directly (no workflow engine): TR fixed at 1 s,
# 128 s high-pass cutoff, events taken from the precomputed session_info.
modelspec = model.SpecifyModel(
    input_units='secs',
    time_repetition=1,
    high_pass_filter_cutoff=128,
    subject_info=session_info[this_session][this_run],
    functional_runs=skip_results.outputs.roi_file)
specify_model_results = modelspec.run()

# FSL first-level design: dgamma HRF without derivatives, prewhitening
# enabled; interscan_interval (1 s) matches time_repetition above.
level1design = fsl.model.Level1Design(
    interscan_interval=1,
    bases={'dgamma': {
        'derivs': False
    }},
    session_info=specify_model_results.outputs.session_info,
    model_serial_correlations=True)
level1design_results = level1design.run()