def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*'):
    """Build a first/second-level analysis workflow for an OpenfMRI dataset.

    Parameters
    ----------
    data_dir : str
        Path to the base data directory.
    subject : list of str or None
        Subject identifiers to analyze. ``None`` or an empty list means
        every subject found under ``data_dir``.
    model_id : int
        Model number (selects the ``model%03d`` directory).
    task_id : list of int
        Task numbers to iterate over.
    output_dir : str
        Base directory handed to the datasink.
    subj_prefix : str
        Glob pattern used to discover subject directories.

    Returns
    -------
    wf : pe.Workflow
        The assembled (not yet executed) workflow.
    """

    # Load canned nipype sub-workflows
    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    registration = create_reg_workflow()

    # Remove the plotting connection so that plot iterables don't propagate
    # to the model stage
    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    # Set up openfmri data specific components
    subjects = sorted([path.split(os.path.sep)[-1]
                       for path in glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')

    # BUGFIX: the default ``subject=None`` used to crash on ``len(subject)``;
    # treat None the same as an empty list (i.e. analyze every subject).
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)]
                                  for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    # Return data components as anat, bold and behav
    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'task_id', 'model_id'],
                                         outfields=['anat', 'bold', 'behav',
                                                    'contrasts']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                        'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                        'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                  'run%03d/cond*.txt'),
                                        'contrasts': ('models/model%03d/'
                                                      'task_contrasts.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id', 'task_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'task_id', 'run_id']],
                                       'contrasts': [['model_id']]}
    datasource.inputs.sort_filelist = True

    # Create meta workflow
    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        # FSL expects the cutoff in volumes (sigma), not seconds.
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    # Setup a basic set of contrasts, a t-test per condition
    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        # BUGFIX: ``np.genfromtxt(..., dtype=object)`` yields ``bytes`` rows
        # on Python 3, so the comparison with 'task%03d' silently never
        # matched and file-defined contrasts were dropped. Parse as text.
        contrast_def = []
        with open(contrast_file, 'rt') as fp:
            contrast_def = [np.array(row.split())
                            for row in fp.readlines() if row.strip()]
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T',
                   ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav):
        # SpecifyModel wants a list of per-run lists; wrap stray strings.
        out_behav = []
        if isinstance(behav, six.string_types):
            behav = [behav]
        for val in behav:
            if not isinstance(val, list):
                out_behav.append([val])
            else:
                out_behav.append(val)
        return out_behav

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, ('behav', check_behav_list), modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    # Reorder the copes so that now it combines across runs
    def sort_copes(files):
        # Transpose [run][contrast] -> [contrast][run].
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
    registration.inputs.inputspec.target_image = \
        fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    registration.inputs.inputspec.target_image_brain = \
        fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        # Concatenate the three lists and remember the split points so
        # split_files can undo the merge after registration.
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        # Inverse of merge_files.
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    # Connect to a datasink
    def get_subs(subject_id, conds, model_id, task_id):
        # Filename substitutions applied by the datasink.
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform',
               datasink, 'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform',
               datasink, 'xfm.anat2target')

    # Set processing parameters
    hpcutoff = 120.
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Preliminaries
-------------

Setup any package specific configuration. The output file format for FSL
routines is being set to compressed NIFTI.
"""

# Make every FSL interface in this script write compressed NIfTI (.nii.gz).
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# Container workflow the sub-workflows below will be wired into.
level1_workflow = pe.Workflow(name='level1flow')

# Canned nipype sub-workflows: FEAT-style preprocessing (registered to the
# first volume), GLM model fitting, and a fixed-effects combination stage.
preproc = create_featreg_preproc(whichvol='first')

modelfit = create_modelfit_workflow()

fixed_fx = create_fixed_effects_flow()

"""
Add artifact detection and model specification nodes between the preprocessing
and modelfitting workflows.
"""

# Rapidart outlier detection, mapped over runs; uses both frame-to-frame
# differences of the composite motion norm and raw intensity z-scores.
art = pe.MapNode(
    ra.ArtifactDetect(use_differences=[True, False],
                      use_norm=True,
                      norm_threshold=1,
                      zintensity_threshold=3,
                      parameter_source='FSL',
                      mask_type='file'),
    iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
    name="art")
def analyze_openfmri_dataset(
    data_dir,
    subject=None,
    model_id=None,
    task_id=None,
    output_dir=None,
    subj_prefix="*",
    hpcutoff=120.0,
    use_derivatives=True,
    fwhm=6.0,
    subjects_dir=None,
    target=None,
):
    """Build a first/second-level analysis workflow for an OpenfMRI dataset.

    Parameters
    ----------
    data_dir : str
        Path to the base data directory.
    subject : list of str or None
        Subjects to analyze; None or empty means all subjects found.
    model_id : int
        Model number (selects the ``model%03d`` directory).
    task_id : list of int
        Task numbers to iterate over.
    output_dir : str
        Base directory handed to the datasink.
    subj_prefix : str
        Glob pattern used to discover subject directories.
    hpcutoff : float
        High-pass filter cutoff in seconds.
    use_derivatives : bool
        Whether to add temporal-derivative basis functions.
    fwhm : float
        Smoothing kernel FWHM in mm.
    subjects_dir : str or None
        FreeSurfer SUBJECTS_DIR; enables surface-based registration.
    target : str or None
        Optional override of the registration target image (FreeSurfer path).

    Returns
    -------
    wf : pe.Workflow
        The assembled (not yet executed) workflow.
    """

    # Load nipype workflows
    preproc = create_featreg_preproc(whichvol="first")
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    # Remove the plotting connection so that plot iterables don't propagate
    # to the model stage
    preproc.disconnect(preproc.get_node("plot_motion"), "out_file",
                       preproc.get_node("outputspec"), "motion_plots")

    # Set up openfmri data specific components
    subjects = sorted([path.split(os.path.sep)[-1]
                       for path in glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=["subject_id", "model_id", "task_id"]),
                         name="infosource")

    # BUGFIX: the default ``subject=None`` used to crash on ``len(subject)``;
    # treat None the same as an empty list (i.e. analyze every subject).
    if not subject:
        infosource.iterables = [("subject_id", subjects),
                                ("model_id", [model_id]),
                                ("task_id", task_id)]
    else:
        infosource.iterables = [
            ("subject_id", [subjects[subjects.index(subj)] for subj in subject]),
            ("model_id", [model_id]),
            ("task_id", task_id),
        ]

    subjinfo = pe.Node(
        niu.Function(
            input_names=["subject_id", "base_dir", "task_id", "model_id"],
            output_names=["run_id", "conds", "TR"],
            function=get_subjectinfo,
        ),
        name="subjectinfo",
    )
    subjinfo.inputs.base_dir = data_dir

    # Return data components as anat, bold and behav; only grab the contrast
    # definition file when the model actually ships one.
    contrast_file = os.path.join(data_dir, "models", "model%03d" % model_id,
                                 "task_contrasts.txt")
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=["subject_id", "run_id", "task_id", "model_id"],
                outfields=["anat", "bold", "behav", "contrasts"],
            ),
            name="datasource",
        )
    else:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=["subject_id", "run_id", "task_id", "model_id"],
                outfields=["anat", "bold", "behav"]
            ),
            name="datasource",
        )
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = "*"
    if has_contrast:
        datasource.inputs.field_template = {
            "anat": "%s/anatomy/T1_001.nii.gz",
            "bold": "%s/BOLD/task%03d_r*/bold.nii.gz",
            "behav": ("%s/model/model%03d/onsets/task%03d_" "run%03d/cond*.txt"),
            "contrasts": ("models/model%03d/" "task_contrasts.txt"),
        }
        datasource.inputs.template_args = {
            "anat": [["subject_id"]],
            "bold": [["subject_id", "task_id"]],
            "behav": [["subject_id", "model_id", "task_id", "run_id"]],
            "contrasts": [["model_id"]],
        }
    else:
        datasource.inputs.field_template = {
            "anat": "%s/anatomy/T1_001.nii.gz",
            "bold": "%s/BOLD/task%03d_r*/bold.nii.gz",
            "behav": ("%s/model/model%03d/onsets/task%03d_" "run%03d/cond*.txt"),
        }
        datasource.inputs.template_args = {
            "anat": [["subject_id"]],
            "bold": [["subject_id", "task_id"]],
            "behav": [["subject_id", "model_id", "task_id", "run_id"]],
        }
    datasource.inputs.sort_filelist = True

    # Create meta workflow
    wf = pe.Workflow(name="openfmri")
    wf.connect(infosource, "subject_id", subjinfo, "subject_id")
    wf.connect(infosource, "model_id", subjinfo, "model_id")
    wf.connect(infosource, "task_id", subjinfo, "task_id")
    wf.connect(infosource, "subject_id", datasource, "subject_id")
    wf.connect(infosource, "model_id", datasource, "model_id")
    wf.connect(infosource, "task_id", datasource, "task_id")
    wf.connect(subjinfo, "run_id", datasource, "run_id")
    wf.connect([(datasource, preproc, [("bold", "inputspec.func")])])

    def get_highpass(TR, hpcutoff):
        # FSL expects the cutoff in volumes (sigma), not seconds.
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(
        niu.Function(input_names=["TR", "hpcutoff"], output_names=["highpass"],
                     function=get_highpass),
        name="gethighpass",
    )
    wf.connect(subjinfo, "TR", gethighpass, "TR")
    wf.connect(gethighpass, "highpass", preproc, "inputspec.highpass")

    # Setup a basic set of contrasts, a t-test per condition
    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os

        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, "rt") as fp:
                contrast_def.extend([np.array(row.split())
                                     for row in fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != "task%03d" % task_id:
                continue
            con = [row[1], "T",
                   ["cond%03d" % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, "T", ["cond%03d" % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(
        niu.Function(
            input_names=["contrast_file", "task_id", "conds"],
            output_names=["contrasts"],
            function=get_contrasts,
        ),
        name="contrastgen",
    )

    art = pe.MapNode(
        interface=ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            norm_threshold=1,
            zintensity_threshold=3,
            parameter_source="FSL",
            mask_type="file",
        ),
        iterfield=["realigned_files", "realignment_parameters", "mask_file"],
        name="art",
    )

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = "secs"

    def check_behav_list(behav, run_id, conds):
        from nipype.external import six
        import numpy as np

        num_conds = len(conds)
        if isinstance(behav, six.string_types):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        # BUGFIX: must be floor division — ``/`` yields a float on Python 3
        # and numpy.reshape rejects non-integer dimensions.
        return behav_array.reshape(num_elements // num_conds, num_conds).tolist()

    reshape_behav = pe.Node(
        niu.Function(input_names=["behav", "run_id", "conds"],
                     output_names=["behav"], function=check_behav_list),
        name="reshape_behav",
    )

    wf.connect(subjinfo, "TR", modelspec, "time_repetition")
    wf.connect(datasource, "behav", reshape_behav, "behav")
    wf.connect(subjinfo, "run_id", reshape_behav, "run_id")
    wf.connect(subjinfo, "conds", reshape_behav, "conds")
    wf.connect(reshape_behav, "behav", modelspec, "event_files")
    wf.connect(subjinfo, "TR", modelfit, "inputspec.interscan_interval")
    wf.connect(subjinfo, "conds", contrastgen, "conds")
    if has_contrast:
        wf.connect(datasource, "contrasts", contrastgen, "contrast_file")
    else:
        contrastgen.inputs.contrast_file = ""
    wf.connect(infosource, "task_id", contrastgen, "task_id")
    wf.connect(contrastgen, "contrasts", modelfit, "inputspec.contrasts")

    wf.connect(
        [
            (
                preproc,
                art,
                [
                    ("outputspec.motion_parameters", "realignment_parameters"),
                    ("outputspec.realigned_files", "realigned_files"),
                    ("outputspec.mask", "mask_file"),
                ],
            ),
            (
                preproc,
                modelspec,
                [
                    ("outputspec.highpassed_files", "functional_runs"),
                    ("outputspec.motion_parameters", "realignment_parameters"),
                ],
            ),
            (art, modelspec, [("outlier_files", "outlier_files")]),
            (modelspec, modelfit, [("session_info", "inputspec.session_info")]),
            (preproc, modelfit, [("outputspec.highpassed_files", "inputspec.functional_data")]),
        ]
    )

    # Compute TSNR on realigned data regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=["in_file"], name="tsnr")
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(
        Function(input_names=["in_files"], output_names=["median_file"],
                 function=median, imports=imports),
        name="median",
    )
    wf.connect(tsnr, "detrended_file", calc_median, "in_files")

    # Reorder the copes so that now it combines across runs
    def sort_copes(copes, varcopes, contrasts):
        import numpy as np

        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        # BUGFIX: floor division — ``/`` yields a float on Python 3 and
        # numpy.reshape rejects non-integer dimensions.
        outcopes = all_copes.reshape(
            len(all_copes) // num_copes, num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(
            len(all_varcopes) // num_copes, num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(
        niu.Function(
            input_names=["copes", "varcopes", "contrasts"],
            output_names=["copes", "varcopes", "n_runs"],
            function=sort_copes,
        ),
        name="cope_sorter",
    )

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, "contrasts", cope_sorter, "contrasts")
    wf.connect(
        [
            (preproc, fixed_fx, [(("outputspec.mask", pickfirst), "flameo.mask_file")]),
            (modelfit, cope_sorter, [("outputspec.copes", "copes")]),
            (modelfit, cope_sorter, [("outputspec.varcopes", "varcopes")]),
            (
                cope_sorter,
                fixed_fx,
                [("copes", "inputspec.copes"),
                 ("varcopes", "inputspec.varcopes"),
                 ("n_runs", "l2model.num_copes")],
            ),
            (modelfit, fixed_fx, [("outputspec.dof_file", "inputspec.dof_files")]),
        ]
    )

    wf.connect(calc_median, "median_file", registration, "inputspec.mean_image")
    if subjects_dir:
        wf.connect(infosource, "subject_id", registration, "inputspec.subject_id")
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image("MNI152_T1_2mm_brain.nii.gz")
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, "anat", registration, "inputspec.anatomical_image")
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image("MNI152_T1_2mm.nii.gz")
        registration.inputs.inputspec.target_image_brain = \
            fsl.Info.standard_image("MNI152_T1_2mm_brain.nii.gz")
        registration.inputs.inputspec.config_file = "T1_2_MNI152_2mm"

    def merge_files(copes, varcopes, zstats):
        # Concatenate the three lists and remember the split points so
        # split_files can undo the merge after registration.
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(
        niu.Function(
            input_names=["copes", "varcopes", "zstats"],
            output_names=["out_files", "splits"],
            function=merge_files,
        ),
        name="merge_files",
    )
    wf.connect(
        [
            (
                fixed_fx.get_node("outputspec"),
                mergefunc,
                [("copes", "copes"), ("varcopes", "varcopes"), ("zstats", "zstats")],
            )
        ]
    )
    wf.connect(mergefunc, "out_files", registration, "inputspec.source_files")

    def split_files(in_files, splits):
        # Inverse of merge_files.
        copes = in_files[: splits[0]]
        varcopes = in_files[splits[0] : (splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]) :]
        return copes, varcopes, zstats

    splitfunc = pe.Node(
        niu.Function(
            input_names=["in_files", "splits"],
            output_names=["copes", "varcopes", "zstats"],
            function=split_files,
        ),
        name="split_files",
    )
    wf.connect(mergefunc, "splits", splitfunc, "splits")
    wf.connect(registration, "outputspec.transformed_files", splitfunc, "in_files")

    # Optional FreeSurfer-based ROI statistics (only when subjects_dir given)
    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=["in_file"], name="get_aparc_means")
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node("outputspec"), "copes", get_roi_mean, "in_file")
        wf.connect(registration, "outputspec.aparc", get_roi_mean, "segmentation_file")

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=["in_file"], name="get_aparc_tsnr")
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, "tsnr_file", get_roi_tsnr, "in_file")
        wf.connect(registration, "outputspec.aparc", get_roi_tsnr, "segmentation_file")

    # Connect to a datasink
    def get_subs(subject_id, conds, run_id, model_id, task_id):
        # Filename substitutions applied by the datasink.
        subs = [("_subject_id_%s_" % subject_id, "")]
        subs.append(("_model_id_%d" % model_id, "model%03d" % model_id))
        subs.append(("task_id_%d/" % task_id, "/task%03d_" % task_id))
        subs.append(("bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp", "mean"))
        subs.append(("bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt", "affine"))
        for i in range(len(conds)):
            subs.append(("_flameo%d/cope1." % i, "cope%02d." % (i + 1)))
            subs.append(("_flameo%d/varcope1." % i, "varcope%02d." % (i + 1)))
            subs.append(("_flameo%d/zstat1." % i, "zstat%02d." % (i + 1)))
            subs.append(("_flameo%d/tstat1." % i, "tstat%02d." % (i + 1)))
            subs.append(("_flameo%d/res4d." % i, "res4d%02d." % (i + 1)))
            subs.append(("_warpall%d/cope1_warp." % i, "cope%02d." % (i + 1)))
            subs.append(("_warpall%d/varcope1_warp." % (len(conds) + i),
                         "varcope%02d." % (i + 1)))
            subs.append(("_warpall%d/zstat1_warp." % (2 * len(conds) + i),
                         "zstat%02d." % (i + 1)))
            subs.append(("_warpall%d/cope1_trans." % i, "cope%02d." % (i + 1)))
            subs.append(("_warpall%d/varcope1_trans." % (len(conds) + i),
                         "varcope%02d." % (i + 1)))
            subs.append(("_warpall%d/zstat1_trans." % (2 * len(conds) + i),
                         "zstat%02d." % (i + 1)))
            subs.append(("__get_aparc_means%d/" % i, "/cope%02d_" % (i + 1)))
        for i, run_num in enumerate(run_id):
            subs.append(("__get_aparc_tsnr%d/" % i, "/run%02d_" % run_num))
            subs.append(("__art%d/" % i, "/run%02d_" % run_num))
            subs.append(("__dilatemask%d/" % i, "/run%02d_" % run_num))
            subs.append(("__realign%d/" % i, "/run%02d_" % run_num))
            subs.append(("__modelgen%d/" % i, "/run%02d_" % run_num))
        subs.append(("/model%03d/task%03d/" % (model_id, task_id), "/"))
        subs.append(("/model%03d/task%03d_" % (model_id, task_id), "/"))
        subs.append(("_bold_dtype_mcf_bet_thresh_dil", "_mask"))
        subs.append(("_output_warped_image", "_anat2target"))
        subs.append(("median_flirt_brain_mask", "median_brain_mask"))
        subs.append(("median_bbreg_brain_mask", "median_brain_mask"))
        return subs

    subsgen = pe.Node(
        niu.Function(
            input_names=["subject_id", "conds", "run_id", "model_id", "task_id"],
            output_names=["substitutions"],
            function=get_subs,
        ),
        name="subsgen",
    )
    wf.connect(subjinfo, "run_id", subsgen, "run_id")

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, "subject_id", datasink, "container")
    wf.connect(infosource, "subject_id", subsgen, "subject_id")
    wf.connect(infosource, "model_id", subsgen, "model_id")
    wf.connect(infosource, "task_id", subsgen, "task_id")
    wf.connect(contrastgen, "contrasts", subsgen, "conds")
    wf.connect(subsgen, "substitutions", datasink, "substitutions")
    wf.connect(
        [
            (
                fixed_fx.get_node("outputspec"),
                datasink,
                [
                    ("res4d", "res4d"),
                    ("copes", "copes"),
                    ("varcopes", "varcopes"),
                    ("zstats", "zstats"),
                    ("tstats", "tstats"),
                ],
            )
        ]
    )
    wf.connect(
        [
            (
                modelfit.get_node("modelgen"),
                datasink,
                [
                    ("design_cov", "qa.model"),
                    ("design_image", "qa.model.@matrix_image"),
                    ("design_file", "qa.model.@matrix"),
                ],
            )
        ]
    )
    wf.connect(
        [
            (
                preproc,
                datasink,
                [
                    ("outputspec.motion_parameters", "qa.motion"),
                    ("outputspec.motion_plots", "qa.motion.plots"),
                    ("outputspec.mask", "qa.mask"),
                ],
            )
        ]
    )
    wf.connect(registration, "outputspec.mean2anat_mask", datasink, "qa.mask.mean2anat")
    wf.connect(art, "norm_files", datasink, "qa.art.@norm")
    wf.connect(art, "intensity_files", datasink, "qa.art.@intensity")
    wf.connect(art, "outlier_files", datasink, "qa.art.@outlier_files")
    wf.connect(registration, "outputspec.anat2target", datasink, "qa.anat2target")
    wf.connect(tsnr, "tsnr_file", datasink, "qa.tsnr.@map")
    if subjects_dir:
        wf.connect(registration, "outputspec.min_cost_file", datasink, "qa.mincost")
        wf.connect([(get_roi_tsnr, datasink,
                     [("avgwf_txt_file", "qa.tsnr"),
                      ("summary_file", "qa.tsnr.@summary")])])
        wf.connect([(get_roi_mean, datasink,
                     [("avgwf_txt_file", "copes.roi"),
                      ("summary_file", "copes.roi.@summary")])])
    wf.connect(
        [(splitfunc, datasink,
          [("copes", "copes.mni"), ("varcopes", "varcopes.mni"), ("zstats", "zstats.mni")])]
    )
    wf.connect(calc_median, "median_file", datasink, "mean")
    wf.connect(registration, "outputspec.transformed_mean", datasink, "mean.mni")
    wf.connect(registration, "outputspec.func2anat_transform", datasink, "xfm.mean2anat")
    wf.connect(registration, "outputspec.anat2target_transform", datasink, "xfm.anat2target")

    # Set processing parameters
    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {"dgamma": {"derivs": use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
                             fwhm=6.0, subjects_dir=None, target=None):
    """Analyzes an open fmri dataset

    Builds (but does not run) the full first/second-level nipype workflow:
    preprocessing, model fitting, fixed effects across runs, registration
    to a target space and a datasink for the results.

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    subject : list of str or None
        Subject directory names to process; None or an empty list means
        all subjects matching ``subj_prefix``.

    model_id : int
        Model number (selects ``models/model%03d``).

    task_id : list of int
        Task numbers to iterate over.

    output_dir : str
        Base directory for the datasink.

    subj_prefix : str
        Glob pattern selecting subject directories inside ``data_dir``.

    hpcutoff : float
        Highpass filter cutoff in seconds.

    use_derivatives : bool
        Whether to add temporal derivatives to the dgamma basis.

    fwhm : float
        Smoothing kernel FWHM in mm.

    subjects_dir : str or None
        FreeSurfer SUBJECTS_DIR; when set, FreeSurfer-based registration
        is used instead of the FSL-only registration workflow.

    target : str or None
        Optional target image overriding the default MNI template
        (only used together with ``subjects_dir``).
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')

    # BUGFIX: the signature default is subject=None, but the original code
    # called len(subject) unconditionally and raised TypeError for the
    # default.  None is now treated the same as an empty list: all subjects.
    if subject is None or len(subject) == 0:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)]
                                  for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    # Only declare a 'contrasts' outfield when the contrast file exists,
    # otherwise the DataGrabber would fail on the missing template.
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold',
                                                        'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt'),
                                            'contrasts': ('models/model%03d/'
                                                          'task_contrasts.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']],
                                           'contrasts': [['model_id']]}
    else:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']]}

    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        """Convert a cutoff in seconds to the FSL highpass value (volumes)."""
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        """Parse the task contrast file and append a t-contrast per condition."""
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([np.array(row.split()) for row in
                                     fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1)
                                 for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        """Reshape the flat onset-file list into one row of files per run."""
        from nipype.external import six
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, six.string_types):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        # BUGFIX: use floor division -- under Python 3 the original
        # true division produced a float, which reshape() rejects.
        return behav_array.reshape(num_elements // num_conds,
                                   num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id',
                                                      'conds'],
                                         output_names=['behav'],
                                         function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        """Regroup per-run cope/varcope lists into per-contrast lists."""
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        # BUGFIX: floor division -- reshape() requires integer dimensions
        # and the original true division is a float under Python 3.
        outcopes = all_copes.reshape(len(all_copes) // num_copes,
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(len(all_varcopes) // num_copes,
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                    'contrasts'],
                                       output_names=['copes', 'varcopes',
                                                     'n_runs'],
                                       function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes',
                                          'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      ])
                ])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        # FreeSurfer-based registration: needs the subject id and
        # SUBJECTS_DIR; the target image can be overridden via `target`.
        wf.connect(infosource, 'subject_id',
                   registration, 'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat',
                   registration, 'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = \
            fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = \
            fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        """Concatenate copes+varcopes+zstats, remembering the group sizes."""
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ])])
    wf.connect(mergefunc, 'out_files', registration,
               'inputspec.source_files')

    def split_files(in_files, splits):
        """Inverse of merge_files: recover the three groups from one list."""
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    if subjects_dir:
        # Per-ROI summaries (FreeSurfer aparc) of the copes and TSNR maps.
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes',
                   get_roi_mean, 'in_file')
        wf.connect(registration, 'outputspec.aparc',
                   get_roi_mean, 'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc',
                   get_roi_tsnr, 'segmentation_file')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        """Build datasink filename substitutions for readable output names."""
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'run_id', 'model_id',
                                                'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    wf.connect([(modelfit.get_node('modelgen'), datasink,
                 [('design_cov', 'qa.model'),
                  ('design_image', 'qa.model.@matrix_image'),
                  ('design_file', 'qa.model.@matrix'),
                  ])])
    wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                      'qa.motion'),
                                     ('outputspec.motion_plots',
                                      'qa.motion.plots'),
                                     ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask',
               datasink, 'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target',
               datasink, 'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file',
                   datasink, 'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink,
                     [('avgwf_txt_file', 'qa.tsnr'),
                      ('summary_file', 'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink,
                     [('avgwf_txt_file', 'copes.roi'),
                      ('summary_file', 'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean',
               datasink, 'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform',
               datasink, 'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform',
               datasink, 'xfm.anat2target')

    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000
    datasink.inputs.base_directory = output_dir
    return wf
def create_workflow(contrasts, combine_runs=True):
    """Build the first-level (and optional fixed-effects) FSL workflow.

    Parameters
    ----------
    contrasts : list
        FSL-style contrast specifications passed to the modelfit workflow.
    combine_runs : bool
        When True, add a fixed-effects workflow that combines copes and
        varcopes across runs.

    Returns
    -------
    nipype Workflow named ``level1flow``.
    """
    level1_workflow = pe.Workflow(name='level1flow')

    # ===================================================================
    #     Input
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            # 'funcmasks',
            'fwhm',  # smoothing
            'highpass',

            'funcs',
            'event_log',
            'motion_parameters',
            'motion_outlier_files',

            'ref_func',
            'ref_funcmask',
        ]),
        name="inputspec")

    def remove_runs_missing_funcs(in_files, in_funcs):
        """Keep only event files whose (sub, ses, run) has a matching func."""
        import os
        import re

        # if input.synchronize = True, then in_files and in_funcs will
        # be single strings
        # BUGFIX kept as asserts on purpose: lists are required here.
        assert not isinstance(in_files, str), "in_files must be list"
        assert not isinstance(in_funcs, str), "in_funcs must be list"

        # Defensive wrapping (only reachable when asserts are stripped
        # under ``python -O``).
        if isinstance(in_files, str):
            in_files = [in_files]
        if isinstance(in_funcs, str):
            in_funcs = [in_funcs]

        # Index available functional runs by their BIDS entities.
        has_func = set()
        for f in in_funcs:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from '
                    'func "%s"' % f) from e
            has_func.add((sub, ses, run))

        files = []
        for f in in_files:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from '
                    'event file "%s"' % f) from e
            if (sub, ses, run) in has_func:
                files.append(f)
        return files

    input_events = pe.Node(
        interface=niu.Function(input_names=['in_files', 'in_funcs'],
                               output_names=['out_files'],
                               function=remove_runs_missing_funcs),
        name='input_events',
    )

    level1_workflow.connect([
        (inputnode, input_events,
         [('funcs', 'in_funcs'),
          ('event_log', 'in_files'),
          ]),
    ])

    # -------------------------------------------------------------------
    #          General pipeline
    # -------------------------------------------------------------------

    """
    Preliminaries
    -------------
    Setup any package specific configuration. The output file format for FSL
    routines is being set to compressed NIFTI.
    """
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    modelfit = fslflows.create_modelfit_workflow()

    if combine_runs:
        fixed_fx = fslflows.create_fixed_effects_flow()
    else:
        fixed_fx = None

    """
    Artifact detection is done in preprocessing workflow.
    """

    """
    Add model specification nodes between the preprocessing and modelfitting
    workflows.
    """
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")

    """
    Set up first-level workflow
    ---------------------------
    """

    def sort_copes(files):
        """ Sort by copes and the runs, ie.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        # BUGFIX: the original ``assert files[0] is not str`` compared
        # identity with the type object and was always true; the intent
        # is to reject a bare string (i.e. a single, unnested file).
        assert not isinstance(files[0], str)

        numcopes = len(files[0])
        assert numcopes > 1

        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    if fixed_fx is not None:
        level1_workflow.connect([
            (inputnode, fixed_fx,
             [('ref_funcmask', 'flameo.mask_file')  # To-do: use reference mask!!!
              ]),
            (modelfit, fixed_fx,
             [(('outputspec.copes', sort_copes), 'inputspec.copes'),
              ('outputspec.dof_file', 'inputspec.dof_files'),
              (('outputspec.varcopes', sort_copes), 'inputspec.varcopes'),
              (('outputspec.copes', num_copes), 'l2model.num_copes'),
              ])
        ])

    # -------------------------------------------------------------------
    #          Output
    # -------------------------------------------------------------------

    # Datasink
    # NOTE(review): ``ds_root`` is not defined in this function — presumably
    # a module-level constant; verify it is in scope at import time.
    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/modelfit',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        # ('/mask/', '/'),
        # ('_preproc_flirt_thresh.nii.gz', '_transformedmask.nii.gz'),
        # ('_preproc_volreg_unwarped.nii.gz', '_preproc.nii.gz'),
        # ('_preproc_flirt_unwarped.nii.gz', '_preproc-mask.nii.gz'),
        # ('/_mc_method_afni3dvolreg/', '/'),
        # ('/funcs/', '/'),
        # ('/funcmasks/', '/'),
        # ('preproc_volreg.nii.gz', 'preproc.nii.gz'),
        ('/_mc_method_afni3dAllinSlices/', '/'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        # (r'/_addmean[0-9]+/', r'/func/'),
        # (r'/_dilatemask[0-9]+/', r'/func/'),
        # (r'/_funcbrain[0-9]+/', r'/func/'),
        # (r'/_maskfunc[0-9]+/', r'/func/'),
        # (r'/_mc[0-9]+/', r'/func/'),
        # (r'/_meanfunc[0-9]+/', r'/func/'),
        # (r'/_outliers[0-9]+/', r'/func/'),
        # (r'/_undistort_masks[0-9]+/', r'/func/'),
        # (r'/_undistort[0-9]+/', r'/func/'),
    ]
    level1_workflow.connect([
        (modelfit, outputfiles,
         [(('outputspec.copes', sort_copes), 'copes'),
          ('outputspec.dof_file', 'dof_files'),
          (('outputspec.varcopes', sort_copes), 'varcopes'),
          ]),
    ])
    if fixed_fx is not None:
        level1_workflow.connect([
            (fixed_fx, outputfiles,
             [('outputspec.res4d', 'fx.res4d'),
              ('outputspec.copes', 'fx.copes'),
              ('outputspec.varcopes', 'fx.varcopes'),
              ('outputspec.zstats', 'fx.zstats'),
              ('outputspec.tstats', 'fx.tstats'),
              ]),
        ])

    # -------------------------------------------------------------------
    #          Experiment specific components
    # -------------------------------------------------------------------

    """
    Use the get_node function to retrieve an internal node by name. Then set
    the iterables on this node to perform two different extents of smoothing.
    """

    featinput = level1_workflow.get_node('modelfit.inputspec')
    # featinput.iterables = ('fwhm', [5., 10.])
    featinput.inputs.fwhm = 2.0

    hpcutoff_s = 50.  # FWHM in seconds
    TR = 2.5
    # BUGFIX: derive from the TR variable instead of repeating the
    # magic constant 2.5 (same value, one source of truth).
    hpcutoff_nvol = hpcutoff_s / TR  # FWHM in volumns

    # Use Python3 for processing. See code/requirements.txt for pip packages.
    featinput.inputs.highpass = hpcutoff_nvol / 2.355  # Gaussian: σ in volumes - (REMEMBER to run with Python 3)

    """
    Setup a function that returns subject-specific information about the
    experimental paradigm. This is used by the
    :class:`nipype.modelgen.SpecifyModel` to create the information necessary
    to generate an SPM design matrix. In this tutorial, the same paradigm was
    used for every participant. Other examples of this function are available
    in the `doc/examples` folder. Note: Python knowledge required here.
    """

    # from timeevents.curvetracing import calc_curvetracing_events
    from timeevents import process_time_events

    timeevents = pe.MapNode(
        interface=process_time_events,  # calc_curvetracing_events,
        iterfield=('event_log', 'in_nvols', 'TR'),
        name='timeevents')

    def get_nvols(funcs):
        """Return the number of volumes of each functional image."""
        import nibabel as nib
        nvols = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            try:
                nvols.append(func_img.get_data().shape[3])
            except IndexError:
                # if shape only has 3 dimensions, then it is only 1 volume
                nvols.append(1)
        return (nvols)

    def get_TR(funcs):
        """Read the repetition time (seconds) from each image header."""
        import nibabel as nib
        TRs = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header
            try:
                TR = round(header.get_zooms()[3], 5)
            except IndexError:
                TR = 2.5
                print("Warning: %s did not have TR defined in the header. "
                      "Using default TR of %0.2f" % (func, TR))

            assert TR > 1
            TRs.append(TR)
        return (TRs)

    level1_workflow.connect([
        (inputnode, timeevents,
         [(('funcs', get_nvols), 'in_nvols'),
          (('funcs', get_TR), 'TR'),
          ]),
        (input_events, timeevents,
         [('out_files', 'event_log')]),
        (inputnode, modelspec,
         [('motion_parameters', 'realignment_parameters')]),
        (modelspec, modelfit,
         [('session_info', 'inputspec.session_info')]),
    ])

    # Ignore volumes after last good response
    filter_outliers = pe.MapNode(
        interface=FilterNumsTask(),
        name='filter_outliers',
        iterfield=('in_file', 'max_number'))

    level1_workflow.connect([
        (inputnode, filter_outliers,
         [('motion_outlier_files', 'in_file')]),
        (filter_outliers, modelspec,
         [('out_file', 'outlier_files')]),
        (timeevents, filter_outliers,
         [('out_nvols', 'max_number')]),
    ])

    def evt_info(cond_events):
        """Convert per-run condition events into nipype Bunch structures."""
        output = []

        # for each run
        for ev in cond_events:
            from nipype.interfaces.base import Bunch
            from copy import deepcopy

            # Only conditions that actually have events in this run.
            names = []
            for name in ev.keys():
                if ev[name].shape[0] > 0:
                    names.append(name)

            # (dead intermediate onset/duration/amplitude lists removed:
            #  they duplicated what Bunch below computes and were discarded)
            run_results = Bunch(
                conditions=names,
                onsets=[deepcopy(ev[name].time) for name in names],
                durations=[deepcopy(ev[name].dur) for name in names],
                amplitudes=[deepcopy(ev[name].amplitude) for name in names])

            output.append(run_results)
        return output

    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = TR  # to-do: specify per func
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff_s

    modelfit.inputs.inputspec.interscan_interval = TR  # to-do: specify per func
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.contrasts = contrasts
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    # level1_workflow.base_dir = os.path.abspath('./workingdirs/level1flow')
    modelfit.config['execution'] = dict(crashdump_dir=os.path.abspath('.'))

    # Ignore volumes after subject has finished working for the run
    beh_roi = pe.MapNode(
        fsl.ExtractROI(t_min=0),
        name='beh_roi',
        iterfield=['in_file', 't_size'])

    level1_workflow.connect([
        (timeevents, modelspec,
         [(('out_events', evt_info), 'subject_info'),
          ]),
        (inputnode, beh_roi,
         [('funcs', 'in_file'),
          ]),
        (timeevents, beh_roi,
         [('out_nvols', 't_size'),
          ]),
        (beh_roi, modelspec,
         [('roi_file', 'functional_runs'),
          ]),
        (beh_roi, modelfit,
         [('roi_file', 'inputspec.functional_data'),
          ]),
        (beh_roi, outputfiles,
         [('roi_file', 'roi_file'),
          ]),
        # (inputnode, datasource, [('in_data', 'base_directory')]),
        # (infosource, datasource, [('subject_id', 'subject_id')]),
        # (infosource, modelspec, [(('subject_id', subjectinfo), 'subject_info')]),
        # (datasource, preproc, [('func', 'inputspec.func')]),
    ])
    return (level1_workflow)
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """
    # NOTE(review): this is an older variant of analyze_openfmri_dataset;
    # other versions of the same function exist elsewhere in this file.

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    # Subject directory names (e.g. 'sub001') under the data directory.
    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, 'sub*'))])

    # Iterate over subject / model / task; a single subject may be selected.
    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')
    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]

    # Per-subject run ids, condition names and TR, resolved by a helper
    # defined elsewhere in this file (get_subjectinfo).
    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'task_id', 'model_id'],
                                         outfields=['anat', 'bold', 'behav',
                                                    'contrasts']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                        'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                        'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                  'run%03d/cond*.txt'),
                                        'contrasts': ('models/model%03d/'
                                                      'task_contrasts.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id', 'task_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'task_id', 'run_id']],
                                       'contrasts': [['model_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        # Convert a cutoff in seconds to the FSL highpass value (volumes).
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        # Parse the task contrast file and append a t-contrast per condition.
        import numpy as np
        # NOTE(review): under Python 3, genfromtxt with dtype=object yields
        # bytes, so the str comparison against 'task%03d' below would never
        # match and file-defined contrasts would be silently dropped —
        # confirm the intended Python version (newer variants of this
        # function parse the file manually instead).
        contrast_def = np.genfromtxt(contrast_file, dtype=object)
        if len(contrast_def.shape) == 1:
            contrast_def = contrast_def[None, :]
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1)
                                 for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(files):
        # Transpose [run][contrast] file lists into [contrast][run].
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        # Number of runs feeding the fixed-effects model.
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration,
               'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration,
               'inputspec.anatomical_image')
    registration.inputs.inputspec.target_image = \
        fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')

    def merge_files(copes, varcopes):
        # Concatenate copes and varcopes, remembering the two group sizes.
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ])])
    wf.connect(mergefunc, 'out_files', registration,
               'inputspec.source_files')

    def split_files(in_files, splits):
        # Inverse of merge_files.
        # NOTE(review): this slices at splits[1] (len(varcopes)) rather than
        # splits[0] (len(copes)); the two lengths are presumably always
        # equal here, but splits[0] looks like the intended index — confirm.
        copes = in_files[:splits[1]]
        varcopes = in_files[splits[1]:]
        return copes, varcopes

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, model_id, task_id):
        # Build datasink filename substitutions for readable output names.
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
                     'mean'))
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ])])
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')

    """
    Set processing parameters
    """

    hpcutoff = 120.
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000
    datasink.inputs.base_directory = output_dir
    return wf
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
                             fwhm=6.0, subjects_dir=None, target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory
    subject : list of str or None
        Subject directory names to analyze; None or an empty list means
        all subjects matching ``subj_prefix``
    model_id : int
        Model number (selects the ``model%03d`` directory)
    task_id : list of int
        Task numbers to iterate over
    output_dir : str
        Base directory for the datasink outputs
    subj_prefix : str
        Glob pattern selecting subject directories under ``data_dir``
    hpcutoff : float
        Highpass filter cutoff in seconds
    use_derivatives : bool
        Whether to add temporal derivatives to the dgamma HRF basis
    fwhm : float
        Smoothing kernel FWHM in mm
    subjects_dir : str or None
        FreeSurfer SUBJECTS_DIR; when set, FreeSurfer-based registration
        is used instead of the FSL-only registration workflow
    target : str or None
        Optional target image overriding the default MNI template
        (FreeSurfer registration path only)
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(
        preproc.get_node('plot_motion'), 'out_file',
        preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = sorted([
        path.split(os.path.sep)[-1]
        for path in glob(os.path.join(data_dir, subj_prefix))
    ])

    infosource = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'model_id', 'task_id']),
        name='infosource')

    # BUGFIX: the default ``subject=None`` used to crash on
    # ``len(subject)``; any falsy value now means "all subjects".
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]), ('task_id',
                                                           task_id)]
    else:
        infosource.iterables = [('subject_id', [
            subjects[subjects.index(subj)] for subj in subject
        ]), ('model_id', [model_id]), ('task_id', task_id)]

    subjinfo = pe.Node(
        niu.Function(
            input_names=['subject_id', 'base_dir', 'task_id', 'model_id'],
            output_names=['run_id', 'conds', 'TR'],
            function=get_subjectinfo),
        name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=['subject_id', 'run_id', 'task_id', 'model_id'],
                outfields=['anat', 'bold', 'behav', 'contrasts']),
            name='datasource')
    else:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=['subject_id', 'run_id', 'task_id', 'model_id'],
                outfields=['anat', 'bold', 'behav']),
            name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]
        }
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]
        }

    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([
        (datasource, preproc, [('bold', 'inputspec.func')]),
    ])

    def get_highpass(TR, hpcutoff):
        # Convert a cutoff in seconds into the value FSL expects (volumes).
        return hpcutoff / (2. * TR)

    gethighpass = pe.Node(
        niu.Function(
            input_names=['TR', 'hpcutoff'],
            output_names=['highpass'],
            function=get_highpass),
        name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([
                    np.array(row.split()) for row in fp.readlines()
                    if row.strip()
                ])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [
                row[1], 'T', ['cond%03d' % (i + 1)
                              for i in range(len(conds))],
                row[2:].astype(float).tolist()
            ]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(
        niu.Function(
            input_names=['contrast_file', 'task_id', 'conds'],
            output_names=['contrasts'],
            function=get_contrasts),
        name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            norm_threshold=1,
            zintensity_threshold=3,
            parameter_source='FSL',
            mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        # Reshape the flat list of onset files into one list per run.
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(
            int(num_elements / num_conds), num_conds).tolist()

    reshape_behav = pe.Node(
        niu.Function(
            input_names=['behav', 'run_id', 'conds'],
            output_names=['behav'],
            function=check_behav_list),
        name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art,
                 [('outputspec.motion_parameters', 'realignment_parameters'),
                  ('outputspec.realigned_files', 'realigned_files'),
                  ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec,
                 [('outputspec.highpassed_files', 'functional_runs'),
                  ('outputspec.motion_parameters',
                   'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [
                    ('session_info', 'inputspec.session_info')
                ]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])])

    # Compute TSNR on realigned data regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(CalculateMedian(), name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(
            int(len(all_copes) / num_copes), num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(
            int(len(all_varcopes) / num_copes), num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(
        niu.Function(
            input_names=['copes', 'varcopes', 'contrasts'],
            output_names=['copes', 'varcopes', 'n_runs'],
            function=sort_copes),
        name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes',
                                          'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [
                    ('outputspec.dof_file', 'inputspec.dof_files'),
                ])])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = \
            fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        # Concatenate the stat images so one registration maps them all;
        # ``splits`` records the sublist lengths for the inverse operation.
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(
        niu.Function(
            input_names=['copes', 'varcopes', 'zstats'],
            output_names=['out_files', 'splits'],
            function=merge_files),
        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, [
        ('copes', 'copes'),
        ('varcopes', 'varcopes'),
        ('zstats', 'zstats'),
    ])])
    wf.connect(mergefunc, 'out_files', registration,
               'inputspec.source_files')

    def split_files(in_files, splits):
        # Inverse of merge_files: recover the three sublists.
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(
        niu.Function(
            input_names=['in_files', 'splits'],
            output_names=['copes', 'varcopes', 'zstats'],
            function=split_files),
        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files', splitfunc,
               'in_files')

    if subjects_dir:
        # ROI summaries from the FreeSurfer aparc segmentation.
        get_roi_mean = pe.MapNode(
            fs.SegStats(default_color_table=True),
            iterfield=['in_file'],
            name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(
            fixed_fx.get_node('outputspec'), 'copes', get_roi_mean,
            'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean,
                   'segmentation_file')

        get_roi_tsnr = pe.MapNode(
            fs.SegStats(default_color_table=True),
            iterfield=['in_file'],
            name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr,
                   'segmentation_file')
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        # Build DataSink filename substitutions for readable output names.
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i,
                         'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i,
                         '/cope%02d_' % (i + 1)))
        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(
        niu.Function(
            input_names=[
                'subject_id', 'conds', 'run_id', 'model_id', 'task_id'
            ],
            output_names=['substitutions'],
            function=get_subs),
        name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'), ('copes', 'copes'),
                  ('varcopes', 'varcopes'), ('zstats', 'zstats'),
                  ('tstats', 'tstats')])])
    wf.connect([(modelfit.get_node('modelgen'), datasink, [
        ('design_cov', 'qa.model'),
        ('design_image', 'qa.model.@matrix_image'),
        ('design_file', 'qa.model.@matrix'),
    ])])
    wf.connect([(preproc, datasink,
                 [('outputspec.motion_parameters', 'qa.motion'),
                  ('outputspec.motion_plots', 'qa.motion.plots'),
                  ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink,
                     [('avgwf_txt_file', 'qa.tsnr'),
                      ('summary_file', 'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink,
                     [('avgwf_txt_file', 'copes.roi'),
                      ('summary_file', 'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink, [
        ('copes', 'copes.mni'),
        ('varcopes', 'varcopes.mni'),
        ('zstats', 'zstats.mni'),
    ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')
    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000
    datasink.inputs.base_directory = output_dir
    return wf
Preliminaries
-------------

Setup any package specific configuration. The output file format for FSL
routines is being set to compressed NIFTI.
"""

# Emit .nii.gz from every FSL interface used below.
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# Top-level workflow tying preprocessing, model fitting and the
# across-run fixed-effects stage together.
level1_workflow = pe.Workflow(name='level1flow')

preproc = create_featreg_preproc(whichvol='first')

modelfit = create_modelfit_workflow()

fixed_fx = create_fixed_effects_flow()
"""
Add artifact detection and model specification nodes between the preprocessing
and modelfitting workflows.
"""

# One ArtifactDetect per run (MapNode over realigned files, motion
# parameters and mask); flags volumes with large motion or intensity
# spikes so they can be modelled as outlier regressors.
art = pe.MapNode(
    interface=ra.ArtifactDetect(use_differences=[True, False],
                                use_norm=True,
                                norm_threshold=1,
                                zintensity_threshold=3,
                                parameter_source='FSL',
                                mask_type='file'),
    iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
    name="art")
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             work_dir=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory
    subject : str or None
        A single subject directory name; None analyzes all ``sub*``
        subjects
    model_id : int
        Model number (selects the ``model%03d`` directory)
    work_dir : str
        Nipype working directory (defaults to cwd)
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = [path.split(os.path.sep)[-1]
                for path in glob(os.path.join(data_dir, 'sub*'))]

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id']),
                         name='infosource')
    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id])]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id])]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'model_id'],
                                         outfields=['anat', 'bold',
                                                    'behav']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'anat': '%s/anatomy/highres001.nii.gz',
        'bold': '%s/BOLD/task001_r*/bold.nii.gz',
        'behav': ('%s/model/model%03d/onsets/task001_'
                  'run%03d/cond*.txt')}
    datasource.inputs.template_args = {
        'anat': [['subject_id']],
        'bold': [['subject_id']],
        'behav': [['subject_id', 'model_id', 'run_id']]}
    # BUGFIX: DataGrabber's trait is ``sort_filelist`` (there is no
    # ``sorted`` input; setting it raised a TraitError). This also matches
    # the newer pipelines in this file.
    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        # Convert a cutoff in seconds into the value FSL expects (volumes).
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(base_dir, model_id, conds):
        import numpy as np
        import os
        contrast_file = os.path.join(base_dir, 'models',
                                     'model%03d' % model_id,
                                     'task_contrasts.txt')
        contrast_def = np.genfromtxt(contrast_file, dtype=object)
        contrasts = []
        for row in contrast_def:
            # BUGFIX: condition names are 1-based (cond001, ...), matching
            # the onset file naming and the other pipelines in this file;
            # the original used ``'cond%03d' % i`` (cond000, ...).
            con = [row[0], 'T',
                   ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[1:].astype(float).tolist()]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['base_dir', 'model_id',
                                                    'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')
    contrastgen.inputs.base_dir = data_dir

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True,
                                                                  False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(infosource, 'model_id', contrastgen, 'model_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')
    wf.connect([(preproc, art,
                 [('outputspec.motion_parameters', 'realignment_parameters'),
                  ('outputspec.realigned_files', 'realigned_files'),
                  ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec,
                 [('outputspec.highpassed_files', 'functional_runs'),
                  ('outputspec.motion_parameters',
                   'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])
    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(files):
        # Regroup [run][contrast] into [contrast][run].
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds):
        # DataSink filename substitutions for readable output names.
        subs = [('_subject_id_%s/' % subject_id, '')]
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i,
                         'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(subjinfo, 'conds', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'), ('copes', 'copes'),
                  ('varcopes', 'varcopes'), ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])
    """
    Set processing parameters
    """

    hpcutoff = 120.
    subjinfo.inputs.task_id = 1
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    if work_dir is None:
        work_dir = os.path.join(os.getcwd(), 'working')
    wf.base_dir = work_dir
    datasink.inputs.base_directory = os.path.join(work_dir, 'output')
    wf.config['execution'] = dict(
        crashdump_dir=os.path.join(work_dir, 'crashdumps'),
        stop_on_first_crash=True)
    wf.run('MultiProc', plugin_args={'n_procs': 2})
def create_modelfit_workflow_bfsl(name='modelfit_workflow_bfsl'):
    """Wrap the FSL modelfit workflow to take bfsl-style event files.

    Builds model specification (SpecifyModel fed by ``bfsl_files``), FSL
    model fitting, an across-run fixed-effects stage and FDR thresholding
    of the resulting z-stats.

    Parameters
    ----------
    name : str
        Name of the returned workflow.

    Returns
    -------
    nipype.pipeline.engine.Workflow
        Workflow with an ``inputspec`` (functional_runs, bfsl_files, mask,
        realignment_parameters, contrasts and model options) and an
        ``outputspec`` (zstats, level2 copes/varcopes/tdof, thresholded
        z-stats).
    """
    inputspec = pe.Node(
        util.IdentityInterface(
            fields=['functional_runs', 'bases', 'bfsl_files', 'contrasts',
                    'interscan_interval', 'film_threshold',
                    'model_serial_correlations', 'highpass_filter', 'mask',
                    'realignment_parameters'],),
        name='inputspec')

    workflow = pe.Workflow(name=name)
    modelfit_workflow = create_modelfit_workflow()

    # Sensible defaults; all can be overridden via the inputspec.
    inputspec.inputs.bases = {'dgamma': {'derivs': True}}
    inputspec.inputs.film_threshold = 1000
    inputspec.inputs.interscan_interval = 2.0
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.highpass_filter = 128

    for field in ['bases', 'contrasts', 'film_threshold',
                  'interscan_interval', 'model_serial_correlations']:
        workflow.connect(inputspec, field, modelfit_workflow,
                         'inputspec.%s' % field)

    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces import fsl
    specifymodel = pe.Node(SpecifyModel(), name='specifymodel')
    specifymodel.inputs.input_units = 'secs'

    def get_highpas_filter_cutoff(hz, tr):
        # Convert a cutoff in seconds into the value FSL expects (volumes).
        return float(hz) / (tr * 2)

    get_highpas_filter_cutoff_node = pe.Node(
        util.Function(function=get_highpas_filter_cutoff,
                      input_names=['hz', 'tr'],
                      output_names='cutoff'),
        name='get_highpas_filter_cutoff_node')

    workflow.connect(inputspec, 'interscan_interval',
                     get_highpas_filter_cutoff_node, 'tr')
    workflow.connect(inputspec, 'highpass_filter',
                     get_highpas_filter_cutoff_node, 'hz')
    workflow.connect(get_highpas_filter_cutoff_node, 'cutoff',
                     specifymodel, 'high_pass_filter_cutoff')

    workflow.connect(inputspec, 'interscan_interval', specifymodel,
                     'time_repetition')
    workflow.connect(inputspec, 'bfsl_files', specifymodel, 'event_files')
    workflow.connect(inputspec, 'functional_runs', specifymodel,
                     'functional_runs')
    workflow.connect(inputspec, 'realignment_parameters', specifymodel,
                     'realignment_parameters')
    workflow.connect(specifymodel, 'session_info', modelfit_workflow,
                     'inputspec.session_info')
    workflow.connect(inputspec, 'functional_runs', modelfit_workflow,
                     'inputspec.functional_data')

    fixedfx = create_fixed_effects_flow()
    workflow.connect(inputspec, 'mask', fixedfx, 'flameo.mask_file')

    def num_copes(files):
        return len(files)

    def transpose_copes(copes):
        # Regroup [run][contrast] into [contrast][run].
        import numpy as np
        return np.array(copes).T.tolist()

    workflow.connect([(modelfit_workflow, fixedfx,
                       [(('outputspec.copes', transpose_copes),
                         'inputspec.copes'),
                        (('outputspec.varcopes', transpose_copes),
                         'inputspec.varcopes'),
                        ('outputspec.dof_file', 'inputspec.dof_files'),
                        (('outputspec.copes', num_copes),
                         'l2model.num_copes')])])

    # z -> p conversion feeding the FDR thresholding workflow.
    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         nested=True,
                         iterfield=['in_file'],
                         name='ztop',)

    fdr_workflow = create_fdr_threshold_workflow()

    workflow.connect([
        (fixedfx, ztopval, [('outputspec.zstats', 'in_file'), ]),
        (fixedfx, fdr_workflow, [('outputspec.zstats',
                                  'inputspec.z_stats'), ]),
        (ztopval, fdr_workflow, [('out_file', 'inputspec.p_values'), ]),
        (inputspec, fdr_workflow, [('mask', 'inputspec.mask'), ]),
    ])

    # BUGFIX: the field was declared as 'thresholded_zstats' while the
    # connect below targets 'thresholded_z_stats', which raised at
    # workflow-build time; the field list now matches the connection.
    outputspec = pe.Node(
        util.IdentityInterface(
            fields=['zstats', 'level2_copes', 'level2_varcopes',
                    'level2_tdof', 'thresholded_z_stats']),
        name='outputspec')

    workflow.connect(fixedfx, 'outputspec.zstats', outputspec, 'zstats')
    workflow.connect(fixedfx, 'outputspec.copes', outputspec,
                     'level2_copes')
    workflow.connect(fixedfx, 'outputspec.varcopes', outputspec,
                     'level2_varcopes')
    workflow.connect(fixedfx, 'flameo.tdof', outputspec, 'level2_tdof')
    workflow.connect(fdr_workflow, 'outputspec.thresholded_z_stats',
                     outputspec, 'thresholded_z_stats')

    return workflow
def create_random_effects_workflow(name='randomfx'):
    """Build a group-level workflow around nipype's fixed-effects flow.

    The stock ``create_fixed_effects_flow`` is re-wired to run FLAME1 and
    to take a merged t-dof volume directly instead of generating a DOF
    volume from per-run dof files; results are FDR-thresholded and
    clustered.

    Inputs (``inputspec``): cope_files, varcope_files, tdof_files,
    mask_file, fdr_q (default 0.05).
    Outputs (``outputspec``): zstats, thresholded_z_stats, txt_index_file.
    """
    inputspec = pe.Node(util.IdentityInterface(fields=['cope_files',
                                                       'varcope_files',
                                                       'tdof_files',
                                                       'mask_file',
                                                       'fdr_q']),
                        name='inputspec')
    inputspec.inputs.fdr_q = 0.05

    workflow = pe.Workflow(name=name)

    fixedfx_flow = create_fixed_effects_flow()

    def num_copes(files):
        return len(files)

    def listify(x):
        return [x]

    workflow.connect(inputspec, ('cope_files', listify), fixedfx_flow,
                     'inputspec.copes')
    workflow.connect(inputspec, ('varcope_files', listify), fixedfx_flow,
                     'inputspec.varcopes')
    workflow.connect(inputspec, ('varcope_files', num_copes), fixedfx_flow,
                     'l2model.num_copes')
    workflow.connect(inputspec, 'mask_file', fixedfx_flow,
                     'flameo.mask_file')

    # Mixed-effects (FLAME stage 1) instead of the default run mode.
    fixedfx_flow.inputs.flameo.run_mode = 'flame1'

    # Surgically remove the gendofvolume node: disconnect all of its edges
    # first, then drop the node, so flameo's dof_var_cope_file can be fed
    # from the merged tdof volume below. NOTE: this depends on the internal
    # node names of create_fixed_effects_flow.
    fixedfx_flow.disconnect([(fixedfx_flow.get_node('inputspec'),
                              fixedfx_flow.get_node('gendofvolume'),
                              [('dof_files', 'dof_files')]),
                             (fixedfx_flow.get_node('copemerge'),
                              fixedfx_flow.get_node('gendofvolume'),
                              [('merged_file', 'cope_files')]),
                             (fixedfx_flow.get_node('gendofvolume'),
                              fixedfx_flow.get_node('flameo'),
                              [('dof_volume', 'dof_var_cope_file')])])
    fixedfx_flow.remove_nodes([fixedfx_flow.get_node('gendofvolume')])

    # Merge the supplied per-subject t-dof files over time and feed the
    # merged volume to flameo directly.
    tdof_merge = pe.Node(interface=fsl.Merge(dimension='t'),
                         name="tdof_merge")
    workflow.connect(inputspec, 'tdof_files', tdof_merge, 'in_files')
    workflow.connect(tdof_merge, 'merged_file', fixedfx_flow,
                     'flameo.dof_var_cope_file')

    # z -> p conversion feeding the FDR thresholding workflow.
    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         iterfield=['in_file'],
                         nested=True,
                         name='ztop',)

    fdr_workflow = create_fdr_threshold_workflow()

    workflow.connect([
        (fixedfx_flow, ztopval, [('outputspec.zstats', 'in_file'), ]),
        (fixedfx_flow, fdr_workflow, [('outputspec.zstats',
                                       'inputspec.z_stats'), ]),
        (ztopval, fdr_workflow, [('out_file', 'inputspec.p_values'), ]),
    ])
    workflow.connect(inputspec, 'mask_file', fdr_workflow,
                     'inputspec.mask')
    workflow.connect(inputspec, 'fdr_q', fdr_workflow, 'inputspec.q')

    # Cluster the thresholded maps and report local maxima.
    cluster = pe.MapNode(fsl.Cluster(), iterfield=['in_file'],
                         name='cluster')
    cluster.inputs.threshold = 2.0
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_localmax_txt_file = True
    workflow.connect(fdr_workflow, 'outputspec.thresholded_z_stats',
                     cluster, 'in_file')

    outputspec = pe.Node(util.IdentityInterface(fields=['zstats',
                                                        'thresholded_z_stats',
                                                        'txt_index_file']),
                         name='outputspec')
    workflow.connect(fixedfx_flow, 'outputspec.zstats', outputspec,
                     'zstats')
    workflow.connect(fdr_workflow, 'outputspec.thresholded_z_stats',
                     outputspec, 'thresholded_z_stats')
    workflow.connect(cluster, 'localmax_txt_file', outputspec,
                     'txt_index_file')

    return workflow
def create_workflow(out_label, contrasts_name, RegSpace):
    """Create the level-2 (fixed effects across runs) workflow.

    Parameters
    ----------
    out_label : str
        Label for this analysis; used in the working and output paths.
    contrasts_name : str
        Name of the contrast set; used in the working and output paths.
    RegSpace : str
        Registration space label; used in the working and output paths.

    Returns
    -------
    nipype.pipeline.engine.Workflow
        Workflow with an ``inputspec`` (ref_funcmask, copes, dof_file,
        varcopes) whose fixed-effects results are sunk to a BIDS-like
        derivatives tree.

    Notes
    -----
    Relies on a module-level ``ds_root`` variable for the DataSink base
    directory — TODO confirm it is defined wherever this module is used.
    """
    level2_workflow = pe.Workflow(name='level2flow')
    level2_workflow.base_dir = os.path.abspath(
        './workingdirs/level2flow/' + contrasts_name + '/' + RegSpace +
        '/level2/' + out_label)

    # ------------------ Input
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'ref_funcmask',
        'copes',
        'dof_file',
        'varcopes',
    ]), name="inputspec")

    # ------------------ Workflow
    """
    Preliminaries
    -------------

    Setup any package specific configuration. The output file format
    for FSL routines is being set to compressed NIFTI.
    """
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    fixed_fx = fslflows.create_fixed_effects_flow()

    def sort_copes(files):
        """ Sort by copes and the runs, ie.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        # BUGFIX: the original guard ``files[0] is not str`` compared
        # against the ``str`` type object itself and was always true;
        # check the actual value type instead.
        assert not isinstance(files[0], str)
        numcopes = len(files[0])
        assert numcopes > 1
        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    # Level2 fixed effects
    level2_workflow.connect([(inputnode, fixed_fx, [
        ('ref_funcmask', 'flameo.mask_file'),
        (('copes', sort_copes), 'inputspec.copes'),
        ('dof_file', 'inputspec.dof_files'),
        (('varcopes', sort_copes), 'inputspec.varcopes'),
        (('copes', num_copes), 'l2model.num_copes'),
    ])])

    # ------------------ Output (DataSink)
    outputfiles = pe.Node(
        nio.DataSink(base_directory=ds_root,
                     container='derivatives/modelfit/' + contrasts_name +
                     '/' + RegSpace + '/level2/' + out_label,
                     parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'_refsub([a-zA-Z0-9]+)', r''),
    ]
    level2_workflow.connect([
        (fixed_fx, outputfiles, [
            ('outputspec.res4d', 'fx.res4d'),
            ('outputspec.copes', 'fx.copes'),
            ('outputspec.varcopes', 'fx.varcopes'),
            ('outputspec.zstats', 'fx.zstats'),
            ('outputspec.tstats', 'fx.tstats'),
        ]),
    ])
    return level2_workflow