def create_DARTEL_wf(subj_list, file_template, work_dir, out_dir):
    '''
    Aligns all images to a template (average of all images), then warps
    images into MNI space (using an SPM tissue probability map, see
    https://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf, section 25.4).

    subj_list: list of subject IDs
        e.g. [sub-001, sub-002]
    file_template: string to identify all files to align (using glob).
        e.g. file_template = os.path.join(work_dir, 'pag_mask', '*_pag_mask.nii')
        The template can identify a larger set of files, and the subject_list
        will grab a subset.
            e.g. The template may grab sub-001, sub-002, sub-003 ...
            But if the subject_list only includes sub-001, then only sub-001
            will be used. This means the template can overgeneralize, but
            specific subjects can be easily excluded (e.g. for movement)
    work_dir: string, denoting path to working directory.
    out_dir: string, denoting output directory (results saved to work
        directory and output)

    Returns a nipype Workflow; the caller is expected to run it.
    '''
    from nipype.interfaces.spm.preprocess import DARTEL, CreateWarped
    from nipype.interfaces.io import DataSink
    import nipype.pipeline.engine as pe
    import os
    from jtnipyutil.util import files_from_template

    # Create work/output directories if necessary — keeps this entry point
    # consistent with setup_DARTEL_warp_wf, which already does this.
    if not os.path.isdir(work_dir):
        os.makedirs(work_dir)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    # set up workflow.
    DARTEL_wf = pe.Workflow(name='DARTEL_wf')
    DARTEL_wf.base_dir = work_dir

    # get images
    images = files_from_template(subj_list, file_template)

    # set up DARTEL. DARTEL's image_files takes a list per channel, hence the
    # extra list wrapping around the single-channel image list.
    dartel = pe.Node(interface=DARTEL(), name='dartel')
    dartel.inputs.image_files = [images]

    dartel_warp = pe.Node(interface=CreateWarped(), name='dartel_warp')
    dartel_warp.inputs.image_files = images
    # dartel_warp.inputs.flowfield_files = # connected from dartel below.

    ################## Setup datasink.
    sinker = pe.Node(DataSink(parameterization=True), name='sinker')
    sinker.inputs.base_directory = out_dir

    DARTEL_wf.connect([
        (dartel, dartel_warp, [('dartel_flow_fields', 'flowfield_files')]),
        (dartel, sinker, [('final_template_file', 'avg_template'),
                          ('template_files', 'avg_template.@template_stages'),
                          ('dartel_flow_fields', 'dartel_flow')]),
        (dartel_warp, sinker, [('warped_files', 'warped_PAG')])
    ])
    return DARTEL_wf
def setup_DARTEL_warp_wf(subj_list, data_template, warp_template, work_dir, out_dir):
    '''
    Build a workflow that applies existing DARTEL flowfields to data files.

    subj_list: list of strings for each subject
        e.g. ['sub-001', 'sub-002', 'sub-003']
    data_template: string to identify all data files (using glob).
        e.g. template = '/home/neuro/data/rest1_AROMA/nosmooth/sub-*/model/sub-*/_modelestimate0/res4d.nii.gz'
        The template can identify a larger set of files, and the subject_list
        will grab a subset.
            e.g. The template may grab sub-001, sub-002, sub-003 ...
            But if the subject_list only includes sub-001, then only sub-001
            will be used. This means the template can overgeneralize, but
            specific subjects can be easily excluded (e.g. for movement)
    warp_template: string to identify all dartel flowfield files (using glob).
        same as above.
        Dartel flowfield files are made by create_DARTEL_wf, also see
        jtnipyutil.fsmap.make_PAG_masks, and
        jtnipyutil.fsmap.create_aqueduct_template
    work_dir: string naming directory to store work.
    out_dir: string naming directory for output.

    Returns a nipype Workflow; the caller is expected to run it.
    '''
    import os
    import nipype.pipeline.engine as pe
    from nipype import IdentityInterface
    from nipype.interfaces.io import DataSink
    from nipype.interfaces.utility.wrappers import Function
    from nipype.interfaces.spm.preprocess import CreateWarped
    from jtnipyutil.util import files_from_template

    # create working/output directories if necessary.
    for target_dir in (work_dir, out_dir):
        if not os.path.isdir(target_dir):
            os.makedirs(target_dir)

    # set up data warp workflow
    apply_warp_wf = pe.Workflow(name='apply_warp_wf')
    apply_warp_wf.base_dir = work_dir

    # Resolve the data file list once; reused below for the gunzip check
    # instead of globbing the filesystem a second time.
    data_files = files_from_template(subj_list, data_template)

    # set up file lists
    inputspec = pe.Node(IdentityInterface(fields=['file_list', 'warp_list']),
                        name='inputspec')
    inputspec.inputs.file_list = data_files
    inputspec.inputs.warp_list = files_from_template(subj_list, warp_template)

    # rename files, as names are often indistinguishable (e.g. res4d.nii.gz)
    # NOTE: rename_list is serialized by nipype's Function node, so its
    # imports must stay inside the function body.
    def rename_list(in_list):
        import nibabel as nib
        import os
        out_list = []
        for file in in_list:
            # Build the new name from the last three path components so that
            # otherwise-identical basenames stay distinguishable.
            out_file = os.path.join(os.getcwd(), '_'.join(file.split('/')[-3:]))
            nib.save(nib.load(file), out_file)
            out_list.append(out_file)
        return out_list

    rename = pe.Node(Function(input_names=['in_list'],
                              output_names=['out_list'],
                              function=rename_list),
                     name='rename')

    # dartel warping node.
    warp_data = pe.Node(interface=CreateWarped(), name='warp_data')
    # warp_data.inputs.image_files = # from inputspec OR gunzip
    # warp_data.inputs.flowfield_files = # from inputspec

    sinker = pe.Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = out_dir

    apply_warp_wf.connect([
        (inputspec, rename, [('file_list', 'in_list')]),
        (inputspec, warp_data, [('warp_list', 'flowfield_files')]),
        (warp_data, sinker, [('warped_files', 'warped_files')])
    ])

    # check if unzipping is necessary (presumably SPM needs uncompressed
    # .nii input — TODO confirm against CreateWarped requirements).
    if any('nii.gz' in file for file in data_files):
        from nipype.algorithms.misc import Gunzip
        gunzip = pe.MapNode(interface=Gunzip(), name='gunzip',
                            iterfield=['in_file'])
        apply_warp_wf.connect([
            (rename, gunzip, [('out_list', 'in_file')]),
            (gunzip, warp_data, [('out_file', 'image_files')])
        ])
    else:
        apply_warp_wf.connect([(rename, warp_data,
                                [('out_list', 'image_files')])])
    return apply_warp_wf