def tourette_reconall(population, data_dir, freesurfer_dir):
    """Run FreeSurfer recon-all for every subject of *population*.

    For each subject the already skull-stripped T1 (``MP2RAGE_BRAIN.nii``)
    is fed to a two-stage recon-all: ``autorecon1`` with ``-noskullstrip``,
    then the brainmask files recon-all would have produced are symlinked to
    ``T1.mgz`` and the remaining stages are resumed.  Subjects whose
    ``aparc.a2009s+aseg.mgz`` already exists are skipped; an incomplete
    FreeSurfer directory is deleted and redone.

    Parameters
    ----------
    population : iterable of str
        Subject identifiers.
    data_dir : str
        Root directory holding ``<subject>/NIFTI/MP2RAGE_BRAIN.nii``.
    freesurfer_dir : str
        FreeSurfer SUBJECTS_DIR where results are written.

    Raises
    ------
    ValueError
        If a subject has no skull-stripped ``MP2RAGE_BRAIN.nii``.
    """
    import shutil  # local import: only needed for stale-run cleanup

    for subject in population:
        print('========================================================================================')
        print(' Runnning FREESURFER reconall on subject %s' % subject)
        print('========================================================================================')
        brain = os.path.join(data_dir, subject, 'NIFTI', 'MP2RAGE_BRAIN.nii')
        if os.path.isfile(brain):
            if os.path.isfile(os.path.join(freesurfer_dir, subject, 'mri',
                                           'aparc.a2009s+aseg.mgz')):
                print('Brain already segmented......... moving on')
                print('check data here ---> %s' % (os.path.join(freesurfer_dir, subject)))
            else:
                fs_subdir = os.path.join(freesurfer_dir, subject)
                print('recon all not complete.. deleting incomplete fs_subdir')
                # shutil.rmtree instead of os.system("rm -rf ..."): no shell,
                # no injection risk; ignore_errors keeps the old best-effort
                # behavior when the directory does not exist.
                shutil.rmtree(fs_subdir, ignore_errors=True)
                print('Running recon-all')

                # =========================
                #   Freesurfer Reconall
                # =========================
                # Stage 1: autorecon1 without skull stripping (the input
                # volume is already brain-extracted).
                autorecon1 = fs.ReconAll()
                autorecon1.plugin_args = {'submit_specs': 'request_memory = 4000'}
                autorecon1.inputs.T1_files = brain
                autorecon1.inputs.directive = "autorecon1"
                autorecon1.inputs.args = "-noskullstrip"
                autorecon1.inputs.subject_id = subject
                autorecon1.inputs.subjects_dir = freesurfer_dir
                autorecon1.run()

                # The skull strip was skipped, so create the brainmask files
                # recon-all expects by linking them to the conformed T1.
                os.symlink(
                    os.path.join(freesurfer_dir, subject, "mri", "T1.mgz"),
                    os.path.join(freesurfer_dir, subject, "mri",
                                 "brainmask.auto.mgz"))
                os.symlink(
                    os.path.join(freesurfer_dir, subject, "mri",
                                 "brainmask.auto.mgz"),
                    os.path.join(freesurfer_dir, subject, "mri",
                                 "brainmask.mgz"))

                # Stage 2: resume every remaining recon-all step.
                autorecon_resume = fs.ReconAll()
                autorecon_resume.plugin_args = {'submit_specs': 'request_memory = 4000'}
                autorecon_resume.inputs.args = "-no-isrunning"
                autorecon_resume.inputs.subject_id = subject
                autorecon_resume.inputs.subjects_dir = freesurfer_dir
                autorecon_resume.run()
        else:
            print('Deskull brain before and then come back')
            # BUG FIX: original message had an unfilled '%s' placeholder.
            raise ValueError(
                'MP2RAGE_BRAIN.nii file for subject %s does not exist'
                % subject)
def make_freesurfer(self):
    """Assemble the FreeSurfer sub-workflow.

    recon-all is split into its three stages so a corrected brainmask can
    be injected between autorecon1 and autorecon2; hippocampal subfields
    are segmented at the end.

    Ref: http://nipype.readthedocs.io/en/1.0.4/interfaces/generated/interfaces.freesurfer/preprocess.html#reconall
    """
    nthreads = self.omp_nthreads

    # Stage 1 of recon-all (motion correction, conform, normalization).
    fs_recon1 = Node(interface=fs.ReconAll(directive='autorecon1',
                                           mris_inflate='-n 15',
                                           hires=True,
                                           mprage=True,
                                           openmp=nthreads),
                     name='fs_recon1', n_procs=nthreads)

    fs_mriconv = Node(interface=fs.MRIConvert(out_type='mgz'),
                      name='fs_mriconv')
    fs_vol2vol = Node(interface=fs.ApplyVolTransform(mni_152_reg=True),
                      name='fs_vol2vol')
    fs_mrimask = Node(interface=fs.ApplyMask(), name='fs_mrimask')

    # Stages 2 and 3 of recon-all.
    fs_recon2 = Node(interface=fs.ReconAll(directive='autorecon2',
                                           hires=True,
                                           mprage=True,
                                           hippocampal_subfields_T1=False,
                                           openmp=nthreads),
                     name='fs_recon2', n_procs=nthreads)
    fs_recon3 = Node(interface=fs.ReconAll(directive='autorecon3',
                                           hires=True,
                                           mprage=True,
                                           hippocampal_subfields_T1=False,
                                           openmp=nthreads),
                     name='fs_recon3', n_procs=nthreads)

    copy_brainmask = Node(Function(['in_file', 'fs_dir'], ['fs_dir'],
                                   self.copy_mask),
                          name='copy_brainmask')
    segment_hp = Node(interface=SegmentHA_T1(), name='segment_hp')

    freesurfer = Workflow(name='freesurfer', base_dir=self.temp_dir)
    freesurfer.connect([
        (fs_recon1, fs_vol2vol, [('T1', 'target_file')]),
        (fs_mriconv, fs_vol2vol, [('out_file', 'source_file')]),
        (fs_recon1, fs_mrimask, [('T1', 'in_file')]),
        (fs_vol2vol, fs_mrimask, [('transformed_file', 'mask_file')]),
        (fs_mrimask, copy_brainmask, [('out_file', 'in_file')]),
        (fs_recon1, copy_brainmask, [('subjects_dir', 'fs_dir')]),
        (copy_brainmask, fs_recon2, [('fs_dir', 'subjects_dir')]),
        (fs_recon2, fs_recon3, [('subjects_dir', 'subjects_dir')]),
        (fs_recon3, segment_hp, [('subjects_dir', 'subjects_dir')]),
    ])
    return freesurfer
def _generate_segment(self):
    """Render the subject summary HTML fragment for the report."""
    if isdefined(self.inputs.subjects_dir):
        # nipype's ReconAll degenerates to an 'echo' no-op command line
        # when the subject directory already holds finished results.
        recon = fs.ReconAll(
            subjects_dir=self.inputs.subjects_dir,
            subject_id=self.inputs.subject_id,
            T1_files=self.inputs.t1w,
            flags="-noskullstrip",
        )
        reused = recon.cmdline.startswith("echo")
        freesurfer_status = "Pre-existing directory" if reused else "Run by sMRIPrep"
    else:
        freesurfer_status = "Not run"

    t2w_seg = ("(+ {:d} T2-weighted)".format(len(self.inputs.t2w))
               if self.inputs.t2w else "")

    spaces = self.inputs.output_spaces
    output_spaces = ", ".join(spaces) if isdefined(spaces) else "<none given>"

    return SUBJECT_TEMPLATE.format(
        subject_id=self.inputs.subject_id,
        n_t1s=len(self.inputs.t1w),
        t2w=t2w_seg,
        output_spaces=output_spaces,
        freesurfer_status=freesurfer_status,
    )
def _generate_segment(self):
    """Render the subject summary HTML fragment for the report.

    Fills SUBJECT_TEMPLATE with T1/T2 counts, the volume-template list,
    the freesurfer output spaces and the recon-all status.
    """
    # BUG FIX: 'template' was only assigned inside the isdefined() branch,
    # raising NameError at the final .format() call whenever
    # self.inputs.template was undefined.
    template = ''
    if isdefined(self.inputs.template):
        template = "\t\t<li>Volume standard spaces: {}</li>\n".format(
            ', '.join(self.inputs.template) if isinstance(
                self.inputs.template, list) else self.inputs.template)
    if not isdefined(self.inputs.subjects_dir):
        freesurfer_status = 'Not run'
    else:
        # nipype's ReconAll emits an 'echo' no-op command line when the
        # subject directory already contains finished results.
        recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                            subject_id=self.inputs.subject_id,
                            T1_files=self.inputs.t1w,
                            flags='-noskullstrip')
        if recon.cmdline.startswith('echo'):
            freesurfer_status = 'Pre-existing directory'
        else:
            freesurfer_status = 'Run by sMRIPrep'
    t2w_seg = ''
    if self.inputs.t2w:
        t2w_seg = '(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))
    return SUBJECT_TEMPLATE.format(subject_id=self.inputs.subject_id,
                                   n_t1s=len(self.inputs.t1w),
                                   t2w=t2w_seg,
                                   template=template,
                                   fs_spaces=', '.join(
                                       self.inputs.fs_spaces),
                                   freesurfer_status=freesurfer_status)
def create_reconall_pipeline(name='reconall'):
    """Build a workflow running FreeSurfer recon-all on follow-up scans.

    The subject id is suffixed with ``_fu`` so follow-up results can live
    beside the baseline LIFE freesurfer data in the same SUBJECTS_DIR.
    """
    wf = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(
        fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    def rename_subject_for_fu(input_id):
        # suffix so follow-up data does not collide with baseline data
        return input_id + "_fu"

    rename = Node(util.Function(input_names=['input_id'],
                                output_names=['output_id'],
                                function=rename_subject_for_fu),
                  name="rename")

    # FS version 6.0: -hippocampal-subfields-T1, version 5.3: -hippo-subfields
    recon_all = Node(fs.ReconAll(args='-all -hippo-subfields -no-isrunning',
                                 openmp=24),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    wf.connect(inputnode, 'fs_subject_id', rename, 'input_id')
    wf.connect(rename, 'output_id', recon_all, 'subject_id')
    wf.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    wf.connect(inputnode, 'anat', recon_all, 'T1_files')
    wf.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    wf.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')
    return wf
def create_reconall_pipeline(name='reconall_wf'):
    """Build a nipype workflow that runs FreeSurfer recon-all (-all).

    Inputs (inputnode): anat, fs_subjects_dir, fs_subject_id.
    Outputs (outputnode): fs_subjects_dir, fs_subject_id.
    """
    reconall_wf = Workflow(name='reconall_wf')
    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')
    # run reconall
    reconall = Node(
        fs.ReconAll(args='-all -no-isrunning',
                    openmp=8),  # subfield segmentation after recon-all
        name="reconall")
    # BUG FIX: plugin_args belong on the node so the execution plugin
    # requests memory for the recon-all job; setting them on the Workflow
    # object (as before) had no effect.  Matches the sibling pipelines.
    reconall.plugin_args = {'submit_specs': 'request_memory = 9000'}
    reconall_wf.connect([
        (inputnode, reconall, [('fs_subject_id', 'subject_id')]),
        (inputnode, reconall, [('fs_subjects_dir', 'subjects_dir'),
                               ('anat', 'T1_files')]),
        (reconall, outputnode, [('subject_id', 'fs_subject_id'),
                                ('subjects_dir', 'fs_subjects_dir')])
    ])
    return reconall_wf
def _generate_segment(self):
    """Render the subject summary HTML fragment for the dMRIPrep report."""
    if isdefined(self.inputs.subjects_dir):
        # An 'echo' command line means recon-all would be skipped because
        # finished outputs already exist in subjects_dir.
        recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                            subject_id=self.inputs.subject_id,
                            T1_files=self.inputs.t1w,
                            flags='-noskullstrip')
        freesurfer_status = ('Pre-existing directory'
                             if recon.cmdline.startswith('echo')
                             else 'Run by dMRIPrep')
    else:
        freesurfer_status = 'Not run'

    t2w_seg = ('(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))
               if self.inputs.t2w else '')

    raw_dwi = self.inputs.dwi if isdefined(self.inputs.dwi) else []
    # list entries collapse to their first element for counting
    dwi_files = [entry[0] if isinstance(entry, list) else entry
                 for entry in raw_dwi]

    return SUBJECT_TEMPLATE.format(
        subject_id=self.inputs.subject_id,
        n_t1s=len(self.inputs.t1w),
        t2w=t2w_seg,
        n_dwi=len(dwi_files),
        std_spaces=', '.join(self.inputs.std_spaces),
        nstd_spaces=', '.join(self.inputs.nstd_spaces),
        freesurfer_status=freesurfer_status)
def create_reconall_pipeline(name='reconall'):
    """Wrap FreeSurfer recon-all (-nuiterations 7) in a nipype workflow."""
    wf = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(
        fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    def sub_id(sub_id):
        # subject ids may contain '/'; FreeSurfer needs a flat directory name
        return sub_id.replace('/', '_')

    wf.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    wf.connect(inputnode, 'anat', recon_all, 'T1_files')
    wf.connect(inputnode, ('fs_subject_id', sub_id), recon_all, 'subject_id')
    wf.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    wf.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')
    return wf
def create_align_to_anatomy_workflow(name='align_to_anatomy',
                                     format_string='inplane_to_anatomy'):
    """Build a workflow registering an inplane volume to an anatomy.

    The inplane image is skull stripped with recon-all (autorecon1 only),
    robust-registered to the anatomy, and the resulting transform is
    converted and renamed.
    """
    wf = pe.Workflow(name=name)

    inputs = pe.Node(interface=util.IdentityInterface(
        fields=['inplane_file', 'anatomy_file']),
        name='inputs')

    # autorecon1 only: used here purely for skull stripping
    strip = pe.Node(interface=fs.ReconAll(),
                    name='strip')  # FIXME: reconall interface barfs if rerun
    strip.inputs.directive = 'autorecon1'
    strip.inputs.flags = '-nowsgcaatlas'

    register = pe.Node(interface=fs.RobustRegister(), name='register')
    register.inputs.auto_sens = True
    # register.inputs.init_orient = True  # FIXME: disabled due to bug in binary

    convert_xfm = pe.Node(interface=nmutil.LtaToXfm(), name='convert_xfm')

    rename_xfm = pe.Node(interface=util.Rename(format_string),
                         name='rename_xfm')
    rename_xfm.inputs.keep_ext = True

    outputs = pe.Node(interface=util.IdentityInterface(
        fields=['xfm_file', 'strip_file']),
        name='outputs')

    wf.connect([
        (inputs, strip, [('inplane_file', 'T1_files')]),
        (strip, register, [('brainmask', 'source_file')]),
        (inputs, register, [('anatomy_file', 'target_file')]),
        (register, convert_xfm, [('out_reg_file', 'in_file')]),
        (convert_xfm, rename_xfm, [('out_file', 'in_file')]),
        (rename_xfm, outputs, [('out_file', 'xfm_file')]),
        (strip, outputs, [('brainmask', 'strip_file')]),
    ])
    return wf
def create_reconall_pipeline(name='reconall'):
    """Wrap FreeSurfer recon-all (-all, hippocampal subfields) in a workflow."""
    wf = Workflow(name='reconall')

    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    outputnode = Node(util.IdentityInterface(
        fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    # for RSV152 took out s because of preprocessing with version 6.0
    recon_all = Node(fs.ReconAll(args='-all -hippo-subfields -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    def sub_id(sub_id):
        # subject ids may contain '/'; FreeSurfer needs a flat directory name
        return sub_id.replace('/', '_')

    wf.connect(inputnode, 'fs_subjects_dir', recon_all, 'subjects_dir')
    wf.connect(inputnode, 'anat', recon_all, 'T1_files')
    wf.connect(inputnode, ('fs_subject_id', sub_id), recon_all, 'subject_id')
    wf.connect(recon_all, 'subject_id', outputnode, 'fs_subject_id')
    wf.connect(recon_all, 'subjects_dir', outputnode, 'fs_subjects_dir')
    return wf
def _generate_segment(self):
    """Build the subject-summary HTML fragment for the qsiprep report."""
    if not isdefined(self.inputs.subjects_dir):
        freesurfer_status = 'Not run'
    else:
        # recon-all's command line degenerates to an 'echo' no-op when the
        # subject directory already holds finished results.
        recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                            subject_id=self.inputs.subject_id,
                            T1_files=self.inputs.t1w,
                            flags='-noskullstrip')
        if recon.cmdline.startswith('echo'):
            freesurfer_status = 'Pre-existing directory'
        else:
            freesurfer_status = 'Run by qsiprep'
    # Substitute the concrete template name for the literal 'template' token.
    output_spaces = [
        self.inputs.template if space == 'template' else space
        for space in self.inputs.output_spaces
    ]
    t2w_seg = ''
    if self.inputs.t2w:
        t2w_seg = '(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))
    # Add text for how the dwis are grouped
    n_dwis = 0       # total input files (DWI series + fieldmap entries)
    n_outputs = 0    # number of output scan groups
    groupings = ''
    if isdefined(self.inputs.dwi_groupings):
        for output_fname, group_info in self.inputs.dwi_groupings.items():
            n_outputs += 1
            files_desc = []
            files_desc.append(
                '\t\t\t<li>Scan group: %s (PE Dir %s)</li><ul>' %
                (output_fname, group_info['dwi_series_pedir']))
            files_desc.append('\t\t\t\t<li>DWI Files: </li>')
            for dwi_file in group_info['dwi_series']:
                files_desc.append("\t\t\t\t\t<li> %s </li>" % dwi_file)
                n_dwis += 1
            fieldmap_type = group_info['fieldmap_info']['suffix']
            if fieldmap_type is not None:
                files_desc.append('\t\t\t\t<li>Fieldmap type: %s </li>' %
                                  fieldmap_type)
                # every fieldmap metadata entry is counted as an input file
                for key, value in group_info['fieldmap_info'].items():
                    files_desc.append("\t\t\t\t\t<li> %s: %s </li>" %
                                      (key, str(value)))
                    n_dwis += 1
            files_desc.append("</ul>")
            groupings += GROUPING_TEMPLATE.format(
                output_name=output_fname,
                input_files='\n'.join(files_desc))
    return SUBJECT_TEMPLATE.format(subject_id=self.inputs.subject_id,
                                   n_t1s=len(self.inputs.t1w),
                                   t2w=t2w_seg,
                                   n_dwis=n_dwis,
                                   n_outputs=n_outputs,
                                   groupings=groupings,
                                   output_spaces=', '.join(output_spaces),
                                   freesurfer_status=freesurfer_status)
def _generate_segment(self):
    """Build the subject-summary HTML fragment for the fMRIPrep report."""
    # Regex extracting BIDS entities (subject/session/task/acq/rec/run)
    # from a BOLD file path; only task_id is consumed below.
    BIDS_NAME = re.compile(r'^(.*\/)?'
                           '(?P<subject_id>sub-[a-zA-Z0-9]+)'
                           '(_(?P<session_id>ses-[a-zA-Z0-9]+))?'
                           '(_(?P<task_id>task-[a-zA-Z0-9]+))?'
                           '(_(?P<acq_id>acq-[a-zA-Z0-9]+))?'
                           '(_(?P<rec_id>rec-[a-zA-Z0-9]+))?'
                           '(_(?P<run_id>run-[a-zA-Z0-9]+))?')

    if not isdefined(self.inputs.subjects_dir):
        freesurfer_status = 'Not run'
    else:
        # recon-all's command line degenerates to an 'echo' no-op when the
        # subject directory already holds finished results.
        recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                            subject_id=self.inputs.subject_id,
                            T1_files=self.inputs.t1w,
                            flags='-noskullstrip')
        if recon.cmdline.startswith('echo'):
            freesurfer_status = 'Pre-existing directory'
        else:
            freesurfer_status = 'Run by fMRIPrep'

    t2w_seg = ''
    if self.inputs.t2w:
        t2w_seg = '(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))

    # Add list of tasks with number of runs
    bold_series = self.inputs.bold if isdefined(self.inputs.bold) else []
    # list entries collapse to their first element
    bold_series = [s[0] if isinstance(s, list) else s for s in bold_series]
    # Count runs per task; [5:] strips the 'task-' prefix from the entity.
    counts = Counter(
        BIDS_NAME.search(series).groupdict()['task_id'][5:]
        for series in bold_series)
    tasks = ''
    if counts:
        header = '\t\t<ul class="elem-desc">'
        footer = '\t\t</ul>'
        lines = [
            '\t\t\t<li>Task: {task_id} ({n_runs:d} run{s})</li>'.format(
                task_id=task_id, n_runs=n_runs,
                s='' if n_runs == 1 else 's')
            for task_id, n_runs in sorted(counts.items())
        ]
        tasks = '\n'.join([header] + lines + [footer])

    return SUBJECT_TEMPLATE.format(
        subject_id=self.inputs.subject_id,
        n_t1s=len(self.inputs.t1w),
        t2w=t2w_seg,
        n_bold=len(bold_series),
        tasks=tasks,
        std_spaces=', '.join(self.inputs.std_spaces),
        nstd_spaces=', '.join(self.inputs.nstd_spaces),
        freesurfer_status=freesurfer_status)
def create_workflow(self, flow, inputnode, outputnode):
    """Wire the FreeSurfer segmentation stage into *flow*.

    Converts the T1 to mgz, copies it as <subject>/mri/orig/001.mgz and
    runs a full recon-all; only the seg_tool == "Freesurfer" branch builds
    anything.
    """
    if self.config.seg_tool == "Freesurfer":
        # Converting to .mgz format
        fs_mriconvert = pe.Node(interface=fs.MRIConvert(out_type="mgz",
                                                        out_file="T1.mgz"),
                                name="mgz_convert")
        if self.config.make_isotropic:
            # resample to isotropic voxels before recon-all
            fs_mriconvert.inputs.vox_size = (
                self.config.isotropic_vox_size,
                self.config.isotropic_vox_size,
                self.config.isotropic_vox_size)
            fs_mriconvert.inputs.resample_type = self.config.isotropic_interpolation

        rename = pe.Node(util.Rename(), name="copy_orig")
        orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                "mri", "orig")
        if not os.path.exists(orig_dir):
            os.makedirs(orig_dir)
            print "Folder not existing; %s created!" % orig_dir
        # recon-all expects the input volume at mri/orig/001.mgz
        rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

        # ReconAll => named outputnode as we don't want to select a specific output....
        fs_reconall = pe.Node(interface=fs.ReconAll(
            flags='-no-isrunning -parallel -openmp {}'.format(
                self.config.fs_number_of_cores)),
            name="reconall")
        fs_reconall.inputs.directive = 'all'
        #fs_reconall.inputs.args = self.config.freesurfer_args
        #fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
        fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir
        # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
        # fs_reconall.inputs.brainstem = self.config.segment_brainstem

        def isavailable(file):
            # passthrough used in the connect below; the print only
            # confirms the T1 input reached the workflow
            print "T1 is available"
            return file

        flow.connect([
            (inputnode, fs_mriconvert, [(('T1', isavailable), 'in_file')]),
            (fs_mriconvert, rename, [('out_file', 'in_file')]),
            (rename, fs_reconall,
             [(("out_file", extract_base_directory), "subject_id")]),
            (fs_reconall, outputnode, [('subjects_dir', 'subjects_dir'),
                                       ('subject_id', 'subject_id')]),
        ])
def test_reconall():
    """Check ReconAll input-trait metadata against the expected argstr values."""
    expected_metadata = {
        'T1_files': {'argstr': '-i %s...'},
        'args': {'argstr': '%s'},
        'directive': {'argstr': '-%s'},
        'environ': {},
        'flags': {'argstr': '%s'},
        'hemi': {},
        'subject_id': {'argstr': '-subjid %s'},
        'subjects_dir': {'argstr': '-sd %s'},
    }
    instance = freesurfer.ReconAll()
    traits = instance.inputs.traits()
    for key, metadata in expected_metadata.items():
        for metakey, value in metadata.items():
            # nose-style generator test: one yielded check per metadata item
            yield assert_equal, getattr(traits[key],
                                        metakey), value, "key = %s" % key
def create_workflow(self, flow, inputnode, outputnode):
    """Wire the segmentation stage: run FreeSurfer recon-all, reuse
    existing FreeSurfer outputs, or forward a custom white-matter mask."""
    if self.config.seg_tool == "Freesurfer":
        if self.config.use_existing_freesurfer_data == False:
            # Converting to .mgz format
            fs_mriconvert = pe.Node(interface=fs.MRIConvert(
                out_type="mgz", out_file="T1.mgz"),
                name="mgz_convert")
            rename = pe.Node(util.Rename(), name="copy_orig")
            orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                    "mri", "orig")
            if not os.path.exists(orig_dir):
                os.makedirs(orig_dir)
                print "Folder not existing; %s created!" % orig_dir
            # recon-all expects the input volume at mri/orig/001.mgz
            rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

            # ReconAll => named outputnode as we don't want to select a specific output....
            fs_reconall = pe.Node(
                interface=fs.ReconAll(flags='-no-isrunning'),
                name="reconall")
            fs_reconall.inputs.args = self.config.freesurfer_args
            #fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
            fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir

            def isavailable(file):
                # passthrough; the print only signals the T1 arrived
                print "T1 is available"
                return file

            flow.connect([
                (inputnode, fs_mriconvert, [(('T1', isavailable), 'in_file')]),
                (fs_mriconvert, rename, [('out_file', 'in_file')]),
                (rename, fs_reconall,
                 [(("out_file", extract_base_directory), "subject_id")]),
                (fs_reconall, outputnode, [('subjects_dir', 'subjects_dir'),
                                           ('subject_id', 'subject_id')]),
            ])
        else:
            # reuse previously computed FreeSurfer outputs untouched
            outputnode.inputs.subjects_dir = self.config.freesurfer_subjects_dir
            outputnode.inputs.subject_id = self.config.freesurfer_subject_id
    elif self.config.seg_tool == "Custom segmentation":
        outputnode.inputs.custom_wm_mask = self.config.white_matter_mask
def _generate_segment(self):
    """Render the subject summary HTML fragment for the sMRIPrep report."""
    if isdefined(self.inputs.subjects_dir):
        # An 'echo' command line means recon-all would be skipped because
        # finished outputs already exist in subjects_dir.
        recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                            subject_id=self.inputs.subject_id,
                            T1_files=self.inputs.t1w,
                            flags='-noskullstrip')
        freesurfer_status = ('Pre-existing directory'
                             if recon.cmdline.startswith('echo')
                             else 'Run by sMRIPrep')
    else:
        freesurfer_status = 'Not run'

    t2w_seg = ('(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))
               if self.inputs.t2w else '')

    return SUBJECT_TEMPLATE.format(
        subject_id=self.inputs.subject_id,
        n_t1s=len(self.inputs.t1w),
        t2w=t2w_seg,
        output_spaces=', '.join(self.inputs.output_spaces),
        freesurfer_status=freesurfer_status)
def nipype_reconall(t1path, t2path=None):
    """Reorient the anatomical image(s) to standard orientation with FSL
    and run FreeSurfer recon-all; return the recon-all command line.

    When PROCESS is false the interfaces are only configured, not run.
    """
    parts = t1path.split(os.sep)

    # Reorientation using FSL
    reodir = os.path.join(BIDS_DATA_DIR, "derivatives",
                          "reorient_{0}".format(parts[-3]), parts[-4])
    if not os.path.isdir(reodir):
        os.makedirs(reodir)

    reopath = []
    for src in (t1path, t2path):
        if src is None:
            reopath.append(None)
            continue
        dst = os.path.join(reodir, os.path.basename(src))
        reopath.append(dst)
        reorient = fsl.Reorient2Std(in_file=src, out_file=dst)
        if PROCESS:
            reorient.run()
    t1path, t2path = reopath

    # Segmentation using FreeSurfer
    fsdir = os.path.join(BIDS_DATA_DIR, "derivatives",
                         "freesurfer_{0}".format(parts[-3]))
    if not os.path.isdir(fsdir):
        os.makedirs(fsdir)
    reconall = freesurfer.ReconAll(subject_id=parts[-4],
                                   directive="all",
                                   subjects_dir=fsdir,
                                   T1_files=t1path)
    if t2path is not None:
        # hand the reoriented T2 to recon-all as well
        reconall.inputs.T2_file = t2path
        reconall.inputs.use_T2 = True
    if PROCESS:
        reconall.run()
    return reconall.cmdline
def cortical_recon(self, filepath=None):
    """Kick off FreeSurfer cortical reconstruction for *filepath*.

    Parameters
    ----------
    filepath : str, optional
        Data directory; falls back to ``self._data_dir`` when omitted.
    """
    if filepath is None:  # FIX: identity comparison instead of '== None'
        filepath = self._data_dir
    # NOTE(review): ReconAll is constructed but never .run(), and the path
    # is passed positionally (nipype CommandLine's first positional
    # parameter is 'command', not an input file) — confirm intended usage
    # against callers before relying on this.
    freesurfer.ReconAll(filepath)
def create_skullstripped_recon_flow(name="skullstripped_recon_all"):
    """Performs recon-all on volumes that are already skull stripped.
    FreeSurfer fails to perform skullstripping on some volumes (especially
    MP2RAGE). This can be avoided by doing skullstripping before running
    recon-all (using for example SPECTRE algorithm).

    Example
    -------
    >>> from nipype.workflows.smri.freesurfer import create_skullstripped_recon_flow
    >>> recon_flow = create_skullstripped_recon_flow()
    >>> recon_flow.inputs.inputspec.subject_id = 'subj1'
    >>> recon_flow.inputs.inputspec.T1_files = 'T1.nii.gz'
    >>> recon_flow.run() # doctest: +SKIP


    Inputs::
           inputspec.T1_files : skullstripped T1_files (mandatory)
           inputspec.subject_id : freesurfer subject id (optional)
           inputspec.subjects_dir : freesurfer subjects directory (optional)

    Outputs::
           outputspec.subject_id : freesurfer subject id
           outputspec.subjects_dir : freesurfer subjects directory
    """
    wf = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['subject_id', 'subjects_dir', 'T1_files']),
        name='inputspec')

    # Stage 1: autorecon1 with the skull strip disabled — the inputs are
    # already brain-extracted.
    autorecon1 = pe.Node(fs.ReconAll(directive="autorecon1",
                                     args="-noskullstrip"),
                         name="autorecon1")
    autorecon1.plugin_args = {'submit_specs': 'request_memory = 2500'}
    autorecon1._interface._can_resume = False

    def link_masks(subjects_dir, subject_id):
        # Provide the brainmask files the skipped skull strip would have
        # produced by linking them to the conformed T1.
        import os
        mri_dir = os.path.join(subjects_dir, subject_id, "mri")
        os.symlink(os.path.join(mri_dir, "T1.mgz"),
                   os.path.join(mri_dir, "brainmask.auto.mgz"))
        os.symlink(os.path.join(mri_dir, "brainmask.auto.mgz"),
                   os.path.join(mri_dir, "brainmask.mgz"))
        return subjects_dir, subject_id

    masks = pe.Node(
        niu.Function(input_names=['subjects_dir', 'subject_id'],
                     output_names=['subjects_dir', 'subject_id'],
                     function=link_masks),
        name="link_masks")

    # Stage 2: resume the remaining recon-all steps.
    autorecon_resume = pe.Node(fs.ReconAll(args="-no-isrunning"),
                               name="autorecon_resume")
    autorecon_resume.plugin_args = {'submit_specs': 'request_memory = 2500'}

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'subjects_dir']),
        name='outputspec')

    wf.connect([
        (inputnode, autorecon1, [("T1_files", "T1_files"),
                                 ("subjects_dir", "subjects_dir"),
                                 ("subject_id", "subject_id")]),
        (autorecon1, masks, [("subjects_dir", "subjects_dir"),
                             ("subject_id", "subject_id")]),
        (masks, autorecon_resume, [("subjects_dir", "subjects_dir"),
                                   ("subject_id", "subject_id")]),
        (autorecon_resume, outputnode, [("subjects_dir", "subjects_dir"),
                                        ("subject_id", "subject_id")]),
    ])
    return wf
def create_workflow(self, flow, inputnode, outputnode):
    """Create the stage workflow.

    Parameters
    ----------
    flow : nipype.pipeline.engine.Workflow
        The nipype.pipeline.engine.Workflow instance of the anatomical pipeline

    inputnode : nipype.interfaces.utility.IdentityInterface
        Identity interface describing the inputs of the segmentation stage

    outputnode : nipype.interfaces.utility.IdentityInterface
        Identity interface describing the outputs of the segmentation stage
    """
    if self.config.seg_tool == "Freesurfer":

        # Remap configured paths into the container's /output_dir when the
        # code runs inside the BIDS-App container (presumably; the
        # /output_dir probe is the only evidence — confirm against the
        # container entrypoint).
        def correct_freesurfer_subjectid_path(path):
            if os.path.exists('/output_dir') and '/output_dir' not in path:
                subject_id = path.split(f"{__freesurfer_directory__}/")[-1]
                path = os.path.abspath(f'/output_dir/{__freesurfer_directory__}/{subject_id}')
            return path

        def correct_freesurfer_subjects_path(path):
            if os.path.exists('/output_dir') and '/output_dir' not in path:
                path = os.path.abspath(f'/output_dir/{__freesurfer_directory__}')
            return path

        orig_dir = os.path.join(
            correct_freesurfer_subjectid_path(self.config.freesurfer_subject_id),
            "mri", "orig"
        )
        print(f'INFO : orig_dir = {orig_dir}')

        # Skip Freesurfer recon-all if 001.mgz exists which typically means it has been already run
        self.config.use_existing_freesurfer_data = True if os.path.exists(orig_dir) else False
        print(f'INFO : orig_dir exists? {self.config.use_existing_freesurfer_data}')

        if self.config.use_existing_freesurfer_data is False:
            # Converting to .mgz format
            fs_mriconvert = pe.Node(
                interface=fs.MRIConvert(out_type="mgz", out_file="T1.mgz"),
                name="mgzConvert",
            )
            if self.config.make_isotropic:
                # resample to isotropic voxels before recon-all
                fs_mriconvert.inputs.vox_size = (
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size,
                )
                fs_mriconvert.inputs.resample_type = (
                    self.config.isotropic_interpolation
                )
            rename = pe.Node(Rename001(), name="copyOrig")
            if not os.path.exists(orig_dir):
                print(f'INFO : Create folder: {orig_dir}')
                os.makedirs(orig_dir)
            # recon-all expects its input volume at mri/orig/001.mgz
            rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

            if self.config.brain_mask_extraction_tool == "Freesurfer":
                # ReconAll => named outputnode as we don't want to select a specific output....
                fs_reconall = pe.Node(
                    interface=fs.ReconAll(
                        flags=f'-no-isrunning -parallel -openmp {self.config.number_of_threads}'
                    ),
                    name="reconall",
                )
                fs_reconall.inputs.directive = "all"
                fs_reconall.inputs.args = self.config.freesurfer_args
                # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set
                # in cmp/pipelines/diffusion/diffusion.py
                fs_reconall.inputs.subjects_dir = (
                    correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                )
                # fmt: off
                flow.connect(
                    [
                        (inputnode, fs_mriconvert, [(("T1", isavailable), "in_file")]),
                        (fs_mriconvert, rename, [("out_file", "in_file")]),
                        (rename, fs_reconall, [(("out_file", extract_reconall_base_dir), "subject_id")]),
                        (fs_reconall, outputnode, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]),
                    ]
                )
                # fmt: on
            else:
                # External brain extraction (BET or ANTs): run autorecon1
                # first, extract the mask, copy it into the FreeSurfer
                # tree, then resume with autorecon2 + autorecon3.
                # ReconAll => named outputnode as we don't want to select a specific output....
                fs_autorecon1 = pe.Node(
                    interface=fs.ReconAll(
                        flags="-no-isrunning -parallel -openmp {}".format(
                            self.config.number_of_threads
                        )
                    ),
                    name="autorecon1",
                )
                fs_autorecon1.inputs.directive = "autorecon1"

                if self.config.brain_mask_extraction_tool == "ANTs":
                    # ANTs provides the skull strip, so disable FreeSurfer's
                    fs_autorecon1.inputs.flags = (
                        "-no-isrunning -noskullstrip -parallel -openmp {}".format(
                            self.config.number_of_threads
                        )
                    )
                fs_autorecon1.inputs.args = self.config.freesurfer_args
                # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set
                # in cmp/pipelines/diffusion/diffusion.py
                fs_autorecon1.inputs.subjects_dir = (
                    correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                )
                # fmt: off
                flow.connect(
                    [
                        (inputnode, fs_mriconvert, [(("T1", isavailable), "in_file")]),
                        (fs_mriconvert, rename, [("out_file", "in_file")]),
                        (rename, fs_autorecon1, [(("out_file", extract_reconall_base_dir), "subject_id")]),
                    ]
                )
                # fmt: on

                fs_source = pe.Node(interface=FreeSurferSource(), name="fsSource")
                fs_mriconvert_nu = pe.Node(
                    interface=fs.MRIConvert(out_type="niigz", out_file="nu.nii.gz"),
                    name="niigzConvert",
                )
                # fmt: off
                flow.connect(
                    [
                        (fs_autorecon1, fs_source, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]),
                        (fs_source, fs_mriconvert_nu, [("nu", "in_file")]),
                    ]
                )
                # fmt: on

                fs_mriconvert_brainmask = pe.Node(
                    interface=fs.MRIConvert(
                        out_type="mgz", out_file="brainmask.mgz"
                    ),
                    name="fsMriconvertBETbrainmask",
                )

                if self.config.brain_mask_extraction_tool == "BET":
                    fsl_bet = pe.Node(
                        interface=fsl.BET(
                            out_file="brain.nii.gz",
                            mask=True,
                            skull=True,
                            robust=True,
                        ),
                        name="fsl_bet",
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (fs_mriconvert_nu, fsl_bet, [("out_file", "in_file")]),
                            (fsl_bet, fs_mriconvert_brainmask, [("out_file", "in_file")]),
                        ]
                    )
                    # fmt: on
                elif self.config.brain_mask_extraction_tool == "ANTs":
                    ants_bet = pe.Node(
                        interface=ants.BrainExtraction(out_prefix="ants_bet_"),
                        name="antsBET",
                    )
                    ants_bet.inputs.brain_template = self.config.ants_templatefile
                    ants_bet.inputs.brain_probability_mask = (
                        self.config.ants_probmaskfile
                    )
                    ants_bet.inputs.extraction_registration_mask = (
                        self.config.ants_regmaskfile
                    )
                    ants_bet.inputs.num_threads = self.config.number_of_threads
                    # fmt: off
                    flow.connect(
                        [
                            (fs_mriconvert_nu, ants_bet, [("out_file", "anatomical_image")]),
                            (ants_bet, fs_mriconvert_brainmask, [("BrainExtractionBrain", "in_file")]),
                        ]
                    )
                    # fmt: on

                # inject the externally computed brainmask into the
                # FreeSurfer subject directory before resuming recon-all
                copy_brainmask_to_fs = pe.Node(
                    interface=copyBrainMaskToFreesurfer(), name="copyBrainmaskTofs"
                )
                # fmt: off
                flow.connect(
                    [
                        (rename, copy_brainmask_to_fs, [(("out_file", extract_reconall_base_dir), "subject_dir")]),
                        (fs_mriconvert_brainmask, copy_brainmask_to_fs, [("out_file", "in_file")]),
                    ]
                )
                # fmt: on

                # resume recon-all: autorecon2 plus autorecon3 via flags
                fs_reconall23 = pe.Node(
                    interface=fs.ReconAll(
                        flags="-no-isrunning -parallel -openmp {}".format(
                            self.config.number_of_threads
                        )
                    ),
                    name="reconall23",
                )
                fs_reconall23.inputs.directive = "autorecon2"
                fs_reconall23.inputs.args = self.config.freesurfer_args
                fs_reconall23.inputs.flags = "-autorecon3"
                fs_reconall23.inputs.subjects_dir = (
                    correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                )
                # fmt: off
                flow.connect(
                    [
                        (copy_brainmask_to_fs, fs_reconall23, [(("out_brainmask_file", get_freesurfer_subject_id), "subject_id")]),
                        (fs_reconall23, outputnode, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]),
                    ]
                )
                # fmt: on
        else:
            # reuse the existing FreeSurfer outputs untouched
            outputnode.inputs.subjects_dir = correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
            outputnode.inputs.subject_id = correct_freesurfer_subjectid_path(self.config.freesurfer_subject_id)
            print(f'INFO : Found existing {os.path.join(orig_dir, "001.mgz")} -> Skip Freesurfer recon-all')
            print(f' - outputnode.inputs.subjects_dir: {outputnode.inputs.subjects_dir}')
            print(f' - outputnode.inputs.subject_id: {outputnode.inputs.subject_id}')
    elif self.config.seg_tool == "Custom segmentation":
        self.create_workflow_custom(flow, inputnode, outputnode)
preprocessing.connect(iter_fwhm, "fwhm", isotropic_voxel_smooth, "fwhm") compute_mask = pe.Node(interface=nipy.ComputeMask(), name="compute_mask") preprocessing.connect(realign, "mean_image", compute_mask, "mean_volume") anisotropic_voxel_smooth = fsl_wf.create_susan_smooth( name="anisotropic_voxel_smooth", separate_masks=False) anisotropic_voxel_smooth.inputs.smooth.output_type = 'NIFTI' preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth, "inputnode.in_files") preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth, "inputnode.fwhm") preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth, 'inputnode.mask_file') recon_all = pe.Node(interface=fs.ReconAll(), name="recon_all") surfregister = pe.Node(interface=fs.BBRegister(), name='surfregister') surfregister.inputs.init = 'fsl' surfregister.inputs.contrast_type = 't2' preprocessing.connect(realign, 'mean_image', surfregister, 'source_file') preprocessing.connect(recon_all, 'subject_id', surfregister, 'subject_id') preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir') isotropic_surface_smooth = pe.MapNode( interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)), iterfield=['in_file'], name="isotropic_surface_smooth") preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth, 'reg_file') preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth,
def reconall(subjfile, subjID=None, subjdir=None, runreconall=True):
    """Run FreeSurfer's recon-all on a T1 nifti file.

    WARNING: Reconall takes very long to run!!

    http://nipy.sourceforge.net/nipype/users/examples/smri_freesurfer.html

    Parameters
    ----------
    subjfile : str
        Path to subject's T1 nifti file.
    subjID : str, optional
        Name for subject's output folder. If omitted, it is derived from
        the file name (leading word characters up to an underscore run,
        suffixed with 'seg').
    subjdir : str, optional
        The directory to where segmentation results should be saved.
        Defaults to same directory as subjfile.
    runreconall : bool
        If set to true, runs reconall, otherwise just converts assorted
        mgz files to nii.

    Returns
    -------
    tuple or nipype run result
        ``(reconall_result, convert_result)`` when ``runreconall`` is True,
        otherwise only the conversion result.

    Raises
    ------
    ValueError
        If the input file does not exist, is not a nifti file, or no
        subject ID can be derived from its name.
    """
    T1dir = os.path.dirname(subjfile)
    filename = os.path.basename(subjfile)

    # Derive a subject ID from the file name when none was given.
    if subjID is None:
        # Raw string so '\w' is a regex escape, not a Python escape.
        m = re.search(r'(\w+?)_*_', subjfile)
        if m is None:
            # Previously this crashed with AttributeError on m.group(0).
            raise ValueError(
                "Could not derive a subject ID from: %s" % subjfile)
        subjID = m.group(0) + 'seg'

    # Tell freesurfer what subjects directory to use
    if subjdir is None:
        subjdir = T1dir
    fs.FSCommand.set_default_subjects_dir(subjdir)
    segdir = os.path.join(subjdir, subjID)
    print('saving to ' + subjdir)

    # check if file exists
    if os.path.isfile(subjfile):
        print('running recon-all on ' + filename)
    else:
        raise ValueError("File: %s does not exist!" % filename)

    # check if nifti format: the first extension component must be 'nii'
    # (this also accepts '.nii.gz', matching the original behavior).
    parts = filename.split('.')
    ext = parts[1].lower() if len(parts) > 1 else ''
    if ext != "nii":
        raise ValueError("File: %s is not a nifti file!" % filename)

    if runreconall:
        # run recon-all (node renamed so it no longer shadows this function)
        wf = pe.Workflow(name="segment")
        wf.base_dir = T1dir

        reconall_node = pe.Node(interface=fs.ReconAll(), name='reconall')
        reconall_node.inputs.subject_id = subjID
        reconall_node.inputs.directive = 'all'
        reconall_node.inputs.subjects_dir = subjdir
        reconall_node.inputs.T1_files = subjfile

        wf.add_nodes([reconall_node])
        result = wf.run()

    # convert mgz to nii, resliced like the input T1 so voxel grids match
    wf2 = pe.Workflow(name="convertmgz")
    wf2.base_dir = T1dir

    convertmgz = pe.Node(interface=fs.MRIConvert(), name='convertmgz')
    convertmgz.inputs.in_file = os.path.join(segdir, 'mri', 'aseg.auto.mgz')
    convertmgz.inputs.out_orientation = 'LPS'
    convertmgz.inputs.resample_type = 'nearest'
    convertmgz.inputs.reslice_like = subjfile
    convertmgz.inputs.out_file = os.path.join(segdir, subjID + '_aseg.nii.gz')

    wf2.add_nodes([convertmgz])
    result2 = wf2.run()

    if runreconall:
        return (result, result2)
    return result2
def create_surface_registration(wf_name='surface_registration'):
    """
    Build a workflow that reconstructs cortical surfaces from anatomical
    data, registers the functional data to the FreeSurfer anatomical, and
    samples the functional volume onto each hemisphere's surface vertices.

    Parameters
    ----------
    wf_name : string
        name of the workflow

    Returns
    -------
    wflow : workflow object
        workflow object

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/timeseries/timeseries_analysis.py>`_

    Workflow Inputs::

        inputspec.rest : string (nifti file)
            path to input functional data
        inputspec.brain : string (nifti file)
            path to skull stripped anatomical image
        inputspec.recon_subjects : string
            path to subjects directory
        inputspec.subject_id : string
            subject id

    Workflow Outputs::

        outputspec.reconall_subjects_dir : string
            freesurfer subjects directory
        outputspec.reconall_subjects_id : string
            subject id for which anatomical data is taken
        outputspec.out_reg_file : string
            path to bbregister output registration file
        outputspec.lh_surface_file : string (mgz file)
            path to left hemisphere cortical surface file
        outputspec.rh_surface_file : string (mgz file)
            path to right hemisphere cortical surface file

    Order of commands:

    - ``recon-all -all`` on the T1 anatomical image
      (`ReconAll <https://surfer.nmr.mgh.harvard.edu/fswiki/recon-all>`_).
    - ``bbregister --t2 --init-fsl`` to register the functional volume to
      the FreeSurfer anatomical
      (`BBRegister <http://surfer.nmr.mgh.harvard.edu/fswiki/bbregister>`_).
    - ``mri_vol2surf`` per hemisphere to assign volume values to surface
      vertices
      (`mri_vol2surf <http://surfer.nmr.mgh.harvard.edu/fswiki/mri_vol2surf>`_).

    Example
    -------
    >>> import CPAC.timeseries.timeseries_analysis as t
    >>> wf = t.create_surface_registration()
    >>> wf.inputs.inputspec.rest = '/home/data/sub001/rest.nii.gz'
    >>> wf.inputs.inputspec.brain = '/home/data/sub001/anat.nii.gz'
    >>> wf.inputs.inputspec.recon_subjects = '/home/data/working_dir/SurfaceRegistration/anat_reconall'
    >>> wf.inputs.inputspec.subject_id = 'sub001'
    >>> wf.base_dir = './'
    >>> wf.run()
    """
    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(
        util.IdentityInterface(
            fields=['recon_subjects', 'brain', 'subject_id', 'rest']),
        name='inputspec')
    output_node = pe.Node(
        util.IdentityInterface(
            fields=['reconall_subjects_dir', 'reconall_subjects_id',
                    'out_reg_file', 'lh_surface_file', 'rh_surface_file']),
        name='outputspec')

    # Full surface reconstruction of the anatomical image.
    recon_all = pe.Node(interface=fs.ReconAll(), name="reconall")
    recon_all.inputs.directive = 'all'
    wf.connect(input_node, 'brain', recon_all, 'T1_files')
    wf.connect(input_node, 'subject_id', recon_all, 'subject_id')
    wf.connect(input_node, 'recon_subjects', recon_all, 'subjects_dir')
    wf.connect(recon_all, 'subjects_dir', output_node, 'reconall_subjects_dir')
    wf.connect(recon_all, 'subject_id', output_node, 'reconall_subjects_id')

    # Boundary-based registration of the functional volume onto the
    # reconstructed anatomical.
    bbreg = pe.Node(
        interface=fs.BBRegister(init='fsl', contrast_type='t2',
                                registered_file=True, out_fsl_file=True),
        name='bbregister')
    wf.connect(input_node, 'rest', bbreg, 'source_file')
    wf.connect(recon_all, 'subjects_dir', bbreg, 'subjects_dir')
    wf.connect(recon_all, 'subject_id', bbreg, 'subject_id')
    wf.connect(bbreg, 'out_reg_file', output_node, 'out_reg_file')

    # Sample the functional volume onto each hemisphere's surface; both
    # hemispheres use identical sampling parameters.
    for hemi in ('lh', 'rh'):
        sampler = pe.Node(interface=fs.SampleToSurface(hemi=hemi),
                          name='sample_to_surface_%s' % hemi)
        sampler.inputs.no_reshape = True
        sampler.inputs.interp_method = 'trilinear'
        sampler.inputs.sampling_method = "point"
        sampler.inputs.sampling_range = 0.5
        sampler.inputs.sampling_units = "frac"
        wf.connect(bbreg, 'out_reg_file', sampler, 'reg_file')
        wf.connect(input_node, 'rest', sampler, 'source_file')
        wf.connect(sampler, 'out_file', output_node,
                   '%s_surface_file' % hemi)

    return wf
def init_autorecon_resume_wf(omp_nthreads, name='autorecon_resume_wf'):
    """
    Build a workflow that resumes ``recon-all`` after ``-autorecon1``.

    The volumetric autorecon2 stage runs first; the per-hemisphere surface
    stages then run as MapNodes over ``['lh', 'rh']``, and autorecon3
    completes reconstruction while generating a visual report.
    """

    def _dedup(in_list):
        # Collapse the per-hemisphere MapNode outputs to one value; all
        # entries must agree.
        vals = set(in_list)
        if len(vals) > 1:
            raise ValueError(
                "Non-identical values can't be deduplicated:\n{!r}".format(
                    in_list))
        return vals.pop()

    wf = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['subjects_dir', 'subject_id', 'use_T2']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['subjects_dir', 'subject_id', 'out_report']),
        name='outputnode')

    autorecon2_vol = pe.Node(
        fs.ReconAll(directive='autorecon2-volonly', openmp=omp_nthreads),
        name='autorecon2_vol')
    autorecon2_surfs = pe.MapNode(
        fs.ReconAll(directive='autorecon2-perhemi', openmp=omp_nthreads),
        iterfield='hemi', name='autorecon2_surfs')
    autorecon_surfs = pe.MapNode(
        fs.ReconAll(directive='autorecon-hemi',
                    flags=['-noparcstats', '-noparcstats2', '-noparcstats3',
                           '-nohyporelabel', '-nobalabels'],
                    openmp=omp_nthreads),
        iterfield='hemi', name='autorecon_surfs')
    autorecon3 = pe.Node(
        ReconAllRPT(directive='autorecon3', openmp=omp_nthreads,
                    generate_report=True),
        name='autorecon3')

    # Every recon-all node may use up to omp_nthreads threads.
    for node in (autorecon2_vol, autorecon2_surfs,
                 autorecon_surfs, autorecon3):
        node.interface.num_threads = omp_nthreads
    # The surface stages iterate over both hemispheres.
    for mapnode in (autorecon2_surfs, autorecon_surfs):
        mapnode.inputs.hemi = ['lh', 'rh']

    wf.connect([
        (inputnode, autorecon_surfs, [('use_T2', 'use_T2')]),
        (inputnode, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
                                     ('subject_id', 'subject_id')]),
        (autorecon2_vol, autorecon2_surfs, [('subjects_dir', 'subjects_dir'),
                                            ('subject_id', 'subject_id')]),
        (autorecon2_surfs, autorecon_surfs,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
        (autorecon_surfs, autorecon3,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
        (autorecon3, outputnode, [('subjects_dir', 'subjects_dir'),
                                  ('subject_id', 'subject_id'),
                                  ('out_report', 'out_report')]),
    ])
    return wf
def init_surface_recon_wf(omp_nthreads, hires, name='surface_recon_wf'):
    """
    Reconstruct anatomical surfaces with FreeSurfer ``recon-all``.

    Runs ``autorecon1`` without skull-stripping, injects an externally
    skull-stripped brain as FreeSurfer's brainmask, resumes the remaining
    recon-all stages via ``init_autorecon_resume_wf``, hands the result to
    ``init_gifti_surface_wf``, and derives an FSL-format transform from the
    FreeSurfer conformed image to the input T1w.

    Parameters
    ----------
    omp_nthreads : int
        Maximum number of threads each recon-all process may use.
    hires : bool
        Allow sub-millimeter (hires) processing when voxel size permits.
    name : str
        Workflow name.
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['t1w', 't2w', 'skullstripped_t1', 'subjects_dir']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'subjects_dir', 'subject_id', 'fs_2_t1_transform', 'surfaces',
        'out_report'
    ]), name='outputnode')

    # NOTE: this function is serialized by nipype's niu.Function, so all of
    # its imports must stay inside the body.
    def detect_inputs(t1w_list, t2w_list=[], hires_enabled=True):
        from nipype.interfaces.base import isdefined
        from nipype.utils.filemanip import filename_to_list
        from nipype.interfaces.traits_extension import Undefined
        import nibabel as nib
        t1w_list = filename_to_list(t1w_list)
        t2w_list = filename_to_list(t2w_list) if isdefined(t2w_list) else []
        t1w_ref = nib.load(t1w_list[0])
        # Use high resolution preprocessing if voxel size < 1.0mm
        # Tolerance of 0.05mm requires that rounds down to 0.9mm or lower
        hires = hires_enabled and max(t1w_ref.header.get_zooms()) < 1 - 0.05

        # Only use a T2w image when its resolution is fine enough (<1.2mm).
        t2w = Undefined
        if t2w_list and max(nib.load(t2w_list[0]).header.get_zooms()) < 1.2:
            t2w = t2w_list[0]

        # https://surfer.nmr.mgh.harvard.edu/fswiki/SubmillimeterRecon
        mris_inflate = '-n 50' if hires else Undefined
        return (t2w, isdefined(t2w), hires, mris_inflate)

    recon_config = pe.Node(niu.Function(
        function=detect_inputs,
        output_names=['t2w', 'use_T2', 'hires', 'mris_inflate']),
        name='recon_config', run_without_submitting=True)
    recon_config.inputs.hires_enabled = hires

    # Extract BIDS entities from the (merged) T1w file name; also a
    # serialized nipype function.
    def bidsinfo(in_file):
        from fmriprep.interfaces.bids import BIDS_NAME
        match = BIDS_NAME.search(in_file)
        params = match.groupdict() if match is not None else {}
        return tuple(map(params.get, [
            'subject_id', 'ses_id', 'task_id', 'acq_id', 'rec_id', 'run_id'
        ]))

    bids_info = pe.Node(niu.Function(function=bidsinfo, output_names=[
        'subject_id', 'ses_id', 'task_id', 'acq_id', 'rec_id', 'run_id'
    ]), name='bids_info', run_without_submitting=True)

    # First recon-all phase: everything up to (but excluding) skull-strip.
    autorecon1 = pe.Node(fs.ReconAll(directive='autorecon1',
                                     flags='-noskullstrip',
                                     openmp=omp_nthreads),
                         name='autorecon1')
    # Must start from scratch so the injected brainmask is actually used.
    autorecon1.interface._can_resume = False
    autorecon1.interface.num_threads = omp_nthreads

    # Mask T1.mgz with the external skull-stripped image and install the
    # result as brainmask.auto.mgz / brainmask.mgz; serialized function.
    def inject_skullstripped(subjects_dir, subject_id, skullstripped):
        import os
        import nibabel as nib
        from nilearn.image import resample_to_img, new_img_like
        from nipype.utils.filemanip import copyfile
        mridir = os.path.join(subjects_dir, subject_id, 'mri')
        t1 = os.path.join(mridir, 'T1.mgz')
        bm_auto = os.path.join(mridir, 'brainmask.auto.mgz')
        bm = os.path.join(mridir, 'brainmask.mgz')

        if not os.path.exists(bm_auto):
            img = nib.load(t1)
            mask = nib.load(skullstripped)
            # Binarize the (possibly non-binary) skullstripped input.
            bmask = new_img_like(mask, mask.get_data() > 0)
            resampled_mask = resample_to_img(bmask, img, 'nearest')
            masked_image = new_img_like(
                img, img.get_data() * resampled_mask.get_data())
            masked_image.to_filename(bm_auto)

        if not os.path.exists(bm):
            copyfile(bm_auto, bm, copy=True, use_hardlink=True)

        return subjects_dir, subject_id

    skull_strip_extern = pe.Node(niu.Function(
        function=inject_skullstripped,
        output_names=['subjects_dir', 'subject_id']),
        name='skull_strip_extern')

    # FreeSurfer conformed -> original T1w, as an FSL matrix.
    fs_transform = pe.Node(fs.Tkregister2(fsl_out='freesurfer2subT1.mat',
                                          reg_header=True),
                           name='fs_transform')

    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()

    workflow.connect([
        # Configuration
        (inputnode, recon_config, [('t1w', 't1w_list'),
                                   ('t2w', 't2w_list')]),
        (inputnode, bids_info, [(('t1w', fix_multi_T1w_source_name),
                                 'in_file')]),
        # Passing subjects_dir / subject_id enforces serial order
        (inputnode, autorecon1, [('subjects_dir', 'subjects_dir')]),
        (bids_info, autorecon1, [('subject_id', 'subject_id')]),
        (autorecon1, skull_strip_extern, [('subjects_dir', 'subjects_dir'),
                                          ('subject_id', 'subject_id')]),
        (skull_strip_extern, autorecon_resume_wf,
         [('subjects_dir', 'inputnode.subjects_dir'),
          ('subject_id', 'inputnode.subject_id')]),
        (autorecon_resume_wf, gifti_surface_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        # Reconstruction phases
        (inputnode, autorecon1, [('t1w', 'T1_files')]),
        (recon_config, autorecon1, [
            ('t2w', 'T2_file'),
            ('hires', 'hires'),
            # First run only (recon-all saves expert options)
            ('mris_inflate', 'mris_inflate')
        ]),
        (inputnode, skull_strip_extern, [('skullstripped_t1',
                                          'skullstripped')]),
        (recon_config, autorecon_resume_wf, [('use_T2', 'inputnode.use_T2')]),
        # Construct transform from FreeSurfer conformed image to FMRIPREP
        # reoriented image
        (inputnode, fs_transform, [('t1w', 'target_image')]),
        (autorecon1, fs_transform, [('T1', 'moving_image')]),
        # Output
        (autorecon_resume_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id'),
          ('outputnode.out_report', 'out_report')]),
        (gifti_surface_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        (fs_transform, outputnode, [('fsl_file', 'fs_2_t1_transform')]),
    ])
    return workflow
def create_workflow(self, flow, inputnode, outputnode):
    """Wire the segmentation stage into `flow`.

    For the Freesurfer branch, either runs a full ``recon-all -all``
    (Freesurfer brain-mask extraction) or splits recon-all around an
    external brain-mask extraction step (BET or ANTs); otherwise reuses
    existing FreeSurfer outputs by setting the outputnode directly.
    """
    if self.config.seg_tool == "Freesurfer":
        if self.config.use_existing_freesurfer_data is False:
            # Converting to .mgz format
            fs_mriconvert = pe.Node(interface=fs.MRIConvert(
                out_type="mgz", out_file="T1.mgz"), name="mgzConvert")

            if self.config.make_isotropic:
                fs_mriconvert.inputs.vox_size = (
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size)
                fs_mriconvert.inputs.resample_type = self.config.isotropic_interpolation

            # Copy the converted image into <subject>/mri/orig/001.mgz,
            # the location recon-all expects its input in.
            rename = pe.Node(util.Rename(), name='copyOrig')
            orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                    "mri", "orig")
            if not os.path.exists(orig_dir):
                os.makedirs(orig_dir)
                print("INFO : Folder not existing; %s created!" % orig_dir)
            rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

            if self.config.brain_mask_extraction_tool == "Freesurfer":
                # ReconAll => named outputnode as we don't want to select a specific output....
                fs_reconall = pe.Node(interface=fs.ReconAll(
                    flags='-no-isrunning -parallel -openmp {}'.format(
                        self.config.number_of_threads)), name='reconall')
                fs_reconall.inputs.directive = 'all'
                fs_reconall.inputs.args = self.config.freesurfer_args
                # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir
                # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                def isavailable(file):
                    # Identity passthrough; forces the connection to wait
                    # for T1 availability.
                    return file

                flow.connect([
                    (inputnode, fs_mriconvert,
                     [(('T1', isavailable), 'in_file')]),
                    (fs_mriconvert, rename, [('out_file', 'in_file')]),
                    (rename, fs_reconall,
                     [(("out_file", extract_base_directory), "subject_id")]),
                    (fs_reconall, outputnode,
                     [('subjects_dir', 'subjects_dir'),
                      ('subject_id', 'subject_id')]),
                ])
            else:
                # ReconAll => named outputnode as we don't want to select a specific output....
                # External brain-mask extraction: run autorecon1 first
                # (without skull-strip for ANTs), inject the mask, then
                # finish with autorecon2+3.
                fs_autorecon1 = pe.Node(interface=fs.ReconAll(
                    flags='-no-isrunning -parallel -openmp {}'.format(
                        self.config.number_of_threads)), name='autorecon1')
                fs_autorecon1.inputs.directive = 'autorecon1'

                # if self.config.brain_mask_extraction_tool == "Custom" or self.config.brain_mask_extraction_tool == "ANTs":
                if self.config.brain_mask_extraction_tool == "ANTs":
                    fs_autorecon1.inputs.flags = \
                        '-no-isrunning -noskullstrip -parallel -openmp {}'.format(
                            self.config.number_of_threads)
                fs_autorecon1.inputs.args = self.config.freesurfer_args
                # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                fs_autorecon1.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                def isavailable(file):
                    # Identity passthrough (see above).
                    return file

                flow.connect([(inputnode, fs_mriconvert,
                               [(('T1', isavailable), 'in_file')]),
                              (fs_mriconvert, rename,
                               [('out_file', 'in_file')]),
                              (rename, fs_autorecon1,
                               [(("out_file", extract_base_directory),
                                 "subject_id")])])

                # Grab autorecon1's nu.mgz and convert it to nifti for the
                # external brain extraction tools.
                fs_source = pe.Node(interface=FreeSurferSource(),
                                    name='fsSource')
                fs_mriconvert_nu = pe.Node(interface=fs.MRIConvert(
                    out_type="niigz", out_file="nu.nii.gz"),
                    name='niigzConvert')

                flow.connect([(fs_autorecon1, fs_source,
                               [('subjects_dir', 'subjects_dir'),
                                ('subject_id', 'subject_id')]),
                              (fs_source, fs_mriconvert_nu,
                               [('nu', 'in_file')])])

                # Extracted brain back to mgz for FreeSurfer.
                fs_mriconvert_brainmask = pe.Node(
                    interface=fs.MRIConvert(out_type="mgz",
                                            out_file="brainmask.mgz"),
                    name='fsMriconvertBETbrainmask')

                if self.config.brain_mask_extraction_tool == "BET":
                    fsl_bet = pe.Node(interface=fsl.BET(
                        out_file='brain.nii.gz', mask=True, skull=True,
                        robust=True), name='fsl_bet')
                    flow.connect([(fs_mriconvert_nu, fsl_bet,
                                   [('out_file', 'in_file')]),
                                  (fsl_bet, fs_mriconvert_brainmask,
                                   [('out_file', 'in_file')])])

                elif self.config.brain_mask_extraction_tool == "ANTs":
                    # templatefile =
                    #     pkg_resources.resource_filename('cmtklib', os.path.join('data', 'segmentation',
                    #     'ants_template_IXI', 'T_template2_BrainCerebellum.nii.gz'))
                    # probmaskfile = pkg_resources.resource_filename('cmtklib',
                    #     os.path.join('data', 'segmentation', 'ants_template_IXI',
                    #     'T_template_BrainCerebellumProbabilityMask.nii.gz'))
                    ants_bet = pe.Node(interface=ants.BrainExtraction(
                        out_prefix='ants_bet_'), name='antsBET')
                    ants_bet.inputs.brain_template = self.config.ants_templatefile
                    ants_bet.inputs.brain_probability_mask = self.config.ants_probmaskfile
                    ants_bet.inputs.extraction_registration_mask = self.config.ants_regmaskfile
                    ants_bet.inputs.num_threads = self.config.number_of_threads

                    flow.connect([(fs_mriconvert_nu, ants_bet,
                                   [('out_file', 'anatomical_image')]),
                                  (ants_bet, fs_mriconvert_brainmask,
                                   [('BrainExtractionBrain', 'in_file')])])

                # elif self.config.brain_mask_extraction_tool == "Custom":
                #     fs_mriconvert_brainmask.inputs.in_file = os.path.abspath(
                #         self.config.brain_mask_path)

                # copy_brainmask_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmask_to_fs')
                # copy_brainmask_to_fs.inputs.out_file =
                #     os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.mgz")
                # copy_brainmaskauto_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmaskauto_to_fs')
                # copy_brainmaskauto_to_fs.inputs.out_file =
                #     os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.auto.mgz")
                # flow.connect([
                #     (fs_mriconvert_brainmask,copy_brainmask_to_fs,[('out_file','in_file')]),
                #     (fs_mriconvert_brainmask,copy_brainmaskauto_to_fs,[('out_file','in_file')])
                # ])

                # Install the external mask into the FreeSurfer subject dir.
                copy_brainmask_to_fs = pe.Node(
                    interface=copyBrainMaskToFreesurfer(),
                    name='copyBrainmaskTofs')

                flow.connect([(rename, copy_brainmask_to_fs,
                               [(("out_file", extract_base_directory),
                                 "subject_dir")]),
                              (fs_mriconvert_brainmask, copy_brainmask_to_fs,
                               [('out_file', 'in_file')])])

                # flow.connect([
                #     (fs_source,fs_mriconvert_nu,[('nu','in_file')])
                # ])

                def get_freesurfer_subject_id(file):
                    # Strip the trailing '/mri/brainmask.mgz' (18 chars)
                    # from the brainmask path to recover the subject dir.
                    return file[:-18]

                # Resume recon-all: autorecon2 followed by autorecon3.
                fs_reconall23 = pe.Node(interface=fs.ReconAll(
                    flags='-no-isrunning -parallel -openmp {}'.format(
                        self.config.number_of_threads)), name='reconall23')
                fs_reconall23.inputs.directive = 'autorecon2'
                fs_reconall23.inputs.args = self.config.freesurfer_args
                fs_reconall23.inputs.flags = '-autorecon3'
                # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                fs_reconall23.inputs.subjects_dir = self.config.freesurfer_subjects_dir
                # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                flow.connect([(copy_brainmask_to_fs, fs_reconall23,
                               [(("out_brainmask_file",
                                  get_freesurfer_subject_id), "subject_id")]),
                              (fs_reconall23, outputnode,
                               [('subjects_dir', 'subjects_dir'),
                                ('subject_id', 'subject_id')])])
        else:
            # Reuse previously computed FreeSurfer outputs.
            outputnode.inputs.subjects_dir = self.config.freesurfer_subjects_dir
            outputnode.inputs.subject_id = self.config.freesurfer_subject_id
def init_autorecon_resume_wf(omp_nthreads, name='autorecon_resume_wf'):
    r"""
    Resume recon-all execution, assuming the `-autorecon1` stage has been
    completed.

    In order to utilize resources efficiently, this is broken down into five
    sub-stages; after the first stage, the second and third stages may be run
    simultaneously, and the fourth and fifth stages may be run
    simultaneously, if resources permit::

        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon2-volonly
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi lh \
            -noparcstats -nocortparc2 -noparcstats2 -nocortparc3 \
            -noparcstats3 -nopctsurfcon -nohyporelabel -noaparc2aseg \
            -noapas2aseg -nosegstats -nowmparc -nobalabels
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi rh \
            -noparcstats -nocortparc2 -noparcstats2 -nocortparc3 \
            -noparcstats3 -nopctsurfcon -nohyporelabel -noaparc2aseg \
            -noapas2aseg -nosegstats -nowmparc -nobalabels
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon3 -hemi lh -T2pial
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon3 -hemi rh -T2pial

    The excluded steps in the second and third stages (``-no<option>``) are
    not fully hemisphere independent, and are therefore postponed to the
    final two stages.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from smriprep.workflows.surfaces import init_autorecon_resume_wf
        wf = init_autorecon_resume_wf(omp_nthreads=1)

    **Inputs**

        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        use_T2
            Refine pial surface using T2w image
        use_FLAIR
            Refine pial surface using FLAIR image

    **Outputs**

        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['subjects_dir', 'subject_id', 'use_T2', 'use_FLAIR']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['subjects_dir', 'subject_id']),
        name='outputnode')

    # Stage 1: volumetric autorecon2 (serial; mem_gb per FreeSurfer's
    # documented ~4GB/subject requirement, with margin).
    autorecon2_vol = pe.Node(fs.ReconAll(directive='autorecon2-volonly',
                                         openmp=omp_nthreads),
                             n_procs=omp_nthreads, mem_gb=5,
                             name='autorecon2_vol')
    # recon-all tracks its own completion state; always invoke it.
    autorecon2_vol.interface._always_run = True

    # Stages 2-3: per-hemisphere surface reconstruction, with the
    # non-hemisphere-independent steps excluded (run later in autorecon3).
    autorecon_surfs = pe.MapNode(fs.ReconAll(
        directive='autorecon-hemi',
        flags=['-noparcstats', '-nocortparc2', '-noparcstats2',
               '-nocortparc3', '-noparcstats3', '-nopctsurfcon',
               '-nohyporelabel', '-noaparc2aseg', '-noapas2aseg',
               '-nosegstats', '-nowmparc', '-nobalabels'],
        openmp=omp_nthreads),
        iterfield='hemi', n_procs=omp_nthreads, mem_gb=5,
        name='autorecon_surfs')
    autorecon_surfs.inputs.hemi = ['lh', 'rh']
    autorecon_surfs.interface._always_run = True

    # Stages 4-5: per-hemisphere autorecon3 completes the reconstruction.
    autorecon3 = pe.MapNode(fs.ReconAll(directive='autorecon3',
                                        openmp=omp_nthreads),
                            iterfield='hemi', n_procs=omp_nthreads,
                            mem_gb=5, name='autorecon3')
    autorecon3.inputs.hemi = ['lh', 'rh']
    autorecon3.interface._always_run = True

    def _dedup(in_list):
        # Collapse the per-hemisphere MapNode outputs to a single value;
        # all entries must agree.
        vals = set(in_list)
        if len(vals) > 1:
            raise ValueError(
                "Non-identical values can't be deduplicated:\n{!r}".format(
                    in_list))
        return vals.pop()

    workflow.connect([
        (inputnode, autorecon3, [('use_T2', 'use_T2'),
                                 ('use_FLAIR', 'use_FLAIR')]),
        (inputnode, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
                                     ('subject_id', 'subject_id')]),
        (autorecon2_vol, autorecon_surfs, [('subjects_dir', 'subjects_dir'),
                                           ('subject_id', 'subject_id')]),
        (autorecon_surfs, autorecon3,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
        (autorecon3, outputnode,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
    ])
    return workflow
def fs_Processing(data_dir, subject, tag):
    '''
    Run FreeSurfer recon-all (with hippocampal subfields) for one subject.

    Parameters
    ----------
    data_dir : str
        path to the subject directory.
    subject : str
        subject id.
    tag : str
        containing the image type ('align' selects rigid-transformed
        images; 'anat' selects raw images).

    Returns
    -------
    all freesufer files at FS_Subjects_DIR
    '''
    datapath = data_dir+'/'+subject+'/anat'  # raw image
    datapathAlign = data_dir+'/'+subject+'/align'  # rigid transformed
    if os.path.exists(datapath) and os.listdir(datapath):
        print('doing FS recon-all and hippocampal subfields...')
        reconall = freesurfer.ReconAll()
        reconall.inputs.subject_id = subject  # Subject folder
        reconall.inputs.directive = 'all'
        # Hard-coded site-specific output location for FreeSurfer subjects.
        FS_Subjects_DIR = "/usr/users/nmri/projects/tummala/FS_Subjects_DIR"
        if not os.path.exists(FS_Subjects_DIR):
            os.mkdir(FS_Subjects_DIR)
        if tag == 'align':
            if os.path.exists(datapathAlign) and os.listdir(datapathAlign):
                print('doing recon-all on aligned images\n...')
                FS_Subjects_Align = FS_Subjects_DIR+'/'+'FS_Subjects_Align'
                if not os.path.exists(FS_Subjects_Align):
                    os.mkdir(FS_Subjects_Align)
                reconall.inputs.subjects_dir = FS_Subjects_Align  # Path to freesurfer subjects directory
                alignimages = os.listdir(datapathAlign)
                # doCheckforT2andFLAIR reports which extra contrasts exist
                # for this subject (helper defined elsewhere in this file).
                isT2, isFLAIR = doCheckforT2andFLAIR(datapathAlign)
                for alignimage in alignimages:
                    if alignimage.endswith('reoriented.align.nii') and 'hrT1' in alignimage:
                        reconall.inputs.T1_files = datapathAlign+'/'+alignimage
                    elif alignimage.endswith('reoriented.align.nii') and (isT2 and not isFLAIR):
                        reconall.inputs.T2_file = datapathAlign+'/'+alignimage
                        reconall.inputs.use_T2 = True
                    elif alignimage.endswith('reoriented.align.nii') and isFLAIR:
                        reconall.inputs.FLAIR_file = datapathAlign+'/'+alignimage
                        reconall.inputs.use_FLAIR = True
            else:
                print(f'no align directory/no files in the align directory for {subject}\n')
        elif tag == 'anat':
            print('doing recon-all on anat images\n...')
            FS_Subjects_Anat = FS_Subjects_DIR+'/'+'FS_Subjects_Anat'
            if not os.path.exists(FS_Subjects_Anat):
                os.mkdir(FS_Subjects_Anat)
            reconall.inputs.subjects_dir = FS_Subjects_Anat  # Path to freesurfer subjects directory
            anatimages = os.listdir(datapath)
            isT2, isFLAIR = doCheckforT2andFLAIR(datapath)
            for anatimage in anatimages:
                if anatimage.endswith('reoriented.nii') and 'hrT1' in anatimage:
                    reconall.inputs.T1_files = datapath+'/'+anatimage
                elif anatimage.endswith('reoriented.nii') and (isT2 and not isFLAIR):
                    reconall.inputs.T2_file = datapath+'/'+anatimage
                    reconall.inputs.use_T2 = True
                elif anatimage.endswith('reoriented.nii') and isFLAIR:
                    reconall.inputs.FLAIR_file = datapath+'/'+anatimage
                    reconall.inputs.use_FLAIR = True
        # NOTE(review): run() executes for both 'align' and 'anat' tags;
        # if tag=='align' but the align directory was missing, recon-all is
        # still invoked without subjects_dir/T1 set — confirm intended.
        reconall.inputs.hippocampal_subfields_T1 = True
        reconall.run()  # running the recon-all
    else:
        print(f'no anat directory/no files in anat directory for {subject}\n')
def init_surface_recon_wf(omp_nthreads, hires, name='surface_recon_wf'):
    r"""
    Reconstruct anatomical surfaces using FreeSurfer's ``recon-all``.

    Reconstruction is performed in three phases. The first phase
    initializes the subject with T1w and T2w (if available) structural
    images and performs basic reconstruction (``autorecon1``) with the
    exception of skull-stripping. For example, a subject with only one
    session with T1w and T2w images would be processed by the following
    command::

        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -i <bids-root>/sub-<subject_label>/anat/sub-<subject_label>_T1w.nii.gz \
            -T2 <bids-root>/sub-<subject_label>/anat/sub-<subject_label>_T2w.nii.gz \
            -autorecon1 \
            -noskullstrip

    The second phase imports an externally computed skull-stripping mask.
    This workflow refines the external brainmask using the internal mask
    implicit in FreeSurfer's ``aseg.mgz`` segmentation, to reconcile ANTs'
    and FreeSurfer's brain masks.

    First, the ``aseg.mgz`` mask from FreeSurfer is refined in two steps,
    using binary morphological operations:

      1. With a binary closing operation the sulci are included into the
         mask. This results in a smoother brain mask that does not exclude
         deep, wide sulci.

      2. Fill any holes (typically, there could be a hole next to the
         pineal gland and the corpora quadrigemina if the great cerebral
         brain is segmented out).

    Second, the brain mask is grown, including pixels that have a high
    likelihood to the GM tissue distribution:

      3. Dilate and subtract the brain mask, defining the region to search
         for candidate pixels that likely belong to cortical GM.

      4. Pixels found in the search region that are labeled as GM by ANTs
         (during ``antsBrainExtraction.sh``) are directly added to the new
         mask.

      5. Otherwise, estimate GM tissue parameters locally in patches of
         ``ww`` size, and test the likelihood of the pixel to belong in the
         GM distribution.

    This procedure is inspired by mindboggle's solution to the problem:
    https://github.com/nipy/mindboggle/blob/7f91faaa7664d820fe12ccc52ebaf21d679795e2/mindboggle/guts/segment.py#L1660

    The final phase resumes reconstruction, using the T2w image to assist
    in finding the pial surface, if available. See
    :py:func:`~smriprep.workflows.surfaces.init_autorecon_resume_wf` for
    details.

    Memory annotations for FreeSurfer are based off `their documentation
    <https://surfer.nmr.mgh.harvard.edu/fswiki/SystemRequirements>`_.
    They specify an allocation of 4GB per subject. Here we define 5GB to
    have a certain margin.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from smriprep.workflows.surfaces import init_surface_recon_wf
        wf = init_surface_recon_wf(omp_nthreads=1, hires=True)

    **Parameters**

        omp_nthreads : int
            Maximum number of threads an individual process may use
        hires : bool
            Enable sub-millimeter preprocessing in FreeSurfer

    **Inputs**

        t1w
            List of T1-weighted structural images
        t2w
            List of T2-weighted structural images (only first used)
        flair
            List of FLAIR images
        skullstripped_t1
            Skull-stripped T1-weighted image (or mask of image)
        ants_segs
            Brain tissue segmentation from ANTS ``antsBrainExtraction.sh``
        corrected_t1
            INU-corrected, merged T1-weighted image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID

    **Outputs**

        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1w2fsnative_xfm
            LTA-style affine matrix translating from T1w to
            FreeSurfer-conformed subject space
        fsnative2t1w_xfm
            LTA-style affine matrix translating from FreeSurfer-conformed
            subject space to T1w
        surfaces
            GIFTI surfaces for gray/white matter boundary, pial surface,
            midthickness (or graymid) surface, and inflated surfaces
        out_brainmask
            Refined brainmask, derived from FreeSurfer's ``aseg`` volume
        out_aseg
            FreeSurfer's aseg segmentation, in native T1w space
        out_aparc
            FreeSurfer's aparc+aseg segmentation, in native T1w space

    **Subworkflows**

        * :py:func:`~smriprep.workflows.surfaces.init_autorecon_resume_wf`
        * :py:func:`~smriprep.workflows.surfaces.init_gifti_surface_wf`

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
Brain surfaces were reconstructed using `recon-all` [FreeSurfer {fs_ver},
RRID:SCR_001847, @fs_reconall], and the brain mask estimated
previously was refined with a custom variation of the method to reconcile
ANTs-derived and FreeSurfer-derived segmentations of the cortical
gray-matter of Mindboggle [RRID:SCR_002438, @mindboggle].
""".format(fs_ver=fs.Info().looseversion() or '<ver>')

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w', 't2w', 'flair', 'skullstripped_t1', 'corrected_t1',
        'ants_segs', 'subjects_dir', 'subject_id'
    ]), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'subjects_dir', 'subject_id', 't1w2fsnative_xfm',
        'fsnative2t1w_xfm', 'surfaces', 'out_brainmask', 'out_aseg',
        'out_aparc'
    ]), name='outputnode')

    # Decide which inputs (T2w/FLAIR/hires) recon-all should use.
    recon_config = pe.Node(FSDetectInputs(hires_enabled=hires),
                           name='recon_config')

    # Adds -cw256 when the field of view exceeds recon-all's limit.
    fov_check = pe.Node(niu.Function(function=_check_cw256),
                        name='fov_check')

    # Phase 1: autorecon1 without skull-stripping (flags come from
    # fov_check; mem_gb=5 per the FreeSurfer requirement noted above).
    autorecon1 = pe.Node(fs.ReconAll(directive='autorecon1',
                                     openmp=omp_nthreads),
                         name='autorecon1', n_procs=omp_nthreads, mem_gb=5)
    # Must not resume: the externally injected brainmask has to be used.
    autorecon1.interface._can_resume = False
    autorecon1.interface._always_run = True

    # Phase 2: inject the externally computed brain mask.
    skull_strip_extern = pe.Node(FSInjectBrainExtracted(),
                                 name='skull_strip_extern')

    # fsnative <-> T1w transforms (the inverse is derived by LTAConvert).
    fsnative2t1w_xfm = pe.Node(RobustRegister(auto_sens=True,
                                              est_int_scale=True),
                               name='fsnative2t1w_xfm')
    t1w2fsnative_xfm = pe.Node(LTAConvert(out_lta=True, invert=True),
                               name='t1w2fsnative_xfm')

    # Phase 3: resume reconstruction and derive outputs.
    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()
    aseg_to_native_wf = init_segs_to_native_wf()
    aparc_to_native_wf = init_segs_to_native_wf(segmentation='aparc_aseg')
    refine = pe.Node(RefineBrainMask(), name='refine')

    workflow.connect([
        # Configuration
        (inputnode, recon_config, [('t1w', 't1w_list'),
                                   ('t2w', 't2w_list'),
                                   ('flair', 'flair_list')]),
        # Passing subjects_dir / subject_id enforces serial order
        (inputnode, autorecon1, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id')]),
        (autorecon1, skull_strip_extern, [('subjects_dir', 'subjects_dir'),
                                          ('subject_id', 'subject_id')]),
        (skull_strip_extern, autorecon_resume_wf,
         [('subjects_dir', 'inputnode.subjects_dir'),
          ('subject_id', 'inputnode.subject_id')]),
        (autorecon_resume_wf, gifti_surface_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        # Reconstruction phases
        (inputnode, autorecon1, [('t1w', 'T1_files')]),
        (inputnode, fov_check, [('t1w', 'in_files')]),
        (fov_check, autorecon1, [('out', 'flags')]),
        (recon_config, autorecon1, [
            ('t2w', 'T2_file'),
            ('flair', 'FLAIR_file'),
            ('hires', 'hires'),
            # First run only (recon-all saves expert options)
            ('mris_inflate', 'mris_inflate')
        ]),
        (inputnode, skull_strip_extern, [('skullstripped_t1', 'in_brain')]),
        (recon_config, autorecon_resume_wf,
         [('use_t2w', 'inputnode.use_T2'),
          ('use_flair', 'inputnode.use_FLAIR')]),
        # Construct transform from FreeSurfer conformed image to sMRIPrep
        # reoriented image
        (inputnode, fsnative2t1w_xfm, [('t1w', 'target_file')]),
        (autorecon1, fsnative2t1w_xfm, [('T1', 'source_file')]),
        (fsnative2t1w_xfm, gifti_surface_wf,
         [('out_reg_file', 'inputnode.fsnative2t1w_xfm')]),
        (fsnative2t1w_xfm, t1w2fsnative_xfm, [('out_reg_file', 'in_lta')]),
        # Refine ANTs mask, deriving new mask from FS' aseg
        (inputnode, refine, [('corrected_t1', 'in_anat'),
                             ('ants_segs', 'in_ants')]),
        (inputnode, aseg_to_native_wf, [('corrected_t1',
                                         'inputnode.in_file')]),
        (autorecon_resume_wf, aseg_to_native_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        (inputnode, aparc_to_native_wf, [('corrected_t1',
                                          'inputnode.in_file')]),
        (autorecon_resume_wf, aparc_to_native_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        (aseg_to_native_wf, refine, [('outputnode.out_file', 'in_aseg')]),
        # Output
        (autorecon_resume_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id')]),
        (gifti_surface_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        (t1w2fsnative_xfm, outputnode, [('out_lta', 't1w2fsnative_xfm')]),
        (fsnative2t1w_xfm, outputnode, [('out_reg_file',
                                         'fsnative2t1w_xfm')]),
        (refine, outputnode, [('out_file', 'out_brainmask')]),
        (aseg_to_native_wf, outputnode, [('outputnode.out_file',
                                          'out_aseg')]),
        (aparc_to_native_wf, outputnode, [('outputnode.out_file',
                                           'out_aparc')]),
    ])
    return workflow
def freesurfer_preproc(wf, cfg, strat_pool, pipe_num, opt=None):
    '''
    {"name": "freesurfer_preproc",
     "config": ["surface_analysis"],
     "switch": ["run_freesurfer"],
     "option_key": "None",
     "option_val": "None",
     "inputs": [["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]],
     "outputs": ["space-T1w_desc-brain_mask",
                 "freesurfer_subject_dir",
                 "label-CSF_mask",
                 "label-WM_mask",
                 "label-GM_mask",
                 "surface_curvature",
                 "pial_surface_mesh",
                 "smoothed_surface_mesh",
                 "spherical_surface_mesh",
                 "sulcal_depth_surface_maps",
                 "cortical_thickness_surface_maps",
                 "cortical_volume_surface_maps",
                 "white_matter_surface_mesh",
                 "raw_average"]}
    '''
    # Runs FreeSurfer recon-all on the selected T1w image, then brings the
    # FreeSurfer brain mask and aseg segmentation back into native T1w space
    # and derives binary CSF/WM/GM tissue masks from the aseg labels.
    # NOTE: the docstring above is a C-PAC node-block spec parsed as JSON —
    # its content must not be altered.

    reconall = pe.Node(interface=freesurfer.ReconAll(),
                       name=f'anat_freesurfer_{pipe_num}')

    freesurfer_subject_dir = os.path.join(
        cfg.pipeline_setup['working_directory']['path'],
        f'anat_preproc_freesurfer_{pipe_num}',
        'anat_freesurfer')

    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists() guard.
    os.makedirs(freesurfer_subject_dir, exist_ok=True)

    reconall.inputs.directive = 'all'
    reconall.inputs.subjects_dir = freesurfer_subject_dir
    reconall.inputs.openmp = cfg.pipeline_setup['system_config'][
        'num_OMP_threads']

    node, out = strat_pool.get_data(
        ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"])
    wf.connect(node, out, reconall, 'T1_files')

    # Register the FS brain mask to native space.
    # Node names carry the pipe_num suffix (matching the reconall node) so
    # multiple instantiations of this block do not collide in one workflow.
    fs_brain_mask_to_native = pe.Node(
        interface=freesurfer.ApplyVolTransform(),
        name=f'fs_brain_mask_to_native_{pipe_num}')
    # reg_header derives the registration from the MGZ headers instead of a
    # separate registration file.
    fs_brain_mask_to_native.inputs.reg_header = True

    wf.connect(reconall, 'brainmask',
               fs_brain_mask_to_native, 'source_file')
    wf.connect(reconall, 'rawavg',
               fs_brain_mask_to_native, 'target_file')
    wf.connect(reconall, 'subjects_dir',
               fs_brain_mask_to_native, 'subjects_dir')

    # Convert the registered brain mask from .mgz to .nii.gz.
    fs_brain_mask_to_nifti = pe.Node(
        util.Function(input_names=['in_file'],
                      output_names=['out_file'],
                      function=mri_convert),
        name=f'fs_brainmask_to_nifti_{pipe_num}')
    wf.connect(fs_brain_mask_to_native, 'transformed_file',
               fs_brain_mask_to_nifti, 'in_file')

    # Binarize the brain mask.
    binarize_fs_brain_mask = pe.Node(
        interface=fsl.maths.MathsCommand(),
        name=f'binarize_fs_brainmask_{pipe_num}')
    binarize_fs_brain_mask.inputs.args = '-bin'
    wf.connect(fs_brain_mask_to_nifti, 'out_file',
               binarize_fs_brain_mask, 'in_file')

    # Fill holes in the binarized mask.
    fill_fs_brain_mask = pe.Node(interface=afni.MaskTool(),
                                 name=f'fill_fs_brainmask_{pipe_num}')
    fill_fs_brain_mask.inputs.fill_holes = True
    fill_fs_brain_mask.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(binarize_fs_brain_mask, 'out_file',
               fill_fs_brain_mask, 'in_file')

    # Register the FS segmentations (aseg.mgz) to native space.
    fs_aseg_to_native = pe.Node(
        interface=freesurfer.ApplyVolTransform(),
        name=f'fs_aseg_to_native_{pipe_num}')
    fs_aseg_to_native.inputs.reg_header = True
    # Nearest-neighbour interpolation keeps the integer label values intact.
    fs_aseg_to_native.inputs.interp = 'nearest'

    wf.connect(reconall, 'aseg', fs_aseg_to_native, 'source_file')
    wf.connect(reconall, 'rawavg', fs_aseg_to_native, 'target_file')
    # Wire subjects_dir from the reconall node, consistent with the
    # brain-mask branch above (previously set statically on inputs).
    wf.connect(reconall, 'subjects_dir',
               fs_aseg_to_native, 'subjects_dir')

    # Convert the registered segmentations from .mgz to .nii.gz.
    fs_aseg_to_nifti = pe.Node(
        util.Function(input_names=['in_file'],
                      output_names=['out_file'],
                      function=mri_convert),
        name=f'fs_aseg_to_nifti_{pipe_num}')
    # Nearest resample type preserves label integers through the conversion.
    fs_aseg_to_nifti.inputs.args = '-rt nearest'
    wf.connect(fs_aseg_to_native, 'transformed_file',
               fs_aseg_to_nifti, 'in_file')

    # Split the aseg label file into CSF / GM / WM binary masks.
    pick_tissue = pe.Node(
        util.Function(input_names=['multiatlas_Labels'],
                      output_names=['csf_mask', 'gm_mask', 'wm_mask'],
                      function=pick_tissue_from_labels_file),
        # The original name was an f-string with no placeholder; it was
        # clearly intended to carry the pipe_num suffix like the other nodes.
        name=f'anat_preproc_freesurfer_tissue_mask_{pipe_num}')
    pick_tissue.inputs.include_ventricles = True
    wf.connect(fs_aseg_to_nifti, 'out_file',
               pick_tissue, 'multiatlas_Labels')

    outputs = {
        'space-T1w_desc-brain_mask': (fill_fs_brain_mask, 'out_file'),
        'freesurfer_subject_dir': (reconall, 'subjects_dir'),
        'label-CSF_mask': (pick_tissue, 'csf_mask'),
        'label-WM_mask': (pick_tissue, 'wm_mask'),
        'label-GM_mask': (pick_tissue, 'gm_mask'),
        'surface_curvature': (reconall, 'curv'),
        'pial_surface_mesh': (reconall, 'pial'),
        'smoothed_surface_mesh': (reconall, 'smoothwm'),
        'spherical_surface_mesh': (reconall, 'sphere'),
        'sulcal_depth_surface_maps': (reconall, 'sulc'),
        'cortical_thickness_surface_maps': (reconall, 'thickness'),
        'cortical_volume_surface_maps': (reconall, 'volume'),
        'white_matter_surface_mesh': (reconall, 'white'),
        'raw_average': (reconall, 'rawavg')
    }

    return (wf, outputs)