def legacy(bids_base, template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
    ):
    '''
    Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

    Parameters
    ----------
    bids_base : str
        Path to the BIDS data set root.
    template : str
        Path to the template to register the data to.
    debug : bool, optional
        Whether to enable nipype debug mode.
        This increases logging.
    exclude : dict
        A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
        If this is specified, matching entries will be excluded from the analysis.
    functional_blur_xy : float, optional
        Factor by which to smooth data in the xy-plane; if the parameter evaluates to false, no smoothing will be applied.
        Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
    functional_match : dict, optional
        Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    keep_work : bool, optional
        Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
    n_jobs : int, optional
        Number of processors to maximally use for the workflow; if unspecified, a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
    n_jobs_percentage : float, optional
        Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
    out_base : str, optional
        Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
    realign : {"space","time","spacetime",""}, optional
        Parameter that dictates slice-timing correction and realignment of slices. "time" (fsl.SliceTimer) is the default, since it works safely. Use the others only with caution!
    registration_mask : str, optional
        Mask to use for the registration process.
        This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
    sessions : list, optional
        A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
    structural_match : dict, optional
        Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition' or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    subjects : list, optional
        A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
    tr : float, optional
        Repetition time, explicitly.
        WARNING! This is a parameter waiting for deprecation.
    workflow_name : str, optional
        Top level name for the output directory.
    '''
    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
            The `nipype.interfaces.ants.legacy` module was not found on this system.
            You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
            https://github.com/nipy/nipype/issues/3197
            ''')
        # Re-raise, as the workflow below cannot be constructed without this module.
        raise

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
        )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(
        function=get_bids_scan,
        input_names=inspect.getargspec(get_bids_scan)[0],
        output_names=['scan_path', 'scan_type', 'task', 'nii_path', 'nii_name', 'events_name', 'subject_session', 'metadata_filename', 'dict_slice', 'ind_type'],
        ))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(
        function=force_dummy_scans,
        input_names=inspect.getargspec(force_dummy_scans)[0],
        output_names=['out_file', 'deleted_scans'],
        ))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(name='events_file', interface=util.Function(
        function=write_bids_events_file,
        input_names=inspect.getargspec(write_bids_events_file)[0],
        output_names=['out_file'],
        ))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [
            ('nii_path', 'timecourse_file'),
            ('task', 'task'),
            ('scan_path', 'scan_dir'),
            ]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # Here we divide by 10, assuming 10 percent noise.
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
        ]
    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  # 3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(), name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    # will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(), name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(), name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
        ])

    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
            ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
            ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
            ])
    else:
        f_rename = pe.Node(util.Rename(), name='f_rename')
        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
            ])

    workflow_config = {'execution': {'crashdump_dir': path.join(out_base, 'crashdump')}}
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
            }

    # This gives the name of the work directory; the output name is passed to the datasink.
    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name, "graph.dot"), graph2use="hierarchical", format="png")
    except OSError:
        print('We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.')

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print('Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead.'.format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                # Re-raise the original exception, preserving its traceback.
                raise
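

# `divideby_10` and `ss_to_path` are connection helpers defined elsewhere in
# the package; the sketches below are assumptions reconstructed from how the
# connection lists above use them, not verbatim copies of the originals.

def divideby_10(value):
    # Scale the 98th-percentile statistic down, matching the "10 percent
    # noise" assumption noted in the connection list.
    return value / 10.0

def ss_to_path(subject_session):
    # Map a (subject, session) pair onto a BIDS-style container path,
    # e.g. ('4007', 'ofM') -> 'sub-4007/ses-ofM'.
    subject, session = subject_session
    return "/".join(["sub-" + subject, "ses-" + session])

# A hypothetical invocation, for illustration only (all paths and identifiers
# below are assumptions, not part of this module):
#
#	legacy('/var/data/bids', '/usr/share/mouse-brain-atlases/dsurqec_200micron.nii',
#		functional_match={'task': ['JogB']},
#		subjects=['4007'],
#		n_jobs_percentage=0.5,
#		)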
def normalise(self,
    normtemplatepath='MNI152_T1_2mm_brain.nii.gz',
    templatehead='MNI152_T1_2mm.nii.gz',
    templatebrainmask='MNI152_T1_2mm_brain_mask.nii.gz',
    skullstripmethod='ANTS',
    normalise_method='FSL',
    lmodel=None,
    ignoreexception=False,
    ncore=2,
    ):
    '''
    Normalisation pipeline with either FSL FLIRT or ANTS antsIntroduction.
    '''
    normtemplatepath = abspath(normtemplatepath)
    templatebrainmask = abspath(templatebrainmask)
    templatehead = abspath(templatehead)
    if lmodel is None:
        lmodel = self._collection.getmrlist()
    lsbjid = [model.getmetafield('Subject') for model in lmodel]
    limgid = [model.getimgid() for model in lmodel]
    inputnode = pe.Node(niu.IdentityInterface(fields=['imgid']), name='input2')
    inputnode.iterables = ('imgid', limgid)

    # Create a nipype workflow with serial standard_roi => bet => flirt/antsNormalization
    datasource = pe.Node(nio.DataGrabber(infields=['imgid'], outfields=['srcimg']), name='niifinder')
    datasource.inputs.base_directory = os.path.abspath(self.dbpath)
    datasource.inputs.template = '*/*/*/*/ADNI_*_I%s.nii'
    datasource.inputs.sort_filelist = True
    datasource.inputs.template_args['srcimg'] = [['imgid']]

    datasink = pe.Node(nio.DataSink(), name='normsinker')
    datasink.inputs.base_directory = os.path.abspath(join(self.dbpath, 'results'))

    # Start building the workflow.
    wf = pe.Workflow(name="preprocess")
    wf.connect(inputnode, 'imgid', datasource, 'imgid')

    if skullstripmethod == 'FSL':
        # Estimate the tissue classes from the anatomical image. But use spm's
        # segment as FSL appears to be breaking.
        stdroi = pe.Node(StdRoi(), name='standard_space_roi')
        stdroi.inputs.betpremask = True
        stdroi.inputs.ignore_exception = ignoreexception
        stripper = pe.Node(fsl.BET(), name='stripper')
        stripper.inputs.frac = 0.25
        stripper.inputs.ignore_exception = ignoreexception
        wf.connect(datasource, 'srcimg', stdroi, 'in_file')
        wf.connect(stdroi, 'out_file', stripper, 'in_file')
    else:
        stripper = pe.Node(interface=antsBrainExtraction(), name='stripper')
        stripper.inputs.dimension = 3
        stripper.inputs.wholetemplate = templatehead
        assert templatebrainmask is not None
        stripper.inputs.brainmask = templatebrainmask
        stripper.inputs.output_prefix = 'out'
        stripper.inputs.randomseeding = True
        wf.connect(datasource, 'srcimg', stripper, 'in_file')

    if normalise_method == 'FSL':
        normwarp = pe.Node(fsl.FLIRT(bins=640, cost_func='mutualinfo'), name='flirt')
        normwarp.inputs.reference = normtemplatepath
        normwarp.inputs.output_type = "NIFTI_GZ"
        #stripper.inputs.padding = True
        normwarp.inputs.out_file = 'norm_deformed.nii.gz'
        normwarp.inputs.ignore_exception = ignoreexception
        infield = 'in_file'
        outfield = 'out_file'
    elif normalise_method == 'ANTS':
        normwarp = pe.Node(antsIntroduction(), name='ants')
        normwarp.inputs.reference_image = normtemplatepath
        normwarp.inputs.max_iterations = [30, 90, 20]
        #normwarp.inputs.num_threads = 1 # This parameter will not take effect.
        normwarp.inputs.transformation_model = 'RI'
        normwarp.inputs.out_prefix = 'norm_'
        normwarp.inputs.ignore_exception = ignoreexception
        infield = 'input_image'
        outfield = 'output_file'

    # The input/output field names differ between the FSL and ANTS interfaces in nipype.
    wf.connect(stripper, 'out_file', normwarp, infield)
    #wf.connect(inputidnode, 'subject_id', datasink, 'container')
    wf.connect(normwarp, outfield, datasink, 'preprocessed')

    # Run the workflow on all requested cores.
    wf.run(plugin='MultiProc', plugin_args={'n_procs': ncore})
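

# A hypothetical call, assuming `pipe` is an instance of the class this method
# is bound to and that the MNI152 templates resolve from the working directory
# (all arguments below are illustrative assumptions):
#
#	pipe.normalise(
#		normtemplatepath='MNI152_T1_2mm_brain.nii.gz',
#		skullstripmethod='ANTS',
#		normalise_method='ANTS',
#		ncore=4,
#		)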
def legacy(bids_base, template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    ):
    '''
    Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

    Parameters
    ----------
    bids_base : str
        Path to the BIDS data set root.
    template : str
        Path to the template to register the data to.
    autorotate : bool, optional
        Whether to use a multi-rotation-state transformation start.
        This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
    debug : bool, optional
        Whether to enable nipype debug mode.
        This increases logging.
    functional_blur_xy : float, optional
        Factor by which to smooth data in the xy-plane; if the parameter evaluates to false, no smoothing will be applied.
        Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
    functional_match : dict, optional
        Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    keep_work : bool, optional
        Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
    negative_contrast_agent : bool, optional
        Whether the scan was acquired with a negative contrast agent given the imaging modality; if true, the values will be inverted with respect to zero.
        This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
    n_procs : int, optional
        Number of processors to maximally use for the workflow; if unspecified, a best guess will be estimated based on hardware (but not on current load).
    out_base : str, optional
        Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
    realign : {"space","time","spacetime",""}, optional
        Parameter that dictates slice-timing correction and realignment of slices. "time" (fsl.SliceTimer) is the default, since it works safely. Use the others only with caution!
    registration_mask : str, optional
        Mask to use for the registration process.
        This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
    sessions : list, optional
        A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
    structural_match : dict, optional
        Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition' or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    subjects : list, optional
        A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
    tr : float, optional
        Repetition time, explicitly.
        WARNING! This is a parameter waiting for deprecation.
    workflow_name : str, optional
        Top level name for the output directory.
    '''
    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        )

    get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(
        function=get_bids_scan,
        input_names=inspect.getargspec(get_bids_scan)[0],
        output_names=['scan_path', 'scan_type', 'task', 'nii_path', 'nii_name', 'file_name', 'events_name', 'subject_session'],
        ))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(
        function=force_dummy_scans,
        input_names=inspect.getargspec(force_dummy_scans)[0],
        output_names=['out_file', 'deleted_scans'],
        ))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(name='events_file', interface=util.Function(
        function=write_bids_events_file,
        input_names=inspect.getargspec(write_bids_events_file)[0],
        output_names=['out_file'],
        ))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [
            ('nii_path', 'timecourse_file'),
            ('task', 'task'),
            ('scan_path', 'scan_dir'),
            ]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # Here we divide by 10, assuming 10 percent noise.
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
        ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  # 3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])

    #if structural_scan_types.any():
    #	get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_bids_scan, input_names=inspect.getargspec(get_bids_scan)[0], output_names=['scan_path','scan_type','task', 'nii_path', 'nii_name', 'file_name', 'events_name', 'subject_session']))
    #	get_s_scan.inputs.ignore_exception = True
    #	get_s_scan.inputs.data_selection = data_selection
    #	get_s_scan.inputs.bids_base = bids_base
    #	s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
    #	s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"
    #	s_resize = pe.Node(interface=VoxelResize(), name="s_resize")
    #	s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
    #	s_BET.inputs.mask = True
    #	s_BET.inputs.frac = 0.3
    #	s_BET.inputs.robust = True
    #	ants_introduction = pe.Node(interface=legacy.antsIntroduction(), name='ants_introduction')
    #	ants_introduction.inputs.dimension = 3
    #	ants_introduction.inputs.reference_image = template
    #	#will need updating to `1`
    #	ants_introduction.inputs.bias_field_correction = True
    #	ants_introduction.inputs.transformation_model = 'GR'
    #	ants_introduction.inputs.max_iterations = [8,15,8]
    #	s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
    #	s_register, s_warp, f_warp = structural_registration(template)
    #	workflow_connections.extend([
    #		(get_s_scan, s_reg_biascorrect, [('nii_path', 'input_image')]),
    #		(s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
    #		(s_cutoff, s_BET, [('out_file', 'in_file')]),
    #		(s_biascorrect, s_mask, [('output_image', 'in_file')]),
    #		(s_BET, s_mask, [('mask_file', 'mask_file')]),
    #		])
    #	#TODO: incl. in func registration
    #	if autorotate:
    #		workflow_connections.extend([
    #			(s_mask, s_rotated, [('out_file', 'out_file')]),
    #			(s_rotated, s_register, [('out_file', 'moving_image')]),
    #			])
    #	else:
    #		workflow_connections.extend([
    #			(s_mask, s_register, [('out_file', 'moving_image')]),
    #			(s_register, s_warp, [('composite_transform', 'transforms')]),
    #			(get_s_scan, s_warp, [('nii_path', 'input_image')]),
    #			(s_warp, datasink, [('output_image', 'anat')]),
    #			])
    #	if autorotate:
    #		s_rotated = autorotate(template)
    #		workflow_connections.extend([
    #			(get_f_scan, get_s_scan, [('subject_session', 'selector')]),
    #			(get_s_scan, s_warp, [('nii_name','output_image')]),
    #			(get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
    #			])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(), name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    # will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(), name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(), name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
        ])

    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
            ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
            ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
            ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy
    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
            (blur, invert, [('out_file', 'in_file')]),
            # The scan name sets the output file name (`out_file`; fsl.ImageMaths has no `output_image` input).
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (invert, datasink, [('out_file', 'func')]),
            ])
    elif functional_blur_xy:
        workflow_connections.extend([
            # The scan name sets the output file name (`out_file`; BlurToFWHM has no `output_image` input).
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
            ])
    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_copysform2qform, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
            (f_copysform2qform, invert, [('out_file', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
            ])
    else:
        f_rename = pe.Node(util.Rename(), name='f_rename')
        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
            ])

    workflow_config = {'execution': {'crashdump_dir': path.join(bids_base, 'preprocessing/crashdump')}}
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
            }

    # This gives the name of the work directory; the output name is passed to the datasink.
    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name, "graph.dot"), graph2use="hierarchical", format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print('Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead.'.format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                # Re-raise the original exception, preserving its traceback.
                raise
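

# `fslmaths_invert_values` (used in the negative-contrast branches above) is
# defined elsewhere in the package; the following is a minimal sketch of what
# it presumably does, given that its return value is connected as an
# fsl.ImageMaths `op_string` while the same image is the node's `in_file`:

def fslmaths_invert_values(img_path):
    # Subtracting the image from itself twice yields the value-inverse of the
    # input: in - in - in = -in, i.e. inversion with respect to zero.
    return '-sub {0} -sub {0}'.format(img_path)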
TR = info['RepetitionTime']

# functional workflow
#segment = pe.Node(interface=spm.Segment(), name='segment')
#segment.inputs.data = anat_file
#segment.inputs.csf_output_type = [False, False, True] # output native space images
#segment.inputs.gm_output_type = [False, False, True]
#segment.inputs.wm_output_type = [False, False, True]

brainextraction = pe.Node(interface=BrainExtraction(), name='brainextraction')
brainextraction.inputs.dimension = 3
brainextraction.inputs.anatomical_image = anat_file_bias
brainextraction.inputs.brain_template = ants_template
brainextraction.inputs.brain_probability_mask = ants_template_mask

antsreg = pe.Node(interface=antsIntroduction(), name='antsreg')
antsreg.inputs.reference_image = "/home/despo/arielle/hp-tms/ants/ants-template.nii.gz"

sphere_name = "_sphere_15mm.nii.gz"
ants_sphere = "/home/despo/arielle/hp-tms/seed_analysis_at/Mid_R_ROIfiles/ants" + sphere_name

antswarp = pe.Node(interface=WarpImageMultiTransform(), name='antswarp')
antswarp.inputs.input_image = ants_sphere
#antswarp.inputs.transformation_series = []
antswarp.inputs.invert_affine = [1]
#antswarp.inputs.output_image = 'native' + sphere_name
antswarp.inputs.out_postfix = '_native'

struct_list = ['R_Hipp', 'L_Hipp', 'R_Amyg', 'L_Amyg']
first_be = pe.Node(interface=fsl.FIRST(), name='first_be')
first_be.inputs.brain_extracted = True
#first_be.inputs.list_of_specific_structures = struct_list
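

# The nodes above are declared but not yet wired together. A minimal sketch of
# the presumable intent, assuming the template-space sphere ROI is to be
# mapped back into native space through the inverse of the `antsreg`
# transforms; the workflow name, the Merge node, and the reference-image
# choice are illustrative assumptions, not part of the original fragment:

wf = pe.Workflow(name='seed_to_native')
# With `invert_affine = [1]`, the first entry of `transformation_series` (the
# affine) is applied inverted; pairing it with the inverse warp field maps
# template-space images into native space.
warp_list = pe.Node(util.Merge(2), name='warp_list')
wf.connect(antsreg, 'affine_transformation', warp_list, 'in1')
wf.connect(antsreg, 'inverse_warp_field', warp_list, 'in2')
wf.connect(warp_list, 'out', antswarp, 'transformation_series')
# The target space would also need a reference image, e.g. the bias-corrected
# native anatomical (assumption):
#antswarp.inputs.reference_image = anat_file_bias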