def legacy(bids_base, template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
    ):
    '''
    Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

    Parameters
    ----------
    bids_base : str
        Path to the BIDS data set root.
    template : str
        Path to the template to register the data to.
    debug : bool, optional
        Whether to enable nipype debug mode.
        This increases logging.
    exclude : dict
        A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
        If this is specified matching entries will be excluded in the analysis.
    functional_blur_xy : float, optional
        Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
        Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
    functional_match : dict, optional
        Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    keep_work : bool, str
        Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
    n_jobs : int, optional
        Number of processors to maximally use for the workflow; if unspecified a best guess will be estimate based on `n_jobs_percentage` and hardware (but not on current load).
    n_jobs_percentage : float, optional
        Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overriden by `n_jobs`).
    out_base : str, optional
        Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
    realign : {"space","time","spacetime",""}, optional
        Parameter that dictates slictiming correction and realignment of slices.
        "time" (FSL.SliceTimer) is default, since it works safely.
        Use others only with caution!
    registration_mask : str, optional
        Mask to use for the registration process.
        This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
    sessions : list, optional
        A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    structural_match : dict, optional
        Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    subjects : list, optional
        A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    tr : float, optional
        Repetition time, explicitly.
        WARNING! This is a parameter waiting for deprecation.
    workflow_name : str, optional
        Top level name for the output directory.
    '''
    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
            The `nipype.interfaces.ants.legacy` was not found on this system.
            You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
            https://github.com/nipy/nipype/issues/3197
            ''')
        # FIX: without this module the workflow cannot be constructed; previously
        # execution fell through and died later with an unrelated NameError.
        raise

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
        )

    if not n_jobs:
        # Best guess from installed cores (not current load); never below 2 workers.
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    # Iterates over all selected functional scans (one workflow branch per scan).
    # FIX: `inspect.getargspec` was removed in Python 3.11; `getfullargspec`
    # is a drop-in replacement for positional-args access.
    get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(
        function=get_bids_scan,
        input_names=inspect.getfullargspec(get_bids_scan)[0],
        output_names=[
            'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
            'events_name', 'subject_session', 'metadata_filename',
            'dict_slice', 'ind_type'
            ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    # Trims or pads the timeseries to the requested number of dummy scans.
    dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(
        function=force_dummy_scans,
        input_names=inspect.getfullargspec(force_dummy_scans)[0],
        output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(name='events_file', interface=util.Function(
        function=write_bids_events_file,
        input_names=inspect.getfullargspec(write_bids_events_file)[0],
        output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'), ('task', 'task'), ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
        ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  # 3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(), name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    # will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(), name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(), name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
        ])

    # The realigner (if any) feeds both the temporal mean (mask estimation
    # branch) and the template warp; otherwise use the unrealigned data.
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
            ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
            ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
            ])
    else:
        f_rename = pe.Node(util.Rename(), name='f_rename')
        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
            ])

    workflow_config = {
        'execution': {'crashdump_dir': path.join(out_base, 'crashdump')},
        }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
            }

    workdir_name = workflow_name + "_work"
    # this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name, "graph.dot"), graph2use="hierarchical", format="png")
    except OSError:
        # Graph rendering requires the graphviz `dot` binary; its absence is non-critical.
        print('We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.')

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print('Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'.format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
def create_apply_ants_xfm(dimension, mapnode, name='apply_ants_xfm'):
    """
    Takes in the results of the FSL-based functional-to-anatomical registration,
    and the results of the ANTS-based anatomical-to-template registration, and
    applies these transformations to register functional to template.

    The FSL-based functional-to-anatomical registration output transformations
    are first converted from FSL format to ITK format - this step can and should
    be separated into their own workflows in the future.

    NOTE: The dimension of the input image (3 or 4) must be specified in the
    function call, as well as whether or not the apply warp must be treated as a
    mapnode (0 - no, 1 - yes).

    Parameters
    ----------
    dimension : int
        Dimension of the input image (3 or 4).
    mapnode : int
        Whether the apply-warp node iterates over multiple inputs (0 - no, 1 - yes).
    name : string, optional
        Name of the workflow.

    Returns
    -------
    apply_ants_xfm : nipype.pipeline.engine.Workflow

    Notes
    -----
    Workflow Inputs::

        inputspec.in_file : string (nifti file)
            File of functional brain data to be registered
        inputspec.warp_reference : string (nifti file)
            File of template to be used
        inputspec.use_nearest : boolean (True or False)
            Whether or not to use nearest neighbor interpolation
        inputspec.func_anat_affine : .mat file (affine matrix)
            Output matrix of FSL-based functional to anatomical registration
        inputspec.conversion_reference : string (nifti file)
            File of skull-stripped anatomical brain to be used in affine conversion
        inputspec.conversion_source : string (nifti file)
            Should match the input of the apply warp (in_file) unless you are
            applying the warp to a 4-d file, in which case this file should be a
            mean_functional file
        inputspec.nonlinear_field : string (nifti file)
            Output field file of the anatomical to template ANTS registration
        inputspec.ants_affine : text file
            Output matrix of the anatomical to template ANTS registration

    Workflow Outputs::

        outputspec.out_file : string (nifti file)
            Normalizion of input functional file

    Registration Procedure:

    1. Convert the FSL-based functional-to-anatomical output affine matrix into
       ANTS (ITK) format.
    2. Collect this converted affine, and the ants_affine.txt and nonlinear
       field file from the anatomical-to-template ANTS registration into one
       transformation series string.
    3. Apply the warp to the input file using WarpImageMultiTransform (for 3d
       files) or WarpTimeSeriesImageMultiTransform (for 4d files with
       timeseries).
    """
    apply_ants_xfm = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'warp_reference', 'in_file', 'use_nearest', 'func_anat_affine',
        'conversion_reference', 'conversion_source', 'nonlinear_field',
        'ants_affine'
        ]), name='inputspec')

    outputspec = pe.Node(util.IdentityInterface(fields=['out_file']),
                         name='outputspec')

    if dimension == 4:
        if mapnode == 0:
            # converts FSL-format .mat affine xfm into ANTS-format .txt
            # .mat affine comes from Func->Anat registration
            fsl_reg_2_itk = create_fsl_to_itk_conversion(mapnode, 'fsl_reg_2_itk')

            # collects series of transformations to be applied to the moving images
            collect_transforms = pe.Node(util.Merge(3), name='collect_transforms')

            # performs series of transformations on moving images
            warp_images = pe.Node(
                interface=ants.WarpTimeSeriesImageMultiTransform(),
                name='ants_apply_4d_warp')
            warp_images.inputs.dimension = 4

            apply_ants_xfm.connect(inputspec, 'warp_reference', warp_images, 'reference_image')
            apply_ants_xfm.connect(inputspec, 'use_nearest', warp_images, 'use_nearest')
        elif mapnode == 1:
            # in this case with 4-d images (timeseries), fsl_reg_2_itk and
            # collect_transforms are not map nodes due to the fact that the
            # affine conversion (fsl_reg_2_itk) cannot take in a 4-d image as
            # its conversion_source (ordinarily the input image and the
            # conversion source are the same image, however with timeseries,
            # mean_functional should be used as the conversion source and the
            # 4-d image used as the input image to the apply warp.
            # (this is why in_file and conversion_source are separated into
            # two inputs)

            # converts FSL-format .mat affine xfm into ANTS-format .txt
            # .mat affine comes from Func->Anat registration
            fsl_reg_2_itk = create_fsl_to_itk_conversion(mapnode, 'fsl_reg_2_itk')

            # collects series of transformations to be applied to the moving images
            collect_transforms = pe.Node(util.Merge(3), name='collect_transforms')

            # performs series of transformations on moving images
            # FIX: the iterfield must name the interface input the data is
            # actually connected to ('input_image', see the connect calls
            # below, and the 3-d mapnode branch); 'in_file' is not an input
            # of WarpTimeSeriesImageMultiTransform.
            warp_images = pe.MapNode(
                interface=ants.WarpTimeSeriesImageMultiTransform(),
                name='ants_apply_4d_warp',
                iterfield=['input_image'])
            warp_images.inputs.dimension = 4

            apply_ants_xfm.connect(inputspec, 'warp_reference', warp_images, 'reference_image')
            apply_ants_xfm.connect(inputspec, 'use_nearest', warp_images, 'use_nearest')
    elif dimension == 3:
        if mapnode == 0:
            # converts FSL-format .mat affine xfm into ANTS-format .txt
            # .mat affine comes from Func->Anat registration
            fsl_reg_2_itk = create_fsl_to_itk_conversion(mapnode, 'fsl_reg_2_itk')

            # collects series of transformations to be applied to the moving images
            collect_transforms = pe.Node(util.Merge(3), name='collect_transforms')

            # performs series of transformations on moving images
            warp_images = pe.Node(interface=ants.WarpImageMultiTransform(),
                                  name='apply_ants_3d_warp')
            warp_images.inputs.dimension = 3

            apply_ants_xfm.connect(inputspec, 'warp_reference', warp_images, 'reference_image')
            apply_ants_xfm.connect(inputspec, 'use_nearest', warp_images, 'use_nearest')
        elif mapnode == 1:
            # converts FSL-format .mat affine xfm into ANTS-format .txt
            # .mat affine comes from Func->Anat registration
            fsl_reg_2_itk = create_fsl_to_itk_conversion(mapnode, 'fsl_reg_2_itk')

            # collects series of transformations to be applied to the moving images
            collect_transforms = pe.MapNode(util.Merge(3),
                                            name='collect_transforms',
                                            iterfield=['in3'])

            # performs series of transformations on moving images
            warp_images = pe.MapNode(
                interface=ants.WarpImageMultiTransform(),
                name='apply_ants_3d_warp',
                iterfield=['input_image', 'transformation_series'])
            warp_images.inputs.dimension = 3

            apply_ants_xfm.connect(inputspec, 'warp_reference', warp_images, 'reference_image')
            apply_ants_xfm.connect(inputspec, 'use_nearest', warp_images, 'use_nearest')

    # convert the .mat from linear Func->Anat to ANTS format
    apply_ants_xfm.connect(inputspec, 'func_anat_affine', fsl_reg_2_itk, 'inputspec.transform_file')
    apply_ants_xfm.connect(inputspec, 'conversion_reference', fsl_reg_2_itk, 'inputspec.reference_file')
    apply_ants_xfm.connect(inputspec, 'conversion_source', fsl_reg_2_itk, 'inputspec.source_file')

    # Premat from Func->Anat linear reg and bbreg (if bbreg is enabled)
    apply_ants_xfm.connect(fsl_reg_2_itk, 'outputspec.itk_transform', collect_transforms, 'in3')
    # Field file from anatomical nonlinear registration
    apply_ants_xfm.connect(inputspec, 'nonlinear_field', collect_transforms, 'in1')
    # affine transformation from anatomical registration
    apply_ants_xfm.connect(inputspec, 'ants_affine', collect_transforms, 'in2')

    apply_ants_xfm.connect(inputspec, 'in_file', warp_images, 'input_image')
    apply_ants_xfm.connect(collect_transforms, 'out', warp_images, 'transformation_series')
    apply_ants_xfm.connect(warp_images, 'output_image', outputspec, 'out_file')

    return apply_ants_xfm
def get_post_struct_norm_workflow(name='normalize_post_struct'):
    """
    Base post-structural workflow for normalization

    Parameters
    ----------
    name : name of workflow. Default = 'normalize_post_struct'

    Inputs
    ------
    inputspec.template_file :
    inputspec.unwarped_brain :
    inputspec.warp_field :
    inputspec.affine_transformation :
    inputspec.out_fsl_file :
    inputspec.moving_image :
    inputspec.mean_func :
    inputspec.use_nearest :

    Outputs
    -------
    outputspec.warped_image :

    Returns
    -------
    workflow : post-structural normalization workflow
    """
    # inputs to workflow
    import nipype.interfaces.ants as ants
    import nipype.interfaces.c3 as c3  # FIX: `c3` is used below but was never imported here
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.utility as util
    import nipype.pipeline.engine as pe

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'template_file', 'unwarped_brain', 'warp_field',
        'affine_transformation', 'out_fsl_file', 'moving_image', 'mean_func',
        "use_nearest"
        ]), name='inputspec')

    # makes fsl-style coregistration ANTS compatible
    fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(fsl2ras=True),
                            name='fsl_reg_2_itk')

    # collects series of transformations to be applied to the moving images
    collect_transforms = pe.Node(util.Merge(3), name='collect_transforms')

    # performs series of transformations on moving images
    warp_images = pe.MapNode(ants.WarpTimeSeriesImageMultiTransform(),
                             name='warp_images',
                             iterfield=['input_image', 'dimension'])

    # collects workflow outputs
    outputspec = pe.Node(util.IdentityInterface(fields=['warped_image']),
                         name='outputspec')

    # initializes and connects workflow nodes
    normalize_post_struct = pe.Workflow(name=name)
    normalize_post_struct.connect([
        (inputspec, fsl_reg_2_itk, [('unwarped_brain', 'reference_file')]),
        (inputspec, fsl_reg_2_itk, [('out_fsl_file', 'transform_file')]),
        (inputspec, fsl_reg_2_itk, [('mean_func', 'source_file')]),
        (fsl_reg_2_itk, collect_transforms, [('itk_transform', 'in3')]),
        (inputspec, collect_transforms, [('warp_field', 'in1'),
                                         ('affine_transformation', 'in2')]),
        (inputspec, warp_images, [('moving_image', 'input_image')]),
        # dimension is derived per moving image, hence it is an iterfield
        (inputspec, warp_images, [(('moving_image', get_image_dimensions), 'dimension')]),
        (inputspec, warp_images, [('template_file', 'reference_image'),
                                  ('use_nearest', 'use_nearest')]),
        (collect_transforms, warp_images, [('out', 'transformation_series')]),
        (warp_images, outputspec, [('output_image', 'warped_image')]),
        ])

    return normalize_post_struct
def legacy(bids_base, template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    ):
    '''
    Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

    NOTE(review): if this definition lives in the same module as the earlier
    `legacy` workflow, the later definition shadows the earlier one — confirm
    they belong to separate modules, or rename one.

    Parameters
    ----------
    bids_base : str
        Path to the BIDS data set root.
    template : str
        Path to the template to register the data to.
    autorotate : bool, optional
        Whether to use a multi-rotation-state transformation start.
        This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
    debug : bool, optional
        Whether to enable nipype debug mode.
        This increases logging.
    functional_blur_xy : float, optional
        Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
        Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
    functional_match : dict, optional
        Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    keep_work : bool, str
        Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
    negative_contrast_agent : bool, optional
        Whether the scan was acquired with a negative contrast agent given the imaging modality; if true the values will be inverted with respect to zero.
        This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
    n_procs : int, optional
        Number of processors to maximally use for the workflow; if unspecified a best guess will be estimate based on hardware (but not on current load).
    out_base : str, optional
        Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
    realign : {"space","time","spacetime",""}, optional
        Parameter that dictates slictiming correction and realignment of slices.
        "time" (FSL.SliceTimer) is default, since it works safely.
        Use others only with caution!
    registration_mask : str, optional
        Mask to use for the registration process.
        This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
    sessions : list, optional
        A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    structural_match : dict, optional
        Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
        The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
    subjects : list, optional
        A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    tr : float, optional
        Repetition time, explicitly.
        WARNING! This is a parameter waiting for deprecation.
    workflow_name : str, optional
        Top level name for the output directory.
    '''
    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        )

    # Iterates over all selected functional scans (one workflow branch per scan).
    # FIX: `inspect.getargspec` was removed in Python 3.11; `getfullargspec`
    # is a drop-in replacement for positional-args access.
    get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(
        function=get_bids_scan,
        input_names=inspect.getfullargspec(get_bids_scan)[0],
        output_names=[
            'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
            'file_name', 'events_name', 'subject_session'
            ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(
        function=force_dummy_scans,
        input_names=inspect.getfullargspec(force_dummy_scans)[0],
        output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(name='events_file', interface=util.Function(
        function=write_bids_events_file,
        input_names=inspect.getfullargspec(write_bids_events_file)[0],
        output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'), ('task', 'task'), ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
        ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  # 3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
            ])

    # (A large commented-out structural-registration section, including the
    # unused `autorotate` handling, was removed here as dead code; retrieve it
    # from version control if structural registration is reinstated.)

    # NOTE(review): `antslegacy` must be provided by a module-level import of
    # `nipype.interfaces.ants.legacy` — confirm it is present in this module.
    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(), name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    # will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(), name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(), name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
        ])

    # The realigner (if any) feeds both the temporal mean (mask estimation
    # branch) and the template warp; otherwise use the unrealigned data.
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
            ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
            ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
            ])

    # Nodes left unconnected below are simply excluded from the graph.
    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy
    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
            (blur, invert, [('out_file', 'in_file')]),
            # FIX: ImageMaths takes its output file name via `out_file`
            # ('output_image' is not an input of this interface — see the
            # negative_contrast_agent-only branch below).
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (invert, datasink, [('out_file', 'func')]),
            ])
    elif functional_blur_xy:
        workflow_connections.extend([
            # FIX: BlurToFWHM takes its output file name via `out_file`
            # ('output_image' is not an input of this interface); this also
            # matches the sibling `legacy` workflow.
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
            ])
    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_copysform2qform, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
            (f_copysform2qform, invert, [('out_file', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
            ])
    else:
        f_rename = pe.Node(util.Rename(), name='f_rename')
        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
            ])

    # NOTE(review): crashdumps are written inside the input BIDS directory;
    # the sibling `legacy` workflow uses `out_base` instead — confirm intent.
    workflow_config = {
        'execution': {'crashdump_dir': path.join(bids_base, 'preprocessing/crashdump')},
        }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
            }

    workdir_name = workflow_name + "_work"
    # this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name, "graph.dot"), graph2use="hierarchical", format="png")
    except OSError:
        # FIX: graph rendering needs the graphviz `dot` binary; its absence
        # should not abort processing (matches the sibling workflow).
        print('We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.')

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print('Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'.format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
preproc.connect(fsl2ras_downsample_atlas_from_struct_to_func, 'itk_transform', cmb_tfm_func_to_ds_atlas, 'in1')
# from structural to atlas (with struct resolution)
# NOTE(review): `get_sec`/`get_first` presumably select the second/first
# entry of the ANTs `forward_transforms` list — confirm in their definitions.
preproc.connect(structs_to_atlas_coreg_output, ('forward_transforms', get_sec), cmb_tfm_func_to_ds_atlas, 'in2')
preproc.connect(structs_to_atlas_coreg_output, ('forward_transforms', get_first), cmb_tfm_func_to_ds_atlas, 'in3')
# transform from functional to structural
#preproc.connect(ants_mbRef_to_t2_output, ('forward_transforms', get_sec), cmb_tfm_func_to_ds_atlas, 'in4')
preproc.connect(sep_tfms, 'out_list', cmb_tfm_func_to_ds_atlas, 'in4')
# apply transformation from mbRef all the way to caltech atlas (in func resolution)
# Nearest-neighbour interpolation keeps discrete (label/mask) values intact.
mbRef_to_ds_atlas = pe.MapNode(interface=ants.WarpImageMultiTransform(use_nearest = True), name = 'mbRef_to_ds_atlas', iterfield=['input_image', 'transformation_series'])
preproc.connect(mask_mbRef, 'out_file', mbRef_to_ds_atlas, 'input_image')
preproc.connect(downsample_atlas_t2_to_func, 'out_file', mbRef_to_ds_atlas, 'reference_image')
preproc.connect(cmb_tfm_func_to_ds_atlas, 'out', mbRef_to_ds_atlas, 'transformation_series')
# apply transformation to 4D func
func_to_ds_atlas = pe.MapNode(interface=ants.WarpTimeSeriesImageMultiTransform(use_nearest = True), name = 'func_to_ds_atlas', iterfield=['transformation_series','input_image'])
preproc.connect(unwarp_func, 'unwarped_file', func_to_ds_atlas, 'input_image')
preproc.connect(downsample_atlas_t2_to_func, 'out_file', func_to_ds_atlas, 'reference_image')
preproc.connect(cmb_tfm_func_to_ds_atlas, 'out', func_to_ds_atlas, 'transformation_series')
# combine transformations (t2 to atlas)
# `axis='hstack'` makes Merge emit one nested list of the three transforms.
cmb_tfm_t2_to_ds_atlas = pe.Node(interface=util.Merge(3, axis='hstack'), name='cmb_tfm_t2_to_ds_atlas')
# from structural to atlas (with struct resolution)
preproc.connect(structs_to_atlas_coreg_output, ('forward_transforms', get_sec), cmb_tfm_t2_to_ds_atlas, 'in1')
preproc.connect(structs_to_atlas_coreg_output, ('forward_transforms', get_first), cmb_tfm_t2_to_ds_atlas, 'in2')
# from atlas in struct resolution to t2 resolution
preproc.connect(fsl2ras_downsample_atlas_from_struct_to_func, 'itk_transform', cmb_tfm_t2_to_ds_atlas, 'in3')
# apply transformation from t2 all the way to caltech atlas (in func resolution)
# NOTE(review): plain Node here (vs. MapNode above); its `reference_image`
# is not connected in this view — presumably set elsewhere. Confirm.
t2_to_ds_atlas = pe.Node(interface=ants.WarpImageMultiTransform(use_nearest = True), name = 't2_to_ds_atlas')
preproc.connect(maskT2, 'out_file', t2_to_ds_atlas, 'input_image')