# Auto-generated nipype interface tests (checkspecs style). Imports below are
# assumed from nipype's testing layout for standalone use.
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import UnaryMaths


def test_UnaryMaths_outputs():
    output_map = dict(out_file=dict())
    outputs = UnaryMaths.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
def test_UnaryMaths_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        internal_datatype=dict(
            argstr='-dt %s',
            position=1,
        ),
        nan2zeros=dict(
            argstr='-nan',
            position=3,
        ),
        operation=dict(
            argstr='-%s',
            mandatory=True,
            position=4,
        ),
        out_file=dict(
            argstr='%s',
            genfile=True,
            hash_files=False,
            position=-2,
        ),
        output_datatype=dict(
            argstr='-odt %s',
            position=-1,
        ),
        output_type=dict(),
        terminal_output=dict(nohash=True, ),
    )
    inputs = UnaryMaths.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
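# The functions above are nose-style generator tests: each `yield` produces
# one (assert_func, actual, expected) case. A minimal sketch of how such a
# generator test executes, assuming only the imports above -- this is how a
# nose-style runner drives it, not part of the original file:
for case in test_UnaryMaths_inputs():
    assert_func, actual, expected = case
    assert_func(actual, expected)  # raises AssertionError on any mismatch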
def Couple_Preproc_Pipeline(base_dir=None, output_dir=None, subject_id=None,
                            spm_path=None):
    """Create a preprocessing workflow for the Couples Conflict Study using nipype.

    Args:
        base_dir: path to data folder where the raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run

    """
    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import (Plot_Coregistration_Montage,
                                    PlotRealignmentParameters,
                                    Create_Covariates)
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get file names for the different scan types and parse them into
    # separate processing streams.
    datasource = Node(interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['struct', 'ap', 'pa']),
        name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
        'ap': '%s/Study*/distortion_corr_32ch_ap*',
        'pa': '%s/Study*/distortion_corr_32ch_pa*'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'ap': [['subject_id']],
        'pa': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # Iterate over functional scans to define paths
    scan_file_list = glob.glob(
        os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    # Exclude SBRef scans for now.
    func_list = [s for s in func_list if "SBRef" not in s]
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create separate converter nodes for each type of file. (Distortion
    # correction scans need to be converted before the functionals.)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2),
                              infields=['in1', 'in2'],
                              name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                              'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                                    'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    # Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    # Coregister - 12 parameters
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    # Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    # Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    # Plot normalization check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    # Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    # remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    # Smooth
    # implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    # Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, ap_dcm2nii, [('ap', 'source_dir')]),
        (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
        (datasource, s_dcm2nii, [('struct', 'source_dir')]),
        (func_source, f_dcm2nii, [('scan', 'source_dir')]),
        (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
        (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
        (merge_to_file_list, merger, [('out', 'in_files')]),
        (merger, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (f_dcm2nii, trim, [('converted_files', 'in_file')]),
        (trim, apply_topup, [('out_file', 'in_files')]),
        (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
        (abs_maths, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files',
                                                'wra_img')]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink, [('coregistered_source', 'structural.@struct')]),
        (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
        (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')])
    ])
    return workflow
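# A minimal usage sketch for the workflow factory above. The paths and
# subject ID are placeholders, and the MultiProc plugin settings are an
# assumption, not part of the original pipeline:
if __name__ == '__main__':
    wf = Couple_Preproc_Pipeline(base_dir='/data/couples',
                                 output_dir='/data/couples/processed',
                                 subject_id='sub001',
                                 spm_path='/opt/spm12')
    wf.write_graph(dotfilename='preproc_graph')  # optional: inspect the DAG
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})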
def segmentation_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
    pipeline = self.create_pipeline(
        name='ute1_segmentation',
        inputs=[DatasetSpec('ute1_registered', nifti_format)],
        outputs=[
            DatasetSpec('air_mask', nifti_gz_format),
            DatasetSpec('bones_mask', nifti_gz_format)
        ],
        desc="Segmentation of the first echo UTE image",
        version=1,
        citations=(spm_cite, matlab_cite),
        **kwargs)

    segmentation = pipeline.create_node(
        NewSegment(),
        name='ute1_registered_segmentation',
        requirements=[matlab2015_req, spm12_req],
        wall_time=480)
    pipeline.connect_input('ute1_registered', segmentation, 'channel_files')
    segmentation.inputs.affine_regularization = 'none'
    tissue1 = ((self.tpm_path, 1), 1, (True, False), (False, False))
    tissue2 = ((self.tpm_path, 2), 1, (True, False), (False, False))
    tissue3 = ((self.tpm_path, 3), 2, (True, False), (False, False))
    tissue4 = ((self.tpm_path, 4), 3, (True, False), (False, False))
    tissue5 = ((self.tpm_path, 5), 4, (True, False), (False, False))
    tissue6 = ((self.tpm_path, 6), 3, (True, False), (False, False))
    segmentation.inputs.tissues = [
        tissue1, tissue2, tissue3, tissue4, tissue5, tissue6
    ]

    select_bones_pm = pipeline.create_node(
        Select(),
        name='select_bones_pm_from_SPM_new_segmentation',
        requirements=[],
        wall_time=5)
    pipeline.connect(segmentation, 'native_class_images', select_bones_pm,
                     'inlist')
    select_bones_pm.inputs.index = 3

    select_air_pm = pipeline.create_node(
        Select(),
        name='select_air_pm_from_SPM_new_segmentation',
        requirements=[],
        wall_time=5)
    pipeline.connect(segmentation, 'native_class_images', select_air_pm,
                     'inlist')
    select_air_pm.inputs.index = 5

    threshold_bones = pipeline.create_node(
        Threshold(),
        name='bones_probabilistic_map_thresholding',
        requirements=[fsl5_req],
        wall_time=5)
    pipeline.connect(select_bones_pm, 'out', threshold_bones, 'in_file')
    threshold_bones.inputs.output_type = "NIFTI_GZ"
    threshold_bones.inputs.direction = 'below'
    threshold_bones.inputs.thresh = 0.2

    binarize_bones = pipeline.create_node(
        UnaryMaths(),
        name='bones_probabilistic_map_binarization',
        requirements=[fsl5_req],
        wall_time=5)
    pipeline.connect(threshold_bones, 'out_file', binarize_bones, 'in_file')
    binarize_bones.inputs.output_type = "NIFTI_GZ"
    binarize_bones.inputs.operation = 'bin'

    threshold_air = pipeline.create_node(
        Threshold(),
        name='air_probabilistic_maps_thresholding',
        requirements=[fsl5_req],
        wall_time=5)
    pipeline.connect(select_air_pm, 'out', threshold_air, 'in_file')
    threshold_air.inputs.output_type = "NIFTI_GZ"
    threshold_air.inputs.direction = 'below'
    threshold_air.inputs.thresh = 0.1

    binarize_air = pipeline.create_node(
        UnaryMaths(),
        name='air_probabilistic_map_binarization',
        requirements=[fsl5_req],
        wall_time=5)
    pipeline.connect(threshold_air, 'out_file', binarize_air, 'in_file')
    binarize_air.inputs.output_type = "NIFTI_GZ"
    binarize_air.inputs.operation = 'bin'

    pipeline.connect_output('bones_mask', binarize_bones, 'out_file')
    pipeline.connect_output('air_mask', binarize_air, 'out_file')

    pipeline.assert_connected()
    return pipeline
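# The Threshold -> UnaryMaths pairs above are the standard FSL recipe for
# turning a probabilistic tissue map into a binary mask. A standalone sketch
# of the same two steps using nipype's FSL wrappers (file names are
# hypothetical; requires FSL on the PATH):
from nipype.interfaces.fsl.maths import Threshold as FSLThreshold
from nipype.interfaces.fsl.maths import UnaryMaths as FSLUnaryMaths

# Zero out voxels below 0.2, then binarize; roughly equivalent to
#   fslmaths bones_pm.nii.gz -thr 0.2 tmp.nii.gz && fslmaths tmp.nii.gz -bin mask.nii.gz
thr = FSLThreshold(in_file='bones_pm.nii.gz', thresh=0.2,
                   direction='below', output_type='NIFTI_GZ')
thresholded = thr.run().outputs.out_file
binarize = FSLUnaryMaths(in_file=thresholded, operation='bin',
                         output_type='NIFTI_GZ')
bones_mask = binarize.run().outputs.out_file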
# Imports assumed for this excerpt. The helper function below is truncated at
# the top in the original; its signature is reconstructed from the Function
# node's input_names/output_names.
from nipype.pipeline.engine import Node
from nipype.interfaces.utility import Function
from nipype.interfaces.fsl.maths import (ApplyMask, MultiImageMaths,
                                         Threshold, UnaryMaths)


def extract_tissue_c123(c1, c2, c3):
    # Extract and Return Values
    first_tissue = c1
    string_list = [c2, c3]
    return (first_tissue, string_list)


pre_merge = Node(Function(input_names=['c1', 'c2', 'c3'],
                          output_names=['first_tissue', 'string_list'],
                          function=extract_tissue_c123),
                 name='Pre_Merge_Tissues')

merge_tissues = Node(MultiImageMaths(), name="Merge_C1_C2_C3")
merge_tissues.inputs.op_string = "-add %s -add %s -thr 0.05 -bin"

fill_mask = Node(UnaryMaths(), name="FillHoles_Mask")
fill_mask.inputs.operation = "fillh"

apply_mask_t1 = Node(ApplyMask(), name="ApplyMask_T1")
apply_mask_flair = Node(ApplyMask(), name="ApplyMask_FLAIR")
apply_mask_swi = Node(ApplyMask(), name="ApplyMask_SWI")
apply_mask_bct1 = Node(ApplyMask(), name="ApplyMask_BiasCorrect_T1")

### SNR
# Tissue 1-3 mask construction and HeadMask construction.
con_tissue_mask_1 = Node(Threshold(), name="Tissue1_Mask")
con_tissue_mask_1.inputs.thresh = 0.1
con_tissue_mask_1.inputs.args = "-bin"

con_tissue_mask_2 = Node(Threshold(), name="Tissue2_Mask")
con_tissue_mask_2.inputs.thresh = 0.1
con_tissue_mask_2.inputs.args = "-bin"
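# The nodes above are defined but not wired. A hypothetical sketch of the
# first-stage connections, assuming the first tissue map feeds
# MultiImageMaths as in_file and the other two as operand_files (matching the
# two %s slots in the op string); the workflow name and connections are
# illustrative, not from the original source:
from nipype.pipeline.engine import Workflow

wf = Workflow(name='mask_construction')
wf.connect([
    (pre_merge, merge_tissues, [('first_tissue', 'in_file'),
                                ('string_list', 'operand_files')]),
    (merge_tissues, fill_mask, [('out_file', 'in_file')]),
    (fill_mask, apply_mask_t1, [('out_file', 'mask_file')]),
])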