class config(baseconfig):
    """Configuration for the temporal-filtering / nuisance-regression workflow.

    Frequencies are in Hz; the reg_params tuple selects which nuisance
    regressors are included, in the fixed order documented per-element below.
    """

    # Band-pass cutoffs (Hz) for the temporal filter.
    highpass_freq = traits.Float()
    lowpass_freq = traits.Float()
    # Which implementation performs the temporal filtering.
    filtering_algorithm = traits.Enum("fsl", "IIR", "FIR", "Fourier")
    # Nuisance-regressor selection flags (order is significant).
    reg_params = traits.BaseTuple(
        traits.Bool(desc="motion parameters"),
        traits.Bool(desc="norm components"),
        traits.Bool(desc="noise components (CompCor)"),
        # typo fixed: was 'gloabl signal (NOT RECOMMENDED!)'
        traits.Bool(desc='global signal (NOT RECOMMENDED!)'),
        traits.Bool(desc="art_outliers"),
        traits.Bool(desc="motion derivatives"))
    do_despike = traits.Bool(False, usedefault=True)
    do_whitening = traits.Bool(False, usedefault=True)
    # NOTE(review): no usedefault here, unlike the flags above — presumably
    # intentional, but verify against how baseconfig consumes metadata.
    use_metadata = traits.Bool(True)
    update_hash = traits.Bool(False)
class config(BaseWorkflowConfig):
    """Configuration for the segstats workflow (FreeSurfer segmentation stats)."""

    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(
        desc="freesurfer directory. subject id's should be the same")
    save_script_only = traits.Bool(False)

    # DataGrabber
    datagrabber = traits.Instance(Data, ())

    # segstats
    use_reg = traits.Bool(True)
    inverse_reg = traits.Bool(True)
    use_standard_label = traits.Bool(
        False, desc="use same label file for all subjects")
    label_file = traits.File()
    # desc fixed: closing parenthesis was missing
    use_annotation = traits.Bool(
        False,
        desc="use same annotation file for all subjects "
             "(will warp to subject space)")
    # desc fixed: 'datragrabber' -> 'datagrabber'
    use_subject_annotation = traits.Bool(
        False,
        desc="you need to change datagrabber to "
             "have outputs lh_annotation and rh_annotation")
    annot_space = traits.String("fsaverage5",
                                desc="subject space of annot file")
    lh_annotation = traits.File()
    rh_annotation = traits.File()
    color_table_file = traits.Enum("Default", "Color_Table",
                                   "GCA_color_table", "None")
    color_file = traits.File()
    # (type, start, stop, step) projection spec; default samples 0..1 in 0.1
    # fractional steps.
    proj = traits.BaseTuple(("frac", 0, 1, 0.1),
                            traits.Enum("abs", "frac"),
                            traits.Float(), traits.Float(), traits.Float())
    statname = traits.Str('segstats1', desc="description of the segstat")
class config(baseconfig):
    """Configuration for filtering plus segmentation-type selection.

    Extends the plain filtering config with a datagrabber, a choice of
    segmentation backend, and the motion/slice-time correction order.
    """

    # Band-pass cutoffs (Hz) for the temporal filter.
    highpass_freq = traits.Float()
    lowpass_freq = traits.Float()
    # Which implementation performs the temporal filtering.
    filtering_algorithm = traits.Enum("fsl", "IIR", "FIR", "Fourier")
    # Nuisance-regressor selection flags (order is significant).
    reg_params = traits.BaseTuple(
        traits.Bool(desc="motion parameters"),
        traits.Bool(desc="norm components"),
        traits.Bool(desc="noise components (CompCor)"),
        # typo fixed: was 'gloabl signal (NOT RECOMMENDED!)'
        traits.Bool(desc='global signal (NOT RECOMMENDED!)'),
        traits.Bool(desc="art_outliers"),
        traits.Bool(desc="motion derivatives"))
    do_despike = traits.Bool(False, usedefault=True)
    do_whitening = traits.Bool(False, usedefault=True)
    use_metadata = traits.Bool(True)
    update_hash = traits.Bool(False)
    datagrabber = traits.Instance(Data, ())
    segmentation_type = traits.Enum('FAST', 'Atropos')
    # Bug fixed: keyword was 'use_default'; every other trait in this file
    # spells the metadata 'usedefault', so the misspelling was ignored.
    order = traits.Enum('motion_slicetime', 'slicetime_motion',
                        usedefault=True)
    save_script_only = traits.Bool(False)
class config(HasTraits):
    """Standalone segstats workflow configuration (directories, execution
    plugin settings, and segstats options).

    Unlike the BaseWorkflowConfig variant, this class carries its own
    working/crash-dir and plugin-execution traits.
    """

    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(
        desc="freesurfer directory. subject id's should be the same")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    # Reconstructed: this desc string was broken across source lines.
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether where and if the workflow keeps its '
             'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(30.0)

    # DataGrabber
    datagrabber = traits.Instance(Data, ())

    # segstats
    use_reg = traits.Bool(True)
    inverse_reg = traits.Bool(True)
    use_standard_label = traits.Bool(
        False, desc="use same label file for all subjects")
    label_file = traits.File()
    # desc fixed: closing parenthesis was missing
    use_annotation = traits.Bool(
        False,
        desc="use same annotation file for all subjects "
             "(will warp to subject space)")
    # desc fixed: 'datragrabber' -> 'datagrabber'
    use_subject_annotation = traits.Bool(
        False,
        desc="you need to change datagrabber to "
             "have outputs lh_annotation and rh_annotation")
    annot_space = traits.String("fsaverage5",
                                desc="subject space of annot file")
    lh_annotation = traits.File()
    rh_annotation = traits.File()
    color_table_file = traits.Enum("Default", "Color_Table",
                                   "GCA_color_table", "None")
    color_file = traits.File()
    # (type, start, stop, step) projection spec; default samples 0..1 in 0.1
    # fractional steps.
    proj = traits.BaseTuple(("frac", 0, 1, 0.1),
                            traits.Enum("abs", "frac"),
                            traits.Float(), traits.Float(), traits.Float())
    statname = traits.Str('segstats1', desc="description of the segstat")
class config(BaseWorkflowConfig): uuid = traits.Str(desc="UUID") desc = traits.Str(desc='Workflow description') # Directories base_dir = Directory( os.path.abspath('.'), mandatory=True, desc='Base directory of data. (Should be subject-independent)') sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results") field_dir = Directory( desc="Base directory of field-map data (Should be subject-independent) \ Set this value to None if you don't want fieldmap distortion correction" ) surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory") # Subjects subjects = traits.List( traits.Str, mandatory=True, usedefault=True, desc="Subject id's. Note: These MUST match the subject id's in the \ Freesurfer directory. For simplicity, the subject id's should \ also match with the location of individual functional files." ) func_template = traits.String('%s/functional.nii.gz') run_datagrabber_without_submitting = traits.Bool( desc="Run the datagrabber without \ submitting to the cluster") timepoints_to_remove = traits.Int(0, usedefault=True) # Fieldmap use_fieldmap = Bool( False, mandatory=False, usedefault=True, desc='True to include fieldmap distortion correction. 
Note: field_dir \ must be specified') magnitude_template = traits.String('%s/magnitude.nii.gz') phase_template = traits.String('%s/phase.nii.gz') TE_diff = traits.Float(desc='difference in B0 field map TEs') sigma = traits.Int( 2, desc='2D spatial gaussing smoothing stdev (default = 2mm)') echospacing = traits.Float(desc="EPI echo spacing") # Motion Correction do_slicetiming = Bool(True, usedefault=True, desc="Perform slice timing correction") SliceOrder = traits.List(traits.Int) TR = traits.Float(1.0, mandatory=True, desc="TR of functional") motion_correct_node = traits.Enum( 'nipy', 'fsl', 'spm', 'afni', desc="motion correction algorithm to use", usedefault=True, ) loops = traits.List([5], traits.Int(5), usedefault=True) #between_loops = traits.Either("None",traits.List([5]),usedefault=True) speedup = traits.List([5], traits.Int(5), usedefault=True) # Artifact Detection norm_thresh = traits.Float(1, min=0, usedefault=True, desc="norm thresh for art") z_thresh = traits.Float(3, min=0, usedefault=True, desc="z thresh for art") # Smoothing fwhm = traits.List( [0, 5], traits.Float(), mandatory=True, usedefault=True, desc="Full width at half max. The data will be smoothed at all values \ specified in this list.") smooth_type = traits.Enum("susan", "isotropic", 'freesurfer', usedefault=True, desc="Type of smoothing to use") surface_fwhm = traits.Float( 0.0, desc='surface smoothing kernel, if freesurfer is selected', usedefault=True) # CompCor compcor_select = traits.BaseTuple( traits.Bool, traits.Bool, mandatory=True, desc="The first value in the list corresponds to applying \ t-compcor, and the second value to a-compcor. 
Note: \ both can be true") num_noise_components = traits.Int( 6, usedefault=True, desc="number of principle components of the noise to use") regress_before_PCA = traits.Bool(True) # Highpass Filter hpcutoff = traits.Float(128., desc="highpass cutoff", usedefault=True) #zscore do_zscore = Bool(False) # Advanced Options use_advanced_options = traits.Bool() advanced_script = traits.Code() debug = traits.Bool(False) # Buttons check_func_datagrabber = Button("Check") check_field_datagrabber = Button("Check") def _check_func_datagrabber_fired(self): subs = self.subjects for s in subs: if not os.path.exists( os.path.join(self.base_dir, self.func_template % s)): print "ERROR", os.path.join(self.base_dir, self.func_template % s), "does NOT exist!" break else: print os.path.join(self.base_dir, self.func_template % s), "exists!" def _check_field_datagrabber_fired(self): subs = self.subjects for s in subs: if not os.path.exists( os.path.join(self.field_dir, self.magnitude_template % s)): print "ERROR:", os.path.join(self.field_dir, self.magnitude_template % s), "does NOT exist!" break else: print os.path.join(self.base_dir, self.magnitude_template % s), "exists!" if not os.path.exists( os.path.join(self.field_dir, self.phase_template % s)): print "ERROR:", os.path.join( self.field_dir, self.phase_template % s), "does NOT exist!" break else: print os.path.join(self.base_dir, self.phase_template % s), "exists!"