def __init__(self):
    super(PatternSnipeLabeling, self).__init__()
    self.add_trait('graphs', traits.List(
        traits.File(output=False), desc='graphs to classify'))
    self.add_trait('traindata_file', traits.File(
        output=False,
        desc='file (.json) storing the data extracted from the training'
             ' base graphs'))
    self.add_trait('param_file', traits.File(
        output=False,
        desc='file (.json) storing the hyperparameters'
             ' (OPM number, patch sizes)'))
    self.add_trait('num_cpu', traits.Int(
        1, output=False, optional=True,
        desc='number of processes that can be used to parallelize the'
             ' calculations'))
    self.add_trait('result_file', traits.File(
        output=True,
        desc='file (.csv) with the predicted class (y_pred) for each of'
             ' the input graphs'))
def __init__(self):
    super(ErrorComputation, self).__init__()
    self.add_trait('t1mri', traits.File(output=False, desc='MRI scan'))
    self.add_trait('true_graph', traits.File(
        output=False, desc='corresponding manually labeled graph'))
    self.add_trait('labeled_graphs', traits.List(
        traits.File(output=False),
        desc='corresponding set of automatically labeled graphs'))
    self.add_trait('sulci_side_list', traits.List(
        traits.Str(output=False),
        desc='list of sulci (e.g. S.C._right) considered when computing'
             ' the error rates. It should not contain the labels'
             ' "unknown", "ventricle_left" or "ventricle_right".'))
    self.add_trait('error_file', traits.File(
        output=True,
        desc='file (.csv) storing the error rates for each labeled graph'))
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. Note: these MUST match the subject id's in the "
                                "Freesurfer directory. For simplicity, the subject id's should "
                                "also match the location of individual functional files.")
    fwhm = traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')
    unwarped_brain_template = traits.String('%s/smri/unwarped_brain/*.nii*')
    affine_transformation_template = traits.String('%s/smri/affine_transformation/*.nii*')
    warp_field_template = traits.String('%s/smri/warped_field/*.nii*')

    # Normalization
    standard_transform_template = traits.File(mandatory=True,
                                              desc='Standard template to warp to')
    standard_norm_template = traits.File()
    standard_warp_field_template = traits.File()
    standard_affine_transformation_template = traits.File()

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
def __init__(self):
    super(PatternSVMTraining, self).__init__()
    self.add_trait('graphs', traits.List(
        traits.File(output=False), desc='training base graphs'))
    self.add_trait('pattern', traits.Str(
        output=False,
        desc='vertex name representing the searched pattern'))
    self.add_trait('names_filter', traits.ListStr(
        output=False,
        desc='list of vertex names used for the registration of the'
             ' patches'))
    self.add_trait('step_1', traits.Bool(
        True, output=False, optional=True,
        desc='perform the data extraction step from the graphs'))
    self.add_trait('step_2', traits.Bool(
        True, output=False, optional=True,
        desc='perform the hyperparameter tuning step'
             ' (C, gamma, initial translations)'))
    self.add_trait('step_3', traits.Bool(
        True, output=False, optional=True,
        desc='perform the model training step'))
    self.add_trait('traindata_file', traits.File(
        output=True,
        desc='file (.json) storing the data extracted from the training'
             ' base graphs'))
    self.add_trait('param_file', traits.File(
        output=True,
        desc='file (.json) storing the hyperparameters'
             ' (C, gamma, initial translations)'))
    self.add_trait('clf_file', traits.File(
        output=True,
        desc='file (.sav) storing the trained SVM classifier'))
    self.add_trait('scaler_file', traits.File(
        output=True, desc='file (.sav) storing the scaler'))
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(desc="Base directory of field-map data (should be subject-independent). "
                               "Set this value to None if you don't want fieldmap distortion correction")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. Note: these MUST match the subject id's in the "
                                "Freesurfer directory. For simplicity, the subject id's should "
                                "also match the location of individual functional files.")
    func_template = traits.String('%s/functional.nii.gz')
    run_datagrabber_without_submitting = traits.Bool(
        desc="Run the datagrabber without submitting to the cluster")
    timepoints_to_remove = traits.Int(0, usedefault=True)
    do_slicetiming = Bool(True, usedefault=True,
                          desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    order = traits.Enum('motion_slicetime', 'slicetime_motion', usedefault=True)
    TR = traits.Float(mandatory=True, desc="TR of functional")
    motion_correct_node = traits.Enum('nipy', 'fsl', 'spm', 'afni',
                                      desc="motion correction algorithm to use",
                                      usedefault=True)
    csf_prob = traits.File(desc='CSF probability map')
    grey_prob = traits.File(desc='grey-matter probability map')
    white_prob = traits.File(desc='white-matter probability map')

    # Artifact Detection
    norm_thresh = traits.Float(1, min=0, usedefault=True,
                               desc="norm threshold for artifact detection")
    z_thresh = traits.Float(3, min=0, usedefault=True,
                            desc="z threshold for artifact detection")

    # Smoothing
    fwhm = traits.Float(6.0, usedefault=True)
    save_script_only = traits.Bool(False)

    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        for s in self.subjects:
            path = os.path.join(self.base_dir, self.func_template % s)
            if not os.path.exists(path):
                print("ERROR:", path, "does NOT exist!")
                break
            else:
                print(path, "exists!")
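# A hedged aside (not from the source): the *_template traits above are
# plain %-format strings that the datagrabber expands per subject. The
# subject id below is hypothetical.
func_template = '%s/functional.nii.gz'
assert func_template % 'sub001' == 'sub001/functional.nii.gz'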
def __init__(self):
    super(PatternSnipeTraining, self).__init__()
    self.add_trait('graphs', traits.List(
        traits.File(output=False), desc='training base graphs'))
    self.add_trait('pattern', traits.Str(
        output=False,
        desc='vertex name representing the searched pattern'))
    self.add_trait('names_filter', traits.ListStr(
        output=False,
        desc='list of vertex names defining the region of interest'))
    self.add_trait('num_cpu', traits.Int(
        1, output=False, optional=True,
        desc='number of processes that can be used to parallelize the'
             ' calculations'))
    self.add_trait('step_1', traits.Bool(
        True, output=False, optional=True,
        desc='perform the data extraction step from the graphs'))
    self.add_trait('step_2', traits.Bool(
        True, output=False, optional=True,
        desc='perform the hyperparameter tuning step'
             ' (OPM number, patch sizes)'))
    self.add_trait('traindata_file', traits.File(
        output=True,
        desc='file (.json) storing the data extracted from the training'
             ' base graphs'))
    self.add_trait('param_file', traits.File(
        output=True,
        desc='file (.json) storing the hyperparameters'
             ' (OPM number, patch sizes)'))
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)

    # Subjects
    #subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
    #    desc="Subject id's. Note: these MUST match the subject id's in the "
    #         "Freesurfer directory. For simplicity, the subject id's should "
    #         "also match the location of individual functional files.")
    #fwhm = traits.List(traits.Float())
    #copes_template = traits.String('%s/preproc/output/fwhm_%s/cope*.nii.gz')
    #varcopes_template = traits.String('%s/preproc/output/fwhm_%s/varcope*.nii.gz')
    #contrasts = traits.List(traits.Str, desc="contrasts")
    datagrabber = traits.Instance(Data, ())

    # Regression
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(
        desc="function named reg_contrasts which takes 0 args and returns contrasts")

    # Normalization
    norm_template = traits.File(mandatory=True, desc='Template of files')

    # Correction
    run_correction = traits.Bool(False)
    z_threshold = traits.Float(2.3)
    connectivity = traits.Int(25)
    do_randomize = traits.Bool(False)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
class Process1(Process):
    ''' This silly process concatenates in an output file the input file
    and the other int input parameter. Not very useful, for sure.
    '''
    param_a = traits.File(output=False, desc='input param A')
    param_b = traits.Int(desc='another parameter B')
    param_c = traits.File(output=True, desc='output param C')

    def _run_process(self):
        with open(self.param_c, 'w') as f:
            print('output file:', file=f)
            # copy the input file (param_a), not the int parameter
            with open(self.param_a) as sf:
                f.write(sf.read())
            print('param B:', self.param_b, file=f)
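# Hedged usage sketch (not from the source): assumes Process1 can be
# instantiated directly and exercised via _run_process(); the real Capsul
# execution API may differ. All paths below are illustrative.
import os
import tempfile

tmp = tempfile.mkdtemp()
src = os.path.join(tmp, 'input.txt')
dst = os.path.join(tmp, 'output.txt')
with open(src, 'w') as f:
    f.write('hello\n')

p = Process1()
p.param_a = src
p.param_b = 42
p.param_c = dst
p._run_process()
with open(dst) as f:
    print(f.read())   # 'output file:', then 'hello', then 'param B: 42'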
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    # Subjects
    #subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
    #    desc="Subject id's. Note: these MUST match the subject id's in the "
    #         "Freesurfer directory. For simplicity, the subject id's should "
    #         "also match the location of individual functional files.")
    #fwhm = traits.List(traits.Float())
    #copes_template = traits.String('%s/preproc/output/fwhm_%s/cope*.nii.gz')
    #varcopes_template = traits.String('%s/preproc/output/fwhm_%s/varcope*.nii.gz')
    #contrasts = traits.List(traits.Str, desc="contrasts")
    datagrabber = traits.Instance(Data, ())

    # Regression
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(
        desc="function named reg_contrasts which takes 0 args and returns contrasts")
    run_mode = traits.Enum("flame1", "ols", "flame12")

    # Normalization
    norm_template = traits.File(desc='Template of files')
    use_mask = traits.Bool(False)
    mask_file = traits.File()

    # Correction
    run_correction = traits.Bool(False)
    p_threshold = traits.Float(0.05)
    z_threshold = traits.Float(2.3)
    connectivity = traits.Int(26)
    do_randomize = traits.Bool(False)
    num_iterations = traits.Int(5000)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(exists=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(exists=True,
                          desc="Base directory of field-map data (should be subject-independent). "
                               "Set this value to None if you don't want fieldmap distortion correction")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    json_sink = Directory(mandatory=False, desc="Location to store json files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. These subjects must match the ones that have"
                                " been run in your preproc config")
    preproc_config = traits.File(desc="preproc config file")
    debug = traits.Bool(True)

    # Preprocessing Info
    use_custom_ROI_list_file = Bool(False, usedefault=True,
                                    desc="True to limit the produced TSNR table to a more"
                                         " selective list of ROIs")
    custom_ROI_list_file = traits.File(
        desc="Enter the full path to your customized FreeSurferColorLUT.txt")

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)

    # Subjects
    subjects = traits.List(desc="subjects")
    split_files = traits.List(traits.File(), desc="list of split files")

    # First Level

    # Advanced options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
class config(HasTraits):
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. Bips expects dicoms to be organized"
                                " by subject id's")
    dicom_dir_template = traits.String('%s/dicoms/')

    # Conversion Options
    embed_meta = traits.Bool(True)
    info_only = traits.Bool(True)
    no_moco = traits.Bool(False, desc="only convert non-moco files")
    use_heuristic = traits.Bool(False)
    heuristic_file = traits.File(desc="heuristic file")
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")

    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)

    # Normalization
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    # Correction
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. Note: these MUST match the subject id's in the "
                                "Freesurfer directory. For simplicity, the subject id's should "
                                "also match the location of individual functional files.")

    # Preprocessing info
    preproc_config = traits.File(desc="preproc json file")

    # Advanced
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
def createOption(name, initialValue):
    """Creates an option whose ``value`` trait type is inferred from
    type(initialValue): bool, int, float, or str (treated as a file path)."""
    option = Option(name=name)
    if type(initialValue) is bool:
        option.add_trait("value", traits.Bool(initialValue))
    elif type(initialValue) is int:
        option.add_trait("value", traits.Int(initialValue))
    elif type(initialValue) is float:
        option.add_trait("value", traits.Float(initialValue))
    elif type(initialValue) is str:
        option.add_trait("value", traits.File(initialValue))
        # need to modify the view; not sure how to make this more elegant
        option.traits_view = traitsui.View(
            traitsui.HGroup(
                traitsui.Item("name", style="readonly", springy=True,
                              show_label=False),
                traitsui.Item("value", show_label=False, springy=True,
                              editor=traitsui.FileEditor(dialog_style='save'))))
    else:
        logger.warning(
            "unrecognised option type ({}) in processor. Using traits.Any "
            "editor and value".format(type(initialValue)))
        option.add_trait("value", traits.Any(initialValue))
    return option
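# Hedged usage sketch (not from the source): assumes Option is a HasTraits
# subclass with a 'name' trait, as createOption implies; option names and
# paths below are illustrative.
fwhm_opt = createOption('fwhm', 6.0)               # value becomes a traits.Float
fwhm_opt.value = 8.0
debug_opt = createOption('debug', False)           # value becomes a traits.Bool
mask_opt = createOption('mask', '/data/mask.nii')  # str -> traits.File + FileEditor view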
class InputData(T.HasTraits):
    dwi_images = nifti_file
    fa_file = nifti_file
    bvec_file = T.File(filter=['*.bvec'])
    bvec_orientation = T.String('IMG', minlen=3, maxlen=3)
    min_signal = T.Float(1)

    @T.on_trait_change('dwi_images')
    def update_files(self):
        dir, file = path.split(self.dwi_images)
        base = file.split(path.extsep, 1)[0]
        if self.fa_file == '':
            self.fa_file = path.join(dir, base + '_fa.nii.gz')
        if self.bvec_file == '':
            self.bvec_file = path.join(dir, base + '.bvec')

    def read_data(self):
        data_img = nib.load(self.dwi_images)
        affine = data_img.get_affine()
        voxel_size = data_img.get_header().get_zooms()[:3]
        fa_img = nib.load(self.fa_file)
        assert data_img.shape[:-1] == fa_img.shape
        bvec, bval = read_bvec_file(self.bvec_file)
        data_ornt = nib.io_orientation(affine)
        if self.bvec_orientation != 'IMG':
            bvec = reorient_vectors(bvec, self.bvec_orientation, data_ornt)
        fa = fa_img.get_data()
        data = data_img.get_data()
        return data, voxel_size, affine, fa, bvec, bval
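# Hedged usage sketch (not from the source; the path is hypothetical, and
# nibabel plus the companion FA and .bvec files must exist for read_data()
# to succeed).
inp = InputData(dwi_images='/data/sub01/dwi.nii.gz')
# update_files() fired on the trait assignment and filled the empty defaults:
#   inp.fa_file   -> '/data/sub01/dwi_fa.nii.gz'
#   inp.bvec_file -> '/data/sub01/dwi.bvec'
data, voxel_size, affine, fa, bvec, bval = inp.read_data()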
def __init__(self, pipeline, name, input_names=['inputs'],
             output_names=['output_%d'], input_types=None):
    in_traits = []
    out_traits = [{'name': 'lengths', 'optional': True}]
    if input_types:
        ptypes = input_types
    else:
        ptypes = [traits.File(traits.Undefined, output=False)] \
            * len(input_names)
    self.input_types = ptypes
    for tr in input_names:
        in_traits.append({'name': tr, 'optional': False})
    super(MapNode, self).__init__(pipeline, name, in_traits, out_traits)
    for tr, ptype in zip(input_names, ptypes):
        self.add_trait(tr, traits.List(ptype, output=False))
    self.add_trait('lengths', traits.List(
        traits.Int(), output=True, optional=True, desc='lists lengths'))
    self.input_names = input_names
    self.output_names = output_names
    self.lengths = [0] * len(input_names)
    self.set_callbacks()
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    base_dir = Directory(exists=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(exists=True,
                          desc="Base directory of field-map data (should be subject-independent). "
                               "Set this value to None if you don't want fieldmap distortion correction")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Subjects
    datagrabber = traits.Instance(Data, ())
    TR = traits.Float(6.0)
    preproc_config = traits.File(desc="preproc config file")
    json_name = traits.String('preproc_metrics')

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(exists=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(exists=True,
                          desc="Base directory of field-map data (should be subject-independent). "
                               "Set this value to None if you don't want fieldmap distortion correction")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    json_sink = Directory(mandatory=False, desc="Location to store json files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                           desc="Subject id's. These subjects must match the ones that have"
                                " been run in your preproc config")
    preproc_config = traits.File(desc="preproc config file")
    debug = traits.Bool(True)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)

    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)

    # Normalization
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    # Correction
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
def __init__(self):
    super(LabelResample, self).__init__()
    self.add_trait('input_image', traits.File(
        output=False, desc='Labelled image to transform'))
    self.add_trait('transformation', traits.File(
        output=False, optional=True, desc='Transformation file (.trm)'))
    self.add_trait('sx', traits.Float(
        -1, output=False, desc='Output resolution (X axis)'))
    self.add_trait('sy', traits.Float(
        -1, output=False, desc='Output resolution (Y axis)'))
    self.add_trait('sz', traits.Float(
        -1, output=False, desc='Output resolution (Z axis)'))
    self.add_trait('background', traits.Int(
        0, output=False, desc='Background value/label'))
    self.add_trait('output_image', traits.File(
        output=True, desc='Resampled labelled image'))
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(
        desc="freesurfer directory. subject id's should be the same")
    save_script_only = traits.Bool(False)

    # DataGrabber
    datagrabber = traits.Instance(Data, ())

    # segstats
    use_reg = traits.Bool(True)
    inverse_reg = traits.Bool(True)
    use_standard_label = traits.Bool(
        False, desc="use same label file for all subjects")
    label_file = traits.File()
    use_annotation = traits.Bool(
        False,
        desc="use same annotation file for all subjects (will warp to"
             " subject space)")
    use_subject_annotation = traits.Bool(
        False,
        desc="you need to change the datagrabber to have outputs"
             " lh_annotation and rh_annotation")
    annot_space = traits.String("fsaverage5",
                                desc="subject space of annot file")
    lh_annotation = traits.File()
    rh_annotation = traits.File()
    color_table_file = traits.Enum("Default", "Color_Table",
                                   "GCA_color_table", "None")
    color_file = traits.File()
    proj = traits.BaseTuple(("frac", 0, 1, 0.1),
                            traits.Enum("abs", "frac"),
                            traits.Float(), traits.Float(), traits.Float())
    statname = traits.Str('segstats1', desc="description of the segstat")
def __init__(self):
    super(PatternDeepLabeling, self).__init__()
    self.add_trait('graphs', traits.List(
        traits.File(output=False), desc='graphs to classify'))
    self.add_trait('model_file', traits.File(
        output=False,
        desc='file (.mdsm) storing neural network parameters'))
    self.add_trait('param_file', traits.File(
        output=False,
        desc='file (.json) storing the hyperparameters'
             ' (bounding_box, learning rate and momentum)'))
    self.add_trait('result_file', traits.File(
        output=True,
        desc='file (.csv) with the predicted class (y_pred) for each of'
             ' the input graphs'))
def build_node(cls, pipeline, name, conf_controller):
    t = None
    if conf_controller.param_type == 'Str':
        t = traits.Str(traits.Undefined)
    elif conf_controller.param_type == 'File':
        t = traits.File(traits.Undefined)
    elif conf_controller.param_type not in (None, traits.Undefined):
        t = getattr(traits, conf_controller.param_type)()
    node = CrossValidationFoldNode(pipeline, name, input_type=t)
    return node
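# Hedged illustration (not from the source) of the string-to-trait dispatch
# used by build_node: any trait class name available on traits.api can be
# looked up with getattr and instantiated. FakeController is a hypothetical
# stand-in for conf_controller.
import traits.api as traits

class FakeController(object):
    param_type = 'Float'

t = getattr(traits, FakeController.param_type)()  # a traits.Float instance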
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)

    # Subjects
    datagrabber = traits.Instance(Data, ())

    # Normalization
    norm_template = traits.File(mandatory=True, desc='Template to warp to')
    use_nearest = traits.Bool(False, desc="use nearest neighbor interpolation")
    do_segment = traits.Bool(True)
    surf_dir = traits.Directory()
    moving_images_4D = traits.Bool(True, usedefault=True,
                                   desc="True if your moving image inputs are time series"
                                        " images, False if they are 3-dimensional")

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)

    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        subs = self.subjects
        templates = [self.inputs_template, self.meanfunc_template,
                     self.fsl_mat_template, self.unwarped_brain_template,
                     self.affine_transformation_template,
                     self.warp_field_template]
        for s in subs:
            for t in templates:
                try:
                    temp = glob(os.path.join(self.base_dir, t % s))
                except TypeError:
                    # templates with two placeholders need (subject, fwhm)
                    temp = []
                    for f in self.fwhm:
                        temp.append(glob(os.path.join(self.base_dir,
                                                      t % (s, f))))
                print(temp)
def edition_widget(engine, environment):
    '''Edition GUI for FSL config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        controller = widget.controller_widget.controller
        with widget.engine.settings as session:
            conf = session.config('fsl', widget.environment)
            values = {'config_id': 'fsl'}
            for k in ('directory', 'config', 'prefix'):
                value = getattr(controller, k)
                if value is traits.Undefined:
                    value = None
                values[k] = value
            if conf is None:
                session.new_config('fsl', widget.environment, values)
            else:
                for k, value in values.items():
                    if k == 'config_id':
                        continue
                    setattr(conf, k, values[k])

    controller = Controller()
    controller.add_trait(
        'directory',
        traits.Directory(traits.Undefined,
                         desc='Directory where FSL is installed'))
    controller.add_trait(
        'config',
        traits.File(traits.Undefined, output=False,
                    desc='Parameter to specify the fsl.sh path'))
    controller.add_trait(
        'prefix',
        traits.String(traits.Undefined,
                      desc='Prefix to add to FSL commands'))

    conf = engine.settings.select_configurations(environment, {'fsl': 'any'})
    if conf:
        fconf = conf.get('capsul.engine.module.fsl', {})
        controller.directory = fconf.get('directory', traits.Undefined)
        controller.config = fconf.get('config', traits.Undefined)
        controller.prefix = fconf.get('prefix', traits.Undefined)

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    widget.accept = types.MethodType(validate_config, widget)
    return widget
def build_node(cls, pipeline, name, conf_controller):
    t = None
    if conf_controller.param_type == 'Str':
        t = traits.Str(traits.Undefined)
    elif conf_controller.param_type == 'File':
        t = traits.File(traits.Undefined)
    elif conf_controller.param_type not in (None, traits.Undefined):
        t = getattr(traits, conf_controller.param_type)()
    node = CVFilterNode(pipeline, name, conf_controller.is_output,
                        input_type=t)
    return node
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())

    # Regression
    run_one_sample_T_test = traits.Bool(True)
    run_regression = traits.Bool()
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(
        desc="function named reg_contrasts which takes 0 args and returns contrasts")
    use_regressors = traits.Bool()
    estimation_method = traits.Enum('Classical', 'Bayesian', 'Bayesian2')
    include_intercept = traits.Bool(True)

    # Normalization
    norm_template = traits.File(desc='Template of files')
    use_mask = traits.Bool(False)
    mask_file = traits.File(desc='already binarized mask file to use')

    # Correction
    p_threshold = traits.Float(0.05)
    height_threshold = traits.Float(0.05)
    min_cluster_size = traits.Int(25)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(
        desc="freesurfer directory. subject id's should be the same")
    save_script_only = traits.Bool(False)

    # Data
    subject_id = traits.String()
    contrast = traits.File()
    mask_contrast = traits.File()
    use_contrast_mask = traits.Bool(True)
    reg_file = traits.File()
    mean_image = traits.File()
    background_thresh = traits.Float(0.5)
    # 35 FreeSurfer regions
    roi = traits.List(
        ['superiortemporal', 'bankssts'],
        traits.Enum('superiortemporal', 'bankssts',
                    'caudalanteriorcingulate', 'caudalmiddlefrontal',
                    'corpuscallosum', 'cuneus', 'entorhinal', 'fusiform',
                    'inferiorparietal', 'inferiortemporal',
                    'isthmuscingulate', 'lateraloccipital',
                    'lateralorbitofrontal', 'lingual',
                    'medialorbitofrontal', 'middletemporal',
                    'parahippocampal', 'paracentral', 'parsopercularis',
                    'parsorbitalis', 'parstriangularis', 'pericalcarine',
                    'postcentral', 'posteriorcingulate', 'precentral',
                    'precuneus', 'rostralanteriorcingulate',
                    'rostralmiddlefrontal', 'superiorfrontal',
                    'superiorparietal', 'supramarginal', 'frontalpole',
                    'temporalpole', 'transversetemporal', 'insula'),
        usedefault=True)
    thresh = traits.Float(1.5)
def __init__(self):
    super(PatternSVMLabeling, self).__init__()
    self.add_trait('graphs', traits.List(
        traits.File(output=False), desc='graphs to classify'))
    self.add_trait('clf_file', traits.File(
        output=False,
        desc='file (.sav) storing the trained SVM classifier'))
    self.add_trait('scaler_file', traits.File(
        output=False, desc='file (.sav) storing the scaler'))
    self.add_trait('param_file', traits.File(
        output=False,
        desc='file (.json) storing the hyperparameters'
             ' (C, gamma, initial translations)'))
    self.add_trait('result_file', traits.File(
        output=True,
        desc='file (.csv) with the predicted class (y_pred) for each of'
             ' the input graphs'))