class WardMetrics(t.HasStrictTraits):
    """Container for Ward-style event/segment evaluation metrics.

    Holds the per-class metric records, the aggregate over all classes,
    and three result tables produced by the evaluation.
    """

    # One metrics record per class.
    class_ward_metrics: ty.List[ClassWardMetrics] = t.List(ClassWardMetrics, [])
    # Metrics aggregated over all classes.
    overall_ward_metrics: ClassWardMetrics = t.Instance(ClassWardMetrics)
    # BUG FIX: the original passed pd.DataFrame() — an *instance* — to
    # t.Instance, which makes a single DataFrame object the shared default
    # for every WardMetrics instance (classic mutable shared default) and
    # allocates it at class-definition time.  Passing the class plus empty
    # constructor args gives each instance its own fresh empty DataFrame,
    # matching the `traits.Instance(Data, ())` convention used elsewhere.
    df_event_scores: pd.DataFrame = t.Instance(pd.DataFrame, ())
    df_event_detailed_scores: pd.DataFrame = t.Instance(pd.DataFrame, ())
    df_segment_2set_results: pd.DataFrame = t.Instance(pd.DataFrame, ())
class FormingProcessView(tr.HasStrictTraits):
    '''Forming process viewer with task tree editor
    '''
    # The forming process whose task tree is shown in the left pane.
    forming_process = tr.Instance(FormingProcess)

    root = tr.Property(tr.Instance(FormingTask))
    '''All FormingTask steps.
    '''

    def _get_root(self):
        # The tree is rooted at the factory task of the forming process.
        return self.forming_process.factory_task

    # Task currently selected in the tree; its UI is shown in the lower pane.
    selected = tr.Instance(IFormingTask)

    # Split view: design tree on top, editor of the selected task below.
    view1 = View(VSplit(Group(
        Item('root',
             editor=FormingTask_tree_editor,
             resizable=True,
             show_label=False),
        label='Design tree',
        scrollable=True,
    ),
        Group(UItem('selected@')),
        dock='tab'),
        dock='tab',
        resizable=True,
        title='Forming Process View',
        width=1.0,
        height=1.0)
class config(HasTraits):
    """Workflow configuration: directories, execution plugin, and
    registration (FLIRT) options."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    save_script_only = traits.Bool(False)

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')

    # Subjects
    # Interpolation scheme — presumably passed to FLIRT (the `name` trait
    # stores flirt mats); TODO confirm against the workflow that consumes it.
    interpolation = traits.Enum('trilinear', 'nearestneighbour', 'sinc', usedefault=True)
    name = traits.String('flirt_output', desc='name of folder to store flirt mats')
    # Separate grabbers for creating vs. applying the transform.
    datagrabber_create = traits.Instance(Data, ())
    datagrabber_apply = traits.Instance(Data, ())
    create_transform = traits.Bool(True)
    apply_transform = traits.Bool(False)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class EpochRecord(t.HasStrictTraits):
    """Record of one training epoch: metrics, learning rate, timings and
    bookkeeping flags."""

    epoch: int = t.Int()
    # Metrics for the training and validation passes of this epoch.
    train: EpochMetrics = t.Instance(EpochMetrics)
    val: EpochMetrics = t.Instance(EpochMetrics)
    lr: float = t.Float()
    # Iteration timings (seconds): CPU time vs. wall-clock time.
    iter_s_cpu: float = t.Float()
    iter_s_wall: float = t.Float()
    should_checkpoint: bool = t.Bool(False)
    done: bool = t.Bool(False)
    stopping_metric: float = t.Float()

    def to_dict(self):
        """Flatten the record into a single dict.

        Scalar fields are copied as-is; the ``train``/``val`` EpochMetrics
        objects are expanded into ``train_<key>`` / ``val_<key>`` entries.
        ``None`` values are skipped throughout.
        """
        # BUG FIX (idiom): `not type(v) == EpochMetrics` replaced with
        # isinstance(), which is the correct type test and also excludes
        # EpochMetrics subclasses from the scalar pass.
        d = {
            k: v
            for k, v in self.__dict__.items()
            if v is not None and not isinstance(v, EpochMetrics)
        }
        for f in ["train", "val"]:
            em = getattr(self, f)
            if em:
                for k, v in em.__dict__.items():
                    if v is not None:
                        d[f"{f}_{k}"] = v
        return d
class HCFF2(tr.HasStrictTraits):
    '''High-Cycle Fatigue Filter
    '''

    # Root of the filter tree shown in the left pane.
    hcf = tr.Instance(HCFFRoot)

    def _hcf_default(self):
        # Root node backed by a fresh file import manager.
        manager = FileImportManager()
        return HCFFRoot(import_manager=manager)

    # Matplotlib figure used for the 2d plots pane.
    figure = tr.Instance(Figure)

    def _figure_default(self):
        fig = Figure(facecolor='white')
        fig.set_tight_layout(True)
        return fig

    # Horizontal split: tree editor on the left, figure on the right.
    traits_view = ui.View(
        ui.HSplit(
            ui.Item(name='hcf',
                    editor=tree_editor,
                    show_label=False,
                    width=0.3
                    ),
            ui.UItem('figure', editor=MPLFigureEditor(),
                     resizable=True,
                     springy=True,
                     label='2d plots')
        ),
        title='HCF Filter',
        resizable=True,
        width=0.6,
        height=0.6
    )
class Conjoint(Model):
    """Conjoint analysis model: collects design/consumer data from the
    owning object and schedules the calculation on a ConjointMachine."""

    # The imput data for calculation
    owner_ref = _traits.WeakRef()
    # design = DataSet()
    design = _traits.DelegatesTo('owner_ref')
    design_vars = _traits.List(_traits.Str())
    liking = DataSet()
    # consumers = DataSet()
    consumers = _traits.DelegatesTo('owner_ref')
    consumers_vars = _traits.List(_traits.Str())

    # Conjoint settings
    model_struct = _traits.Enum('Struct 1', 'Struct 2', 'Struct 3')

    # Conjoint calculation state
    ccs = _traits.Instance(ConjointCalcState, ())
    cm = _traits.Instance(ConjointMachine)
    # depends_on
    # Result is recomputed whenever the selected variables or the model
    # structure change (cached_property below).
    res = _traits.Property(
        depends_on='design_vars, consumers_vars, model_struct')

    def _cm_default(self):
        try:
            return ConjointMachine()
        except RNotFoundException:
            self.ccs.messages = ("Was not able to find and start R.\n"
                                 "You have to check the installation of R")
            self.ccs.edit_traits(kind='livemodal')
            # NOTE(review): this path falls through and the trait default
            # becomes None, so later self.cm accesses will fail — confirm
            # this is the intended degradation when R is missing.

    # Keep local traits in sync with the owning object's selections.
    @_traits.on_trait_change('owner_ref.model_struct')
    def _struc_altered(self, new):
        self.model_struct = new

    @_traits.on_trait_change('owner_ref.sel_design_var')
    def _des_var_altered(self, new):
        self.design_vars = new

    @_traits.on_trait_change('owner_ref.sel_cons_char')
    def _cons_char_altered(self, new):
        self.consumers_vars = new

    @_traits.cached_property
    def _get_res(self):
        # Attach the shared calculation-state object the first time through.
        if not self.cm.run_state:
            self.cm.run_state = self.ccs

        # Map the UI label to the numeric model structure expected by R.
        model = {
            'Struct 1': 1,
            'Struct 2': 2,
            'Struct 3': 3
        }[self.model_struct]

        self.cm.schedule_calculation(self.design,
                                     sorted(self.design_vars),
                                     self.liking, model,
                                     self.consumers,
                                     sorted(self.consumers_vars))
        # Modal progress dialog blocks until the calculation finishes.
        self.ccs.edit_traits(kind='livemodal')
        return self.cm.get_result()
class Preferences(t.HasTraits):
    """Application preferences grouped per subsystem."""

    EELS = t.Instance(EELSConfig)
    EDS = t.Instance(EDSConfig)
    General = t.Instance(GeneralConfig)
    GUIs = t.Instance(GUIs)

    def save(self):
        """Write the current preference template to the defaults file."""
        config = configparser.ConfigParser(allow_no_value=True)
        template2config(template, config)
        # BUG FIX: the original passed a bare open() handle to config.write
        # and never closed it; a context manager guarantees the file is
        # flushed and closed even if write() raises.
        with open(defaults_file, 'w') as f:
            config.write(f)
class Preferences(t.HasTraits):
    """Application preferences grouped per subsystem, with a tabbed
    TraitsUI view for editing them."""

    EELS = t.Instance(EELSConfig)
    EDS = t.Instance(EDSConfig)
    Model = t.Instance(ModelConfig)
    General = t.Instance(GeneralConfig)
    MachineLearning = t.Instance(MachineLearningConfig)

    # One tab per subsystem; each tab embeds the subsystem's own view.
    view = tui.View(
        tui.Group(tui.Item(
            'General',
            style='custom',
            show_label=False, ),
            label='General'),
        tui.Group(tui.Item(
            'Model',
            style='custom',
            show_label=False, ),
            label='Model'),
        tui.Group(tui.Item(
            'EELS',
            style='custom',
            show_label=False, ),
            label='EELS'),
        tui.Group(tui.Item(
            'EDS',
            style='custom',
            show_label=False, ),
            label='EDS'),
        tui.Group(tui.Item(
            'MachineLearning',
            style='custom',
            show_label=False, ),
            label='Machine Learning'),
        title='Preferences',
        handler=PreferencesHandler,
    )

    def gui(self):
        """Open the preferences dialog."""
        self.edit_traits()

    def save(self):
        """Persist the preference template to the defaults file."""
        # NOTE(review): ConfigParser.SafeConfigParser is the Python-2 API
        # (removed in Python 3.12); confirm this module still targets
        # Python 2 before modernising the parser class itself.
        config = ConfigParser.SafeConfigParser(allow_no_value=True)
        template2config(template, config)
        # BUG FIX: the original leaked the open() handle passed to
        # config.write; the context manager closes it deterministically.
        with open(defaults_file, 'w') as f:
            config.write(f)
class EagleHandler(traitsui.Handler):
    """TraitsUI handler wiring the camera-image model to its view and
    reacting to selection/click events."""

    #---------------------------------------------------------------------------
    # State traits
    #---------------------------------------------------------------------------
    model = traits.Instance(CameraImage)
    view = traits.Any
    watchFolderTimer = traits.Instance(Timer)

    #---------------------------------------------------------------------------
    # Handler interface
    #---------------------------------------------------------------------------
    def closed(self, info, is_ok):
        """ Handles a dialog-based user interface being closed by the user.
        Overridden here to stop the timer once the window is destroyed.
        """
        try:
            #stop any previous timer, should only have 1 timer at a time
            logger.info("closing image plot inspector")
        except Exception as e:
            # BUG FIX: e.message was removed in Python 3 (deprecated since
            # 2.6); format the exception object itself instead.
            logger.error("couldn't close: error: %s " % e)
        return

    def init(self, info):
        """Capture model/view references and hook up trait listeners."""
        self.view = info.object
        self.model = info.object.model
        self.model.on_trait_change(self._model_changed, "model_changed")
        self.view.boxSelection2D.on_trait_change(self._box_selection_complete,
                                                 "selection_complete")
        self.view.lineInspectorX.on_trait_change(self._setCentreGuess,
                                                 "mouseClickEvent")
        # BUG FIX: Python-2-only print statements converted to the print()
        # function so the module can be imported under Python 3.
        print("in init")
        print(self.model)
        #self.start_timer()

    def _model_changed(self):
        # Push the updated model into the view, if one is attached.
        print("model changed")
        if self.view is not None:
            print("updating view")
            self.view.update(self.model)

    def _box_selection_complete(self):
        logger.critical("Box selection complete")
        #[self.view.selectedFit.startX,self.view.selectedFit.startY,self.view.selectedFit.endX,self.view.selectedFit.endY] = map(int,self.view.boxSelection2D._get_coordinate_box())

    def _setCentreGuess(self):
        # Read the clicked pixel indices from the image-index metadata.
        x_ndx, y_ndx = self.view._image_index.metadata["selections"]
        logger.debug("centre guess made")
class FilterTool(tr.HasTraits):
    """Interactive tool: plots one data channel and the filtered version of
    it, with a selectable filter type and channel slider."""

    # Raw data, one column per channel.
    data = tr.Array
    max_chans = tr.Int
    # Time axis shared by all channels.
    t = tr.Array
    channel = tr.Int
    # NOTE(review): on Python 3, dict.keys() is a view, not a list —
    # confirm tr.Enum accepts it here, or wrap in list() when porting.
    filter_type = tr.Enum(filter_dict.keys())
    filter = tr.Instance(Filter)
    _ds = tr.Instance(ch.ArrayPlotData)
    _plot = tr.Instance(ch.Plot)

    def _max_chans_default(self):
        # Last valid channel index (columns are 0-based).
        return int(self.data.shape[1] - 1)

    def _filter_default(self):
        return SvdFilter(t=self.t, d=self.data)

    # NOTE(review): these default initializers start with a double
    # underscore (for the private traits _ds/_plot); Python name-mangles
    # such names inside the class body — verify Traits still resolves them
    # as '_ds'/'_plot' defaults on the Traits version in use.
    def __ds_default(self):
        return ch.ArrayPlotData(t=self.t, y=self.data[:, self.channel],
                                yf=self.filter.yf)

    def __plot_default(self):
        # Raw trace in black, filtered trace in red.
        pl = ch.Plot(self._ds)
        pl.plot(('t', 'y'), color='black')
        pl.plot(('t', 'yf'), color='red', line_width=1.2)
        return pl

    @tr.on_trait_change('filter.replot')
    def replot(self):
        # Refresh the filtered trace whenever the filter signals a replot.
        self._ds.set_data('yf', self.filter.yf)

    def _channel_changed(self):
        # Switch both the displayed raw trace and the filter's channel.
        self._ds.set_data('y', self.data[:, self.channel])
        self.filter.chan = int(self.channel)
        self.replot()

    def _filter_type_changed(self, value):
        # Instantiate a fresh filter of the selected type.
        self.filter = filter_dict[value](t=self.t, d=self.data)

    plot_item = ui.Item('_plot', editor=en.ComponentEditor(), show_label=False)
    ch_item = ui.Item('channel', editor=ui.RangeEditor(low=0,
                                                       high_name='max_chans',
                                                       is_float=False))
    settings_group = ui.VGroup([ch_item, 'filter_type', '@filter'])
    traits_view = ui.View(ui.HGroup([plot_item, settings_group]))
class _H5Trees(api.HasTraits):
    """Tree view over one or more HDF5 files; shows the path of the node
    currently selected in the tree."""

    h5_trees = api.Instance(Hdf5FilesNode)
    node = api.Any()
    path = api.Str()

    traits_view = ui.View(
        ui.Group(
            ui.Item(
                'h5_trees',
                editor=_hdf5_tree_editor(selected='node'),
                resizable=True,
            ),
            ui.Item('path', label='Selected node'),
            orientation='vertical',
        ),
        title='Multiple HDF5 file Tree Example',
        buttons=['OK', 'Cancel'],
        resizable=True,
        width=0.3,
        height=0.3,
    )

    def _node_changed(self):
        # Mirror the selected node's path into the text field of the view.
        # BUG FIX: removed a leftover debug print of the same path — it
        # duplicated the UI field and spammed stdout on every selection.
        self.path = self.node.path
class config(HasTraits):
    """Workflow configuration: directories, execution plugin and
    subject-level parameters."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    datagrabber = traits.Instance(Data, ())

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether where and if the workflow keeps its \
intermediary files. True to keep intermediary files. ')
    run_datagrabber_without_submitting = Bool(True)

    # Subjects
    #prep_config = traits.File()
    # Presumably the repetition time in seconds — TODO confirm units.
    tr = traits.Float()
    n_subjects = traits.Int()
    project_name = traits.Str()
class Hist(tr.HasStrictTraits):
    """History recorder bound to a model."""

    model = tr.Instance(IModel)
    # Time step is delegated to (shared with) the bound model.
    tstep = tr.DelegatesTo('model')

    def init_state(self):
        # Base implementation records nothing; subclasses may override.
        pass
class config(BaseWorkflowConfig):
    """Preprocessing-metrics workflow configuration (directories and
    subject-level options); execution traits come from BaseWorkflowConfig."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
Set this value to None if you don't want fieldmap distortion correction"
    )
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Subjects
    datagrabber = traits.Instance(Data, ())
    # Presumably repetition time in seconds — TODO confirm units.
    TR = traits.Float(6.0)
    preproc_config = traits.File(desc="preproc config file")
    json_name = traits.String('preproc_metrics')

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(BaseWorkflowConfig):
    """Surface-extraction workflow configuration; execution traits come
    from BaseWorkflowConfig."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    sink_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())
    # mri_vol2surf-style projection argument.
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py', 'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4', 'fsaverage3', 'fsaverage5',
                              'fsaverage6', 'fsaverage', 'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5], traits.Float(), mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
class config(HasTraits):
    """One-sample t-test (FreeSurfer surface) workflow configuration."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True,
                         desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Execution
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether where and if the workflow keeps its \
intermediary files. True to keep intermediary files. ')
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage", "fsaverage5", "fsaverage6",
                                   "fsaverage4", "subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
class config(HasTraits): uuid = traits.Str(desc="UUID") # Directories working_dir = traits.Directory( mandatory=True, desc="Location of the Nipype working directory") sink_dir = traits.Directory( mandatory=True, desc="Location where the BIP will store the results") crash_dir = traits.Directory(mandatory=False, desc="Location to store crash files") save_script_only = traits.Bool(False) # Execution run_using_plugin = traits.Bool( False, usedefault=True, desc="True to run pipeline with plugin, False to run serially") plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor", usedefault=True, desc="plugin to use, if run_using_plugin=True") plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True, desc='Plugin arguments.') test_mode = traits.Bool( False, mandatory=False, usedefault=True, desc='Affects whether where and if the workflow keeps its \ intermediary files. True to keep intermediary files. ' ) timeout = traits.Float(14.0) # DataGrabber datagrabber = traits.Instance(Data, ())
class config(HasTraits):
    """Mean-computation workflow configuration."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')

    # Subjects
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(BaseWorkflowConfig): uuid = traits.Str(desc="UUID") # Directories base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)') sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results") datagrabber = traits.Instance(Data, ()) run_mode = traits.Enum("flame1","ols","flame12") save_script_only = traits.Bool(False) #Normalization brain_mask = traits.File(mandatory=True,desc='Brain Mask') name_of_project = traits.String("group_analysis",usedefault=True) do_randomize = traits.Bool(True) num_iterations = traits.Int(5000) #Correction: run_correction = traits.Bool(True) z_threshold = traits.Float(2.3) p_threshold = traits.Float(0.05) connectivity = traits.Int(26) # Advanced Options use_advanced_options = traits.Bool() advanced_script = traits.Code() # Buttons check_func_datagrabber = Button("Check")
def getNode(_type, tr, config):
    """Map a traits trait type to a colander/deform schema node.

    Parameters
    ----------
    _type : type
        The type of a traits trait-type instance (compared against
        ``type(traits.X())`` for each supported X).
    tr : str
        Trait name; used as the schema node name and to look up the trait
        on ``config``.
    config : HasTraits
        Configuration object the trait belongs to (source of desc/values).

    Returns
    -------
    colander.SchemaNode (or datagrabber HTML view for Data instances).
    Unknown types fall back to a plain String node.
    """
    from bips.workflows.flexible_datagrabber import Data, DataBase
    if _type == type(traits.Int()):
        col_type = colander.SchemaNode(colander.Int(),
                                       name=tr,
                                       description=config.trait(tr).desc)
    elif _type == type(traits.Float()):
        col_type = colander.SchemaNode(colander.Decimal(), name=tr)
    elif _type == type(traits.String()) or _type == type(traits.Str()):
        col_type = colander.SchemaNode(colander.String(), name=tr)
    elif _type == type(traits.Enum('')):
        # Build a (value, label) list for the select widget.
        values = config.trait(tr).trait_type.values
        the_values = []
        for v in values:
            the_values.append((v, v))
        col_type = colander.SchemaNode(
            deform.Set(),
            widget=deform.widget.SelectWidget(values=the_values),
            name=tr)
    elif _type == type(traits.Bool()):
        col_type = colander.SchemaNode(colander.Boolean(),
                                       widget=deform.widget.CheckboxWidget(),
                                       name=tr)
    elif _type == type(traits.Code()):
        col_type = colander.SchemaNode(colander.String(),
                                       name=tr,
                                       widget=deform.widget.TextAreaWidget(cols=100, rows=20))
    elif _type == type(traits.Instance(Data, ())):
        from bips.workflows.flexible_datagrabber import create_datagrabber_html_view
        col_type = create_datagrabber_html_view()
    elif _type == type(traits.List()):
        col_type = get_list(_type, tr, config)
    else:
        # BUG FIX: Python-2 print statement converted to the print()
        # function so the module can be imported under Python 3.
        print("type: ", _type, "not found!")
        col_type = colander.SchemaNode(colander.String(), name=tr)
    return col_type
def init_parameters(self, parameter_name_list, linear_parameter_list=None):
    """Initialise the parameters of the component.

    Creates one Parameter per name, registers it on the component and as a
    trait, flags it as linear when requested, and hooks up a ``grad_<name>``
    method as its gradient if one exists.

    Parameters
    ----------
    parameter_name_list : list
        The list of parameter names.
    linear_parameter_list : list, optional
        The list of linear parameter. The default is None.

    Returns
    -------
    None.

    """
    linear_names = [] if linear_parameter_list is None else linear_parameter_list
    for pname in parameter_name_list:
        par = Parameter()
        self.parameters.append(par)
        par.name = pname
        if pname in linear_names:
            par._linear = True
        par._id_name = pname
        setattr(self, pname, par)
        # Wire up the analytic gradient when the component provides one.
        grad_attr = 'grad_' + pname
        if hasattr(self, grad_attr):
            par.grad = getattr(self, grad_attr)
        par.component = self
        self.add_trait(pname, t.Instance(Parameter))
class config(baseconfig):
    """Fixed-effects / thresholding configuration on top of baseconfig."""
    # NOTE(review): these are trait *classes*, not instances (no parens) —
    # traits accepts both, but this is inconsistent with the rest of the
    # file; confirm before changing.
    threshold = traits.Float
    cluster_size = traits.Int
    is_fixed_fx = traits.Bool
    # Paths to the first-level and fixed-effects config files.
    first_level_config = traits.File
    fx_config = traits.File
    is_block_design = traits.Bool
    datagrabber = traits.Instance(Data, ())
class Preferences(t.HasTraits):
    """Application preferences grouped per subsystem, with GUI editing
    and persistence to the defaults file."""

    global current_toolkit
    EELS = t.Instance(EELSConfig)
    EDS = t.Instance(EDSConfig)
    Model = t.Instance(ModelConfig)
    General = t.Instance(GeneralConfig)
    MachineLearning = t.Instance(MachineLearningConfig)

    def gui(self):
        """Open the preferences dialog using the dedicated GUI views."""
        import hyperspy.gui.preferences
        self.EELS.trait_view("traits_view",
                             hyperspy.gui.preferences.eels_view)
        self.edit_traits(view=hyperspy.gui.preferences.preferences_view)

    def save(self):
        """Persist the preference template to the defaults file."""
        # NOTE(review): ConfigParser.SafeConfigParser is the Python-2 API
        # (removed in Python 3.12); confirm this module still targets
        # Python 2 before modernising the parser class itself.
        config = ConfigParser.SafeConfigParser(allow_no_value=True)
        template2config(template, config)
        # BUG FIX: the original passed a bare open() handle to config.write
        # and leaked it; the context manager closes it deterministically.
        with open(defaults_file, 'w') as f:
            config.write(f)
class config(HasTraits): uuid = traits.Str(desc="UUID") # Directories working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory") base_dir = Directory( os.path.abspath('.'), mandatory=True, desc='Base directory of data. (Should be subject-independent)') sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results") crash_dir = Directory(mandatory=False, desc="Location to store crash files") # Execution run_using_plugin = Bool( False, usedefault=True, desc="True to run pipeline with plugin, False to run serially") plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor", usedefault=True, desc="plugin to use, if run_using_plugin=True") plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True, desc='Plugin arguments.') test_mode = Bool( False, mandatory=False, usedefault=True, desc='Affects whether where and if the workflow keeps its \ intermediary files. True to keep intermediary files. ' ) timeout = traits.Float(14.0) datagrabber = traits.Instance(Data, ()) run_mode = traits.Enum("flame1", "ols", "flame12") save_script_only = traits.Bool(False) #Normalization brain_mask = traits.File(mandatory=True, desc='Brain Mask') name_of_project = traits.String("group_analysis", usedefault=True) do_randomize = traits.Bool(True) num_iterations = traits.Int(5000) #Correction: run_correction = traits.Bool(True) z_threshold = traits.Float(2.3) p_threshold = traits.Float(0.05) connectivity = traits.Int(26) # Advanced Options use_advanced_options = traits.Bool() advanced_script = traits.Code() # Buttons check_func_datagrabber = Button("Check")
class config(HasTraits):
    """Surface-extraction workflow configuration (full, non-BaseWorkflowConfig
    variant with explicit execution traits)."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True,
                         desc="Freesurfer subjects directory")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
intermediary files. True to keep intermediary files. '
    )
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    # mri_vol2surf-style projection argument.
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py', 'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4', 'fsaverage3', 'fsaverage5',
                              'fsaverage6', 'fsaverage', 'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5], traits.Float(), mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
class config(BaseWorkflowConfig): uuid = traits.Str(desc="UUID") # Directories sink_dir = traits.Directory(mandatory=True, desc="Location where the BIP will store the results") save_script_only = traits.Bool(False) # DataGrabber datagrabber = traits.Instance(Data, ())
class InstanceEditor(EditorFactory):
    """Editor factory that renders the editor of a wrapped model instance."""

    value = tr.Instance(IModel)

    def render(self):
        """Build and return the model editor for the wrapped instance."""
        window = self.controller.app_window
        controller = self.value.get_controller(app_window=window)
        return controller.model_editor
class DescStatBasePlot(BasePlot):
    """Base plot for descriptive statistics backed by a DataSet."""

    ds = _traits.Instance(DataSet)
    plot_data = _traits.Property()
    """The data set that is to be shown in the table view of the plot data"""

    def _get_plot_data(self):
        # Deep-copy so the table view never mutates the original data set.
        shown = copy.deepcopy(self.ds)
        transposed = self.ds.mat.transpose()
        shown.mat = transposed.sort_index(axis=0, ascending=False)
        return shown
class NoPlotControl(_traitsui.ModelView):
    """ModelView wrapping a chaco DataView with no extra plot controls."""

    model = _traits.Instance(_chaco.DataView)
    # Empty by default; subclasses override to inject their own controls
    # via the Include('plot_controllers') slot below.
    plot_controllers = _traitsui.Group()
    traits_view = _traitsui.View(
        _traitsui.Group(_traitsui.Item(
            'model',
            editor=_enable.ComponentEditor(bgcolor=bg_color),
            show_label=False),
            _traitsui.Include('plot_controllers'),
            orientation="vertical"))
def init_parameters(self, parameter_name_list):
    """Create one Parameter per name and attach it to this component.

    Each parameter is appended to ``self.parameters``, exposed as an
    attribute and a trait of the same name, and — when the component
    defines a ``grad_<name>`` method — wired up as its gradient.
    """
    for pname in parameter_name_list:
        par = Parameter()
        self.parameters.append(par)
        par.name = pname
        setattr(self, pname, par)
        # Hook up the analytic gradient when one is provided.
        grad_attr = 'grad_' + pname
        if hasattr(self, grad_attr):
            par.grad = getattr(self, grad_attr)
        par.component = self
        self.add_trait(pname, t.Instance(Parameter))