def register_pipeline_node(node_class, name=None):
    """Registers a new pipeline node class to the registry.
    """
    if not name:
        name = node_class.__name__
    PIPELINE_NODE_REGISTRY[name] = node_class
    debug('Registered pipeline node %s as %s' % (node_class.__name__, name))

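# --- Usage sketch (illustrative only) ---------------------------------------
# A minimal example of how a plugin might register a node class.  The class
# name 'MyFilterNode' is hypothetical; it only assumes the PipelineNode base
# class referenced elsewhere in this codebase.
#
#     class MyFilterNode(PipelineNode):
#         """A do-nothing filter node."""
#
#     register_pipeline_node(MyFilterNode)            # registered as 'MyFilterNode'
#     register_pipeline_node(MyFilterNode, 'Filter')  # registered under an alias
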
def request_data(self, spec, **kwargs):
    """Provides particle data.
    """
    if spec == Hdf5DataSpec:
        debug('Serving Hdf5DataSpec')
        # this may be None if the datasource is not valid.
        return self.hdf5_data
    return None

def selected_data(self):
    if self.index_cursor in range(-self.n_data, self.n_data):
        debug('retrieving data at index=%s' % self.index_cursor)
        return self.parent.request_data(Hdf5DataSpec, index=self.index_cursor)
    else:
        warning('Index cursor is set to an invalid value.')
        return None

def OnPipelineDeleteNodeMenu(self, event):
    """Called on 'Pipeline'->'Delete Node...' menu.
    """
    # If no pipeline node is selected, do nothing and return.
    selected_tree_item_id, selected_node = self.check_pipeline_node_selected()
    if selected_node is None:
        return
    elif selected_tree_item_id == self.pipeline_tree_ctrl.GetRootItem():
        wx.MessageBox('Cannot delete root node.', 'Invalid operation')
        return
    # here, selected_node is proved to have some parent.
    selected_pipeline_node = self.pipeline_tree_ctrl.GetPyData(selected_tree_item_id)
    # else --
    debug('current_pipeline_node is %s' % selected_node)
    # destroy the selected node
    # phase 1: collect target data
    targets_to_be_destroyed = self.pipeline_tree_ctrl.get_subtree_data(selected_tree_item_id)
    # phase 2: destroy inspectors/visualizers bound to those targets
    debug('Destroying targets: %s' % targets_to_be_destroyed)
    for target in targets_to_be_destroyed:
        self.inspector_notebook.destroy_page_for_target(target)
        self.visualizer_notebook.destroy_page_for_target(target)
    # phase 3: cull pipeline subtree
    selected_pipeline_node.disconnect()
    # phase 4: rebuild tree
    self.pipeline_tree_ctrl.rebuild_parent(selected_tree_item_id)
    # phase 5: delete pipeline node
    del selected_pipeline_node

def finalize(self):
    """Finalizer.
    """
    # finalize visualizers
    debug('finalized %s' % self.__class__.__name__)
    # close registry
    self.registry.close()

def fetch_particle_space(self, **kwargs):
    """Property getter for particle_space.
    """
    # examine cache
    uri = self.parent.request_data(UriSpec, **kwargs)
    index = kwargs.get('index', 0)
    if not (self._uri == uri and self._index == index):
        # cache key changed: invalidate and remember the new uri/index.
        self._particle_space = None
        self._uri = uri
        self._index = index
    if self._particle_space is None:
        if uri is None:
            return None
        debug('spatiocyte data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            self._particle_space = self.load_spatiocyte_file(fullpath, index)
        except IOError, e:
            warning('Failed to open %s: %s', fullpath, str(e))
    # return the cached particle space (None if loading failed).
    return self._particle_space

def register_datasource_page(page_class, name=None):
    """Registers a new datasource page class to the registry.
    """
    if not name:
        name = page_class.__name__
    DATASOURCE_PAGE_REGISTRY[name] = page_class
    debug('Registered datasource %s as %s' % (page_class.__name__, name))

def OnAppAboutMenu(self, event):
    """Called on 'App'->'About' menu.
    """
    debug('App::OnAppAboutMenu.')
    dlg = wx.MessageDialog(self.browser, APP_TITLE_NAME,
                           'About this application...', wx.OK)
    dlg.ShowModal()
    dlg.Destroy()

def init_plugins(self):
    """Initialize plugins.
    """
    # load plugins
    plugin_loader = PluginLoader()
    for i, (modpath, status) in enumerate(plugin_loader.load_iterative()):
        message = '%s ... %s' % (modpath, 'OK' if status else 'FAILED')
        debug(message)

def check_pipeline_node_selected(self):
    """Show an alert if no pipeline node is selected.

    Returns a (tree_item_id, node) tuple; node is None if nothing is selected.
    """
    selected_tree_item_id, selected_node = self.current_pipeline_node_info
    debug('current_pipeline_node is %s' % selected_node)
    if selected_node is None:
        wx.MessageBox('No node is selected.', 'Invalid operation.')
    return selected_tree_item_id, selected_node

def load_iterative(self):
    for mod_path, modname in self.modules_info:
        try:
            mod_fullpath = mod_path + '.' + modname
            __import__(mod_fullpath)
            yield (mod_fullpath, True)
        except Exception, e:
            debug('**FAILED**: %s' % str(e))
            yield (mod_fullpath, False)

def internal_update(self):
    """Resets node_cursor if it has become invalid.
    """
    if self.hdf5_data:
        if self.hdf5_data.get(self.node_cursor):
            return
    # otherwise: reset node cursor
    self.node_cursor = '/'
    debug('%s' % self.hdf5_data)

def request_data(self, spec, **kwargs):
    """Provides particle data.
    """
    if spec == NumberOfItemsSpec:
        debug('Serving NumberOfItemsSpec')
        return self.parent.request_data(NumberOfItemsSpec, **kwargs)
    elif spec == ParticleSpaceSpec:
        debug('Serving ParticleSpaceSpec')
        return self.fetch_particle_space(**kwargs)
    return None

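# --- Usage sketch (illustrative only) ---------------------------------------
# A downstream node asks its parent for data by passing a "spec" marker class;
# the parent either serves the spec or returns None.  'child_node' below is a
# hypothetical node instance, not an object defined in this file.
#
#     particle_space = child_node.parent.request_data(ParticleSpaceSpec, index=0)
#     if particle_space is None:
#         debug('parent could not serve ParticleSpaceSpec')
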
def OnNotebookPageChanged(self, event):
    """Event handler called on notebook change.
    """
    # look for the currently selected page
    if self.notebook.selected_page:
        self.notebook.selected_page.update_datasource()
    else:
        # if no page is left
        self.datasource.uri = None
    debug('datasource.uri set to %s' % self.datasource.uri)
    wx.PostEvent(self, DatasourceChangedEvent())

def _get_actors(self):
    """Override of a base-class member function.
    """
    for sid, actor in self._actors_cache.items():
        self._renderer.RemoveActor(actor)
    self._actors_cache.clear()
    if self.particle_space is not None:
        # bounds = [numpy.inf, 0.0, numpy.inf, 0.0, numpy.inf, 0.0]
        # cmap = create_color_map(len(self.particle_space.species))
        # for i, sid in enumerate(self.particle_space.species):
        #     color = cmap[i]
        for sid in self.particle_space.species:
            if sid not in self.color_map.keys():
                continue
            color = self.color_map[sid]
            particles = self.particle_space.list_particles(sid)
            if len(particles) == 0:
                continue
            points = vtk.vtkPoints()
            radius = 0.0
            for pid, particle in particles:
                points.InsertNextPoint(
                    numpy.asarray(particle.position) / self.view_scale)
                radius = max(particle.radius / self.view_scale, radius)
            points.ComputeBounds()
            b = points.GetBounds()
            # bounds = [
            #     min(bounds[0], b[0]), max(bounds[1], b[1]),
            #     min(bounds[2], b[2]), max(bounds[3], b[3]),
            #     min(bounds[4], b[4]), max(bounds[5], b[5])]
            poly_data = vtk.vtkPolyData()
            poly_data.SetPoints(points)
            # source = vtk.vtkPointSource()
            # source.SetRadius(radius)
            source = vtk.vtkSphereSource()
            source.SetRadius(radius)
            mapper = vtk.vtkGlyph3DMapper()
            mapper.SetSourceConnection(source.GetOutputPort())
            mapper.SetInputConnection(poly_data.GetProducerPort())
            actor = vtk.vtkActor()
            actor.SetMapper(mapper)
            actor.GetProperty().SetColor(color)
            self._actors_cache[sid] = actor
    debug('actors: %s' % self._actors_cache)
    return self._actors_cache

def find_page_for_target(self, target):
    """Finds the page which binds the given target.
    """
    found_page_index, found_page_instance = None, None
    if target is None:
        debug('Cannot find page for target=None')
    else:
        for page_index in range(self.GetPageCount()):
            page_instance = self.GetPage(page_index)
            if getattr(page_instance, 'target', None) == target:
                found_page_index = page_index
                found_page_instance = page_instance
    return found_page_index, found_page_instance

def OnPipelineShowVisualizerMenu(self, event):
    """Called on 'Pipeline'->'Show visualizer...' menu.
    """
    # TBD: this is almost the same as OnPipelineShowInspectorMenu.
    # If no pipeline node is selected, do nothing and return.
    selected_tree_id, selected_node = self.check_pipeline_node_selected()
    if selected_node is None:
        return
    # else --
    debug('current_pipeline_node is %s' % selected_node)
    # If there is already a corresponding visualizer, just focus it.
    visualizer_page_index, visualizer_page_instance = (
        self.visualizer_notebook.find_page_for_target(selected_node))
    if visualizer_page_index is None:
        debug('No page exists, trying to create one')
        # find the PipelineNode class and (try to) load a new visualizer page.
        pipeline_node_type_name = selected_node.__class__.__name__
        debug('\n'.join(
            str((k, v)) for k, v in VISUALIZER_PAGE_REGISTRY.items()))
        visualizer_page_class = VISUALIZER_PAGE_REGISTRY.get(
            pipeline_node_type_name, None)
        if visualizer_page_class is None:
            wx.MessageBox(
                'Node type %s does not have a visualizer.'
                % pipeline_node_type_name, 'Invalid operation.')
            return
        debug('visualizer page class: %s' % visualizer_page_class)
        # else
        visualizer_page_index, visualizer_page_instance = (
            self.visualizer_notebook.create_page(
                visualizer_page_class, selected_node.name,
                target=selected_node))
    # force update
    visualizer_page_instance.update()
    self.visualizer_notebook.SetSelection(visualizer_page_index)

def request_data(self, spec, **kwargs):
    """Provides particle data.
    """
    if spec == NumberOfItemsSpec:
        debug('Serving NumberOfItemsSpec')
        if self.fetch_particle_space(**kwargs) is None:
            return 0
        else:
            return 1
    elif spec == ParticleSpaceSpec:
        debug('Serving ParticleSpaceSpec')
        # this may be None if the datasource is not valid.
        return self.fetch_particle_space(**kwargs)
    return None

def set_hdf5_file(self, hdf5_file):
    """Property setter; rebuilds the tree on file change.
    """
    debug('Deleting tree')
    self.DeleteAllItems()
    if hdf5_file:
        debug('Valid hdf5 file at %x' % id(hdf5_file))
        node_name = "%s:: %s" % (hdf5_file.__class__.__name__, hdf5_file.name)
        root_id = self.AddRoot(node_name)
        self.SetPyData(root_id, hdf5_file)
        if hasattr(hdf5_file, 'items') and hdf5_file.items():
            self.SetItemHasChildren(root_id, True)
    self._hdf5_file = hdf5_file

def OnDatasourceAddMenu(self, event):
    """Called on 'Datasource'->'Add...' menu.
    """
    debug('Available datasource page types: %s' % DATASOURCE_PAGE_REGISTRY.keys())
    dlg = AddDatasourceDialog(self.browser,
                              choices=sorted(DATASOURCE_PAGE_REGISTRY.keys()))
    if dlg.ShowModal() == wx.ID_OK:
        label_name = dlg.label_name
        page_class = DATASOURCE_PAGE_REGISTRY.get(dlg.datasource_name, None)
        if page_class is None:
            wx.MessageBox('Page type not found: %s' % dlg.datasource_name, 'Error')
        else:
            debug('Got %s as %s' % (page_class.__name__, label_name))
            # load new dialog page
            self.datasource_panel.notebook.create_page(page_class, label_name)
    dlg.Destroy()

def restore_state(self, info_dict):
    """Experimental: restore page info.
    """
    for name, info in info_dict.items():
        module_name = info['module_name']
        mod, cls = None, None
        try:
            mod = __import__(module_name)
            class_name = info['class_name']
            cls = getattr(mod, class_name, None)
        except ImportError:
            debug('Unable to load %s' % module_name)
        if cls:
            page = cls(self, -1)
            if hasattr(page, 'state'):
                page.state = info['state']
            self.AddPage(page, name)

def update(self):
    """Observer update handler.
    """
    # reset values (delete existing rows for grids)
    self.path_text.SetValue('')
    self.attr_grid.SetRowLabelSize(0)  # HideRowLabels()
    if self.attr_grid.GetNumberRows():
        self.attr_grid.DeleteRows(0, self.attr_grid.GetNumberRows())
    self.vals_grid.SetRowLabelSize(0)  # HideRowLabels()
    if self.vals_grid.GetNumberRows():
        self.vals_grid.DeleteRows(0, self.vals_grid.GetNumberRows())
    if self.vals_grid.GetNumberCols():
        self.vals_grid.DeleteCols(0, self.vals_grid.GetNumberCols())
    # check for current node cursor of target.
    hdf5_data = self.target.hdf5_data
    current_path = self.target.node_cursor
    if (current_path is None) or (hdf5_data is None):
        return
    debug('hdf5data: %s' % hdf5_data)
    # current_path and hdf5_data are valid, let's inspect it.
    self.path_text.SetValue(current_path)
    node = hdf5_data.get(current_path)
    debug('selected node: %s' % node)
    # for node having attrs, populate attrs grid.
    if hasattr(node, 'attrs'):
        n_attrs = len(node.attrs.items())
        grid = self.attr_grid
        grid.InsertRows(0, n_attrs)
        for row_idx, (key, value) in enumerate(node.attrs.items()):
            grid.SetCellValue(row_idx, 0, key)
            grid.SetCellValue(row_idx, 1, "%s" % value)
    # for node having value (implies dataset), populate values grid.
    if hasattr(node, 'value'):
        grid = self.vals_grid
        labels = node.dtype.names
        n_labels = len(labels)
        grid.InsertCols(0, n_labels)
        grid.InsertRows(0, node.len())
        for i, label in enumerate(labels):
            grid.SetColLabelValue(i, label)
        for row_idx, row_data in enumerate(node.value):
            for col_idx in range(n_labels):
                grid.SetCellValue(row_idx, col_idx, "%s" % row_data[col_idx])

def OnCameraParameterText(self, evt):
    """Called on TextCtrl edits.
    """
    try:
        camera = self.target.renderer.GetActiveCamera()
        # x/y/z controls
        for prefix, handler in (
                ('position_', camera.SetPosition),
                ('focal_point_', camera.SetFocalPoint),
                ('view_up_', camera.SetViewUp)):
            handler(*[
                float(getattr(self, prefix + axis_name + '_text').GetValue())
                for axis_name in ('x', 'y', 'z')])
        # parallel scale
        camera.SetParallelScale(float(self.parallel_scale_text.GetValue()))
        self.target.status_changed(exclude_observers=[self])
    except Exception, e:
        debug('OnCameraParameterText failed due to %s' % e)
        self.params_from_camera()

def OnBrowserClosing(self, event):
    """Hook from browser on closing.
    """
    # save datasource panel.
    ds_registry = self.registry.load_section('datasources')
    ds_notebook = self.datasource_panel.notebook
    ds_pages_info = []
    for page_index in range(ds_notebook.GetPageCount()):
        page = ds_notebook.GetPage(page_index)
        label = ds_notebook.GetPageText(page_index)
        for name, cls in DATASOURCE_PAGE_REGISTRY.items():
            if cls == page.__class__:
                ds_pages_info.append((label, name, page.save()))
                debug('saving page %s' % [label, name, page.save()])
    ds_registry['pages'] = ds_pages_info
    # save pipeline
    pl_registry = self.registry.load_section('pipeline')
    pl_registry['tree'] = self.pipeline.save()

def update(self):
    """Update UI.
    """
    if self.target and hasattr(self.target, 'hdf5_data'):
        hdf5_data = self.target.hdf5_data
        debug('hdf5_data at %s' % hdf5_data)
        if hdf5_data:
            # gather attributes from the hdf5 data.
            for prop_name in self.PROP_NAMES:
                widget = getattr(self, prop_name, None)
                prop_value = getattr(hdf5_data, prop_name, None)
                if widget and prop_value:
                    widget.SetValue(str(prop_value))
            return
    # else fallback
    debug('**** not found: %s, %s'
          % (self.target, getattr(self.target, 'hdf5_data', None)))
    for prop_name in self.PROP_NAMES:
        widget = getattr(self, prop_name, None)
        if widget:
            widget.SetValue('')

def hdf5_data(self):
    """Property getter for hdf5_data.
    """
    # examine cache
    uri = self.parent.request_data(UriSpec)
    if not (self._uri == uri):
        # cache is stale: invalidate and remember the new uri.
        self._hdf5_data = None
        self._uri = uri
    if self._hdf5_data is None:
        if uri is None:
            return None
        debug('hdf5 data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            if os.path.exists(fullpath):
                self._hdf5_data = File(fullpath)
        except IOError, e:
            warning('Failed to open %s: %s', fullpath, str(e))
    # return the cached data (None if the datasource is not valid).
    return self._hdf5_data

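# --- Usage sketch (illustrative only) ---------------------------------------
# The getter expects a file-style URI from the parent datasource and converts
# it to a local path via urlparse; the path below is a hypothetical example.
#
#     >>> from urlparse import urlparse
#     >>> parsed = urlparse('file:///home/user/data/simulation.hdf5')
#     >>> parsed.netloc + parsed.path
#     '/home/user/data/simulation.hdf5'
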
def OnExportButton(self, evt):
    """Called on Export... button.
    """
    dlg = wx.FileDialog(
        self, message="Save file as ...",
        defaultDir=os.getcwd(),
        defaultFile="camera_params.json",
        wildcard="Json (*.json)|*.json|",
        style=wx.SAVE)
    ret = dlg.ShowModal()
    if ret == wx.ID_OK:
        path = dlg.GetPath()
        try:
            camera = self.target.renderer.GetActiveCamera()
            data = dict(
                position=tuple(camera.GetPosition()),
                focal_point=tuple(camera.GetFocalPoint()),
                view_up=tuple(camera.GetViewUp()),
                parallel_scale=camera.GetParallelScale())
            ofile = open(path, 'w')
            ofile.write(json.dumps(data))
            ofile.close()
        except Exception, e:
            debug('Export failed due to %s' % str(e))

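# --- Example output (illustrative only) -------------------------------------
# The exported camera_params.json is a flat JSON object; the numbers below are
# made-up values, not defaults of this application.
#
#     {"position": [0.0, 0.0, 10.0],
#      "focal_point": [0.0, 0.0, 0.0],
#      "view_up": [0.0, 1.0, 0.0],
#      "parallel_scale": 5.0}
#
# OnImportButton() below reads exactly these four keys back into the camera.
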
def OnImportButton(self, evt):
    """Called on Import... button.
    """
    dlg = wx.FileDialog(
        self, message="Open camera parameters ...",
        defaultDir=os.getcwd(),
        defaultFile="camera_params.json",
        wildcard="Json (*.json)|*.json|")
    ret = dlg.ShowModal()
    if ret == wx.ID_OK:
        path = dlg.GetPath()
        try:
            infile = open(path, 'r')
            data = json.loads(infile.read())
            infile.close()
            camera = self.target.renderer.GetActiveCamera()
            camera.SetPosition(data['position'])
            camera.SetFocalPoint(data['focal_point'])
            camera.SetViewUp(data['view_up'])
            camera.SetParallelScale(data['parallel_scale'])
            self.params_from_camera()
        except Exception, e:
            debug('Import failed due to %s' % str(e))
        self.target.propagate_down(UpdateEvent(None))

def OnCaptureImageButton(self, evt):
    """Controls offscreen rendering mode.
    """
    dlg = wx.FileDialog(
        self, message="Save capture image as ...",
        defaultDir=os.getcwd(),
        defaultFile="capture.png",
        wildcard="PNG (*.png)|*.png|",
        style=wx.SAVE)
    ret = dlg.ShowModal()
    if ret == wx.ID_OK:
        path = dlg.GetPath()
        try:
            render_window = self.target.renderer.GetRenderWindow()
            image_filter = vtk.vtkWindowToImageFilter()
            image_writer = vtk.vtkPNGWriter()
            image_filter.SetInput(render_window)
            image_filter.Update()
            image_writer.SetInputConnection(image_filter.GetOutputPort())
            image_writer.SetFileName(path)
            render_window.Render()
            image_writer.Write()
        except Exception, e:
            debug('Capture failed due to %s' % str(e))

def register_inspector_page(node_class_name, page_class):
    """Register new page class to registry.
    """
    INSPECTOR_PAGE_REGISTRY[node_class_name] = page_class
    debug('Registered inspector %s for pipeline node type %s'
          % (page_class.__name__, node_class_name))

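# --- Usage sketch (illustrative only) ---------------------------------------
# Inspector pages are keyed by the *name* of the pipeline node class they
# inspect; 'MyFilterNode' and 'MyFilterInspectorPage' are hypothetical names.
#
#     register_inspector_page('MyFilterNode', MyFilterInspectorPage)
#     page_class = INSPECTOR_PAGE_REGISTRY.get('MyFilterNode', None)
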
def listbox_select(self, event):
    # TODO
    checked = self.listbox.GetChecked()
    debug(checked)