def test_no_op_validation():
    """Test the no-op handler.

    """
    a = Atom()
    m = Value()
    m.set_validate_mode(Validate.NoOp, None)
    for value in (1, 1.0, '', [], {}):
        assert m.do_validate(a, None, value) == value
def test_using_no_op_handler():
    """Test using the no_op handler.

    """
    v = Value()
    v.set_getattr_mode(GetAttr.NoOp, None)

    class CustomGetAtom(Atom):
        val = v

    a = CustomGetAtom()
    assert a.val is None
    a.val = 1
    assert a.val is None
def test_copy_static_observers(static_atom):
    """Test cloning the static observers of a member.

    """
    member = static_atom.get_member('val2')
    v = Value()
    v.copy_static_observers(member)
    assert v.has_observers()
    assert v.has_observer('manual_obs')
    assert v.has_observer('react')

    # Copying a member onto itself is a no-op and takes an early exit
    # (seen in coverage).
    v.copy_static_observers(v)

    with pytest.raises(TypeError) as excinfo:
        v.copy_static_observers(1)
    assert 'Member' in excinfo.exconly()
    def _create_local_storage(self):
        # Only classes which have operator bindings need local storage.
        scopename = self._scopenames.next()
        stack = [self._root]
        while stack:
            node = stack.pop()
            if node.bindings:
                klass = node.typeclass
                members = klass.members()
                storage = Value()
                storage.set_name(scopename)
                storage.set_index(len(members))
                members[scopename] = storage
                node.scope_member = storage
                # The member is not added to the class so that it
                # remains hidden from the user and object namespace.
            stack.extend(node.child_defs)
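
# A rough standalone sketch of the hidden-storage trick used above: a Value
# member is attached to a class after its creation via set_name/set_index and
# the members() map, without ever becoming a class attribute. This assumes,
# as the compiler code above relies on, that members() exposes the live member
# map and that instance slots are sized from it at instantiation time. The
# '_hidden_scope' name and the stored dict are purely illustrative.
from atom.api import Atom, Value


class Plain(Atom):
    """A class declared with no members of its own."""
    pass


members = Plain.members()
storage = Value()
storage.set_name('_hidden_scope')   # hypothetical member name
storage.set_index(len(members))     # next free slot index
members['_hidden_scope'] = storage

obj = Plain()
# The member never appears in the class body, so it stays out of the object
# namespace; it is only reachable through the member API.
storage.set_slot(obj, {'answer': 42})
assert storage.get_slot(obj) == {'answer': 42}
assert not hasattr(obj, '_hidden_scope')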
def test_using_call_object_object_name_mode():
    """Test using call_object_object_name mode.

    """
    def getter(object, name):
        object.count += 1
        return object.count, name

    m = Value()
    m.set_getattr_mode(GetAttr.CallObject_ObjectName, getter)

    class CustomGetAtom(Atom):
        val = m
        count = Int()

    a = CustomGetAtom()
    assert a.val == (1, 'val')
    assert a.val == (2, 'val')

    with pytest.raises(TypeError):
        m.set_getattr_mode(GetAttr.CallObject_ObjectName, 1)
def test_using_object_method_name_mode():
    """Test using object_method_name mode.

    """
    m = Value()
    m.set_getattr_mode(GetAttr.ObjectMethod_Name, 'getter')

    class CustomGetAtom(Atom):
        val = m
        count = Int()

        def getter(self, name):
            self.count += 1
            return (self.count, name)

    a = CustomGetAtom()
    assert a.val == (1, 'val')
    assert a.val == (2, 'val')

    with pytest.raises(TypeError):
        m.set_getattr_mode(GetAttr.CallObject_Object, 1)
@pytest.mark.parametrize("member, set_values, values, raising_values", [
    (Value(), ['a', 1, None], ['a', 1, None], []),
    (Bool(), [True, False], [True, False], 'r'),
    (Int(strict=True), [1], [1],
     [1.0, long(1)] if sys.version_info < (3,) else [1.0]),
    (Int(strict=False), [1, 1.0, long(1)], 3 * [1],
     ['a'] + [] if sys.version_info >= (3,) else [1.0e35]),
    (Long(strict=True), [long(1)], [long(1)],
     [1.0, 1] if sys.version_info < (3,) else [0.1]),
    (Long(strict=False), [1, 1.0, int(1)], 3 * [1], ['a']),
    (Range(0, 2), [0, 2], [0, 2], [-1, 3, '']),
    (Range(2, 0), [0, 2], [0, 2], [-1, 3]),
    (Range(0), [0, 3], [0, 3], [-1]),
    (Range(high=2), [-1, 2], [-1, 2], [3]),
    (Float(), [1, int(1), 1.1], [1.0, 1.0, 1.1], ['']),
    (Float(strict=True), [1.1], [1.1], [1]),
class Engine(PSIContribution):
    '''Defines a hardware-specific interface.

    The user-defined attributes are the ones set by the end-user of this
    library in their IO manifest. The IO manifest is system specific and
    describes the hardware they are using for data acquisition.

    User-defined attributes
    -----------------------
    name : string
        Name of the engine. Must be unique across all engines. This name is
        used for debugging and metadata purposes.
    master_clock : bool
        If true, this engine will provide a timestamp whenever it's requested
        via `get_ts`. This is typically used for software-timed events (events
        generated by the hardware will typically have a timestamp that's
        determined by the engine that controls that particular device).
    hw_ai_monitor_period : float (sec)
        Poll period (in seconds). This defines how quickly acquired (analog
        input) data is downloaded from the buffers (and made available to
        listeners). If you want to see data as soon as possible, set the poll
        period to a small value. If your application is stalling or freezing,
        set this to a larger value. This poll period is a suggestion, not a
        contract.
    hw_ao_monitor_period : float (sec)
        Poll period (in seconds). This defines how often callbacks for the
        analog outputs are notified (i.e., to generate additional samples for
        playout). If the poll period is too long, then the analog output may
        run out of samples. This poll period is a suggestion, not a contract.

    Attributes
    ----------
    configured : bool
        True if the hardware has been configured.

    Notes
    -----
    When subclassing, you only need to implement the callbacks required by
    your hardware. For example, if your hardware only has analog inputs, you
    only need to implement the analog input methods.
    '''
    name = d_(Str()).tag(metadata=True)
    master_clock = d_(Bool(False)).tag(metadata=True)

    lock = Value()
    configured = Bool(False)

    hw_ai_monitor_period = d_(Float(0.1)).tag(metadata=True)
    hw_ao_monitor_period = d_(Float(1)).tag(metadata=True)

    def _default_lock(self):
        return LogLock(self.name)

    def get_channels(self, mode=None, direction=None, timing=None,
                     active=True):
        '''Return channels matching the given criteria.

        Parameters
        ----------
        mode : {None, 'analog', 'digital', 'counter'}
            Type of channel.
        direction : {None, 'input', 'output'}
            Direction of the channel.
        timing : {None, 'hardware', 'software'}
            Hardware or software-timed channel. Hardware-timed channels have a
            sampling frequency greater than 0.
        active : bool
            If True, return only channels that have configured inputs or
            outputs.
        '''
        channels = [c for c in self.children if isinstance(c, Channel)]
        if active:
            channels = [c for c in channels if c.active]

        if timing is not None:
            if timing in ('hardware', 'hw'):
                channels = [c for c in channels
                            if isinstance(c, HardwareMixin)]
            elif timing in ('software', 'sw'):
                channels = [c for c in channels
                            if isinstance(c, SoftwareMixin)]
            else:
                raise ValueError('Unsupported timing')

        if direction is not None:
            if direction in ('input', 'in'):
                channels = [c for c in channels if isinstance(c, InputMixin)]
            elif direction in ('output', 'out'):
                channels = [c for c in channels if isinstance(c, OutputMixin)]
            else:
                raise ValueError('Unsupported direction')

        if mode is not None:
            if mode == 'analog':
                channels = [c for c in channels if isinstance(c, AnalogMixin)]
            elif mode == 'digital':
                channels = [c for c in channels if isinstance(c, DigitalMixin)]
            elif mode == 'counter':
                channels = [c for c in channels if isinstance(c, CounterMixin)]
            else:
                raise ValueError('Unsupported mode')

        return tuple(channels)

    def get_channel(self, channel_name):
        channels = self.get_channels(active=False)
        for channel in channels:
            if channel.name == channel_name:
                return channel
        m = '{} channel does not exist'.format(channel_name)
        raise AttributeError(m)

    def remove_channel(self, channel):
        channel.set_parent(None)

    def configure(self):
        for channel in self.get_channels():
            log.debug('Configuring channel {}'.format(channel.name))
            channel.configure()
        self.configured = True

    def register_ai_callback(self, callback, channel_name=None):
        raise NotImplementedError

    def register_et_callback(self, callback, channel_name=None):
        raise NotImplementedError

    def unregister_ai_callback(self, callback, channel_name=None):
        raise NotImplementedError

    def unregister_et_callback(self, callback, channel_name=None):
        raise NotImplementedError

    def register_done_callback(self, callback):
        raise NotImplementedError

    def write_hw_ao(self, data, offset, timeout=1):
        '''Write hardware-timed analog output data to the buffer.

        Parameters
        ----------
        data : 2D array
            Data to write (format channel x time).
        offset : int
            Sample at which to start writing data. Sample is relative to
            beginning of data acquisition. This can overwrite data that has
            already been written to the buffer but not consumed by the
            hardware.
        timeout : float
            Time, in seconds, to keep trying to write the data before failing.

        Notes
        -----
        When subclassing, raise an exception if the system attempts to write
        data beginning at an offset that has already been consumed by the
        hardware and cannot be modified.
        '''
        raise NotImplementedError

    def get_ts(self):
        raise NotImplementedError

    def start(self):
        raise NotImplementedError

    def stop(self):
        raise NotImplementedError

    def reset(self):
        raise NotImplementedError

    def get_buffer_size(self, channel_name):
        raise NotImplementedError

    def update_hw_ao_multiple(self, offsets, channel_names):
        raise NotImplementedError

    def update_hw_ao(self, offsets, channel_name, method):
        raise NotImplementedError

    def clone(self, channel_names=None):
        '''Return a copy of this engine with the specified channels included.

        This is intended as a utility function to assist various routines that
        may need to do a quick operation before starting the experiment. For
        example, calibration may only need to run a subset of the channels.
        '''
        new = copy_declarative(self)
        for channel in new.children:
            channel.set_parent(None)
        if channel_names is not None:
            for channel_name in channel_names:
                channel = self.get_channel(channel_name)
                new_channel = copy_declarative(channel, parent=new,
                                               exclude=['inputs', 'outputs'])
        return new
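
# Per the Notes in the Engine docstring, a subclass only needs to implement the
# callbacks its hardware supports. A minimal sketch (the class name, member
# name and bodies are hypothetical, not part of the library) of an analog-
# input-only engine:
class SoundCardAIEngine(Engine):
    """Hypothetical analog-input-only engine: only the AI hooks are filled in."""

    #: Registered analog-input callbacks, keyed by channel name (None = all).
    _ai_callbacks = Value(factory=dict)

    def register_ai_callback(self, callback, channel_name=None):
        self._ai_callbacks.setdefault(channel_name, []).append(callback)

    def unregister_ai_callback(self, callback, channel_name=None):
        self._ai_callbacks.get(channel_name, []).remove(callback)

    def start(self):
        # Hardware-specific acquisition start would go here.
        pass

    def stop(self):
        # Hardware-specific acquisition stop would go here.
        pass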
class ArgReplacer(Atom):
    """Replaces one value in an ``args, kwargs`` pair.

    Inspects the function signature to find an argument by name whether it is
    passed by position or keyword. For use in decorators and similar wrappers.
    """

    name = Unicode()

    arg_pos = Value()

    def __init__(self, func, name):
        # type: (Callable, str) -> None
        super(ArgReplacer, self).__init__(name=name)
        try:
            self.arg_pos = self._getargnames(func).index(name)
        except ValueError:
            # Not a positional parameter
            self.arg_pos = None

    def _getargnames(self, func):
        # type: (Callable) -> List[str]
        try:
            return getargspec(func).args
        except TypeError:
            if hasattr(func, 'func_code'):
                # Cython-generated code has all the attributes needed
                # by inspect.getargspec, but the inspect module only
                # works with ordinary functions. Inline the portion of
                # getargspec that we need here. Note that for static
                # functions the @cython.binding(True) decorator must
                # be used (for methods it works out of the box).
                code = func.func_code  # type: ignore
                return code.co_varnames[:code.co_argcount]
            raise

    def get_old_value(self, args, kwargs, default=None):
        # type: (List[Any], Dict[str, Any], Any) -> Any
        """Returns the old value of the named argument without replacing it.

        Returns ``default`` if the argument is not present.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            return args[self.arg_pos]
        else:
            return kwargs.get(self.name, default)

    def replace(self, new_value, args, kwargs):
        # type: (Any, List[Any], Dict[str, Any]) -> Tuple[Any, List[Any], Dict[str, Any]]
        """Replace the named argument in ``args, kwargs`` with ``new_value``.

        Returns ``(old_value, args, kwargs)``. The returned ``args`` and
        ``kwargs`` objects may not be the same as the input objects, or the
        input objects may be mutated.

        If the named argument was not found, ``new_value`` will be added to
        ``kwargs`` and None will be returned as ``old_value``.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            # The arg to replace is passed positionally
            old_value = args[self.arg_pos]
            args = list(args)  # *args is normally a tuple
            args[self.arg_pos] = new_value
        else:
            # The arg to replace is either omitted or passed by keyword.
            old_value = kwargs.get(self.name)
            kwargs[self.name] = new_value
        return old_value, args, kwargs
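
# A short usage sketch of ``replace``; the ``connect`` function and its
# argument values are illustrative and not part of the original module.
def connect(host, port, timeout=None):
    return (host, port, timeout)


replacer = ArgReplacer(connect, 'timeout')

# Positional call: the argument is swapped in place in ``args``.
old, args, kwargs = replacer.replace(30, ['example.org', 80, 5], {})
assert old == 5 and args == ['example.org', 80, 30]

# Keyword (or omitted) call: the argument is written into ``kwargs``.
old, args, kwargs = replacer.replace(30, ['example.org', 80], {})
assert old is None and kwargs == {'timeout': 30}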
class ProfileInfos(Atom):
    """Details about a profile.

    This is used as a cache to avoid reloading all the profiles every time.

    """
    #: Path to the .ini file holding the full infos.
    path = Str()

    #: Reference to the instrument plugin.
    plugin = Value()

    #: Profile id.
    id = Str()

    #: Supported model.
    model = Typed(InstrumentModelInfos)

    #: Dict of the connections.
    connections = Dict()

    #: Dict of the settings.
    settings = Dict()

    def write_to_file(self):
        """Save the profile to a file.

        """
        self._config.filename = self.path
        self._config.update(dict(id=self.id, model_id=self.model.id,
                                 connections=self.connections,
                                 settings=self.settings))
        self._config.write()

    def clone(self):
        """Clone this object.

        """
        c = ConfigObj(encoding='utf-8', indent_type='    ')
        c.update(dict(id=self.id, model_id=self.model.id,
                      connections=self.connections,
                      settings=self.settings))
        return type(self)(path=self.path, _config=c, plugin=self.plugin)

    @classmethod
    def create_blank(cls, plugin):
        """Create a new blank ProfileInfos.

        """
        c = ConfigObj(encoding='utf-8', indent_type='    ')
        c['id'] = ''
        c['model_id'] = ''
        c['connections'] = {}
        c['settings'] = {}
        return cls(plugin=plugin, _config=c)

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    #: ConfigObj object associated to the profile.
    _config = Value()

    def _default_id(self):
        """Get the id from the profile.

        """
        return self._config['id']

    def _default_model(self):
        """Get the model from the profile.

        """
        infos = self._config['model_id'].split('.')
        h = self.plugin._manufacturers
        if len(infos) == 2:
            manufacturer, model = infos
            return h._manufacturers[manufacturer]._models[model]
        if len(infos) == 3:
            manufacturer, serie, model = infos
            m = h._manufacturers[manufacturer]
            return m._series[serie]._models[model]

    def _default_connections(self):
        """Get the defined connections from the profile.

        """
        return dict(self._config['connections'])

    def _default_settings(self):
        """Get the defined settings from the profile.

        """
        return dict(self._config['settings'])

    def _default__config(self):
        """Load the config from the file.

        """
        return ConfigObj(self.path, encoding='utf-8', indent_type='    ')

    def _post_setattr__config(self, old, new):
        """Clean id, model, connections and settings so that the defaults are
        computed again.

        """
        del self.id, self.model, self.connections, self.settings
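
# A small usage sketch of the lazy-default behaviour (ids and values are
# illustrative; passing plugin=None is only acceptable here because the model
# is never resolved in this sketch).
infos = ProfileInfos.create_blank(plugin=None)
assert infos.id == ''

# Re-assigning the underlying ConfigObj resets the cached members (see
# _post_setattr__config), so the new content is picked up on the next access.
c = ConfigObj(encoding='utf-8', indent_type='    ')
c['id'] = 'lock-in-1'
c['connections'] = {}
c['settings'] = {}
infos._config = c
assert infos.id == 'lock-in-1'
assert infos.connections == {}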
class VTKRenderController(Atom):
    """VTK render controller - supplies renderers to a VTKCanvas."""

    zoomFactor = Value()
    numOfRenderers = Int()
    callbacks = Dict()
    customPorts = Dict()
    customBackgrounds = Dict()
    interactorStyle = Str()
    style = Value()
    bgColor = Tuple()
    motionFactor = Int()
    renderers = List()
    view = Value()
    appName = Str()
    logFile = Str()
    kwargs = Dict()

    def __init__(self, numOfRenderers=1, view=None, callbacks=None,
                 bgColor=None, customPorts=None, customBackgrounds=None,
                 logToFile=True, logFile=None, appName=None,
                 interactorStyle=None, motionFactor=None, zoomFactor=None,
                 **kwargs):
        """Default init.

        numOfRenderers - how many renderers to create
        bgColor - default background color
        customPorts - custom view port params in the form of a tuple
            (x0, y0, x1, y1) where x and y are between (0, 1)
        customBackgrounds - custom backgrounds for each of the view ports
        """
        self.bgColor = bgColor or (0.25, 0.25, 0.25)
        self.callbacks = callbacks or {}
        self.numOfRenderers = numOfRenderers if numOfRenderers > 0 else 1
        self.customPorts = customPorts or {0: (0.0, 0.0, 1.0, 1.0), }
        self.customBackgrounds = customBackgrounds or {}
        self.motionFactor = motionFactor or 5
        self.interactorStyle = interactorStyle or 'TrackBallCamera'
        self.style = INTERACTION_STYLES.get(
            self.interactorStyle, vtk.vtkInteractorStyleTrackballCamera())
        self.zoomFactor = zoomFactor
        self.view = view
        self.renderers = []
        self.appName = appName or 'vtkApp'
        self.logFile = logFile or ''
        self.kwargs = kwargs
        if logToFile:
            self._setupLogging()
        self.activate()

    def activate(self):
        """Activate the widget - called only when the parent widget is
        activated."""
        self._make_renderers()
        self._bindEvents()

    def _bindEvents(self):
        """Bind additional events."""
        if self.view and self.view.proxy.vtk_widget:
            self.view.proxy.vtk_widget.AddObserver(
                'LeftButtonPressEvent',
                partial(self._OnEventAction, event_type='OnLeftDown'))
            self.view.proxy.vtk_widget.AddObserver(
                'MiddleButtonPressEvent',
                partial(self._OnEventAction, event_type='OnMiddleDown'))
            self.view.proxy.vtk_widget.AddObserver(
                'RightButtonPressEvent',
                partial(self._OnEventAction, event_type='OnRightDown'))
            self.view.proxy.vtk_widget.AddObserver(
                'KeyPressEvent',
                partial(self._OnEventAction, event_type='OnKeyDown'))

    def _OnEventAction(self, obj, event, event_type=None):
        """Handle a button or key press event by picking and dispatching."""
        (x, y) = obj.GetEventPosition()
        obj.GetPicker().Pick(
            x, y, 0, obj.GetRenderWindow().GetRenderers().GetFirstRenderer())
        pos = obj.GetPicker().GetPickPosition()
        if self.callbacks:
            fn = self.callbacks.get(event_type)
            if fn and callable(fn):
                fn(position=pos, event=event)

    def _make_renderers(self):
        """Make renderers."""
        if self.view and self.view.proxy.vtk_widget:
            self.make_local_renderers()

    def make_local_renderers(self):
        """Make local renderers."""
        self.renderers = [vtk.vtkRenderer()
                          for _ in xrange(self.numOfRenderers)]
        if self.customBackgrounds and \
                len(self.customBackgrounds) == self.numOfRenderers:
            for i in xrange(self.numOfRenderers):
                self.renderers[i].SetBackground(
                    self.customBackgrounds.get(i, (0.1, 0.1, 0.1)))
        else:
            for rend in self.renderers:
                rend.SetBackground(self.bgColor)
        self.setViewPorts()
        self.setCamera()

    def setViewPorts(self):
        """Set automatic viewports for the renderers (only for 2, 3 and 4)."""
        l = len(self.renderers)
        if self.customPorts and len(self.customPorts) == l:
            for i in xrange(l):
                port = self.customPorts.get(i)
                if port:
                    self.renderers[i].SetViewport(*port)
        else:
            if 0 < l < 5:
                if l == 2:
                    self.renderers[0].SetViewport(0.0, 0.0, 0.5, 1.0)
                    self.renderers[1].SetViewport(0.5, 0.0, 1.0, 1.0)
                if l == 3:
                    self.renderers[0].SetViewport(0.0, 0.0, 0.333, 1.0)
                    self.renderers[1].SetViewport(0.333, 0.0, 0.666, 1.0)
                    self.renderers[2].SetViewport(0.666, 0.0, 1.0, 1.0)
                if l == 4:
                    self.renderers[0].SetViewport(0.0, 0.0, 0.5, 0.5)
                    self.renderers[1].SetViewport(0.5, 0.0, 1.0, 0.5)
                    # Top-left quadrant (x0 of 0.5 would give a zero-width
                    # viewport).
                    self.renderers[2].SetViewport(0.0, 0.5, 0.5, 1.0)
                    self.renderers[3].SetViewport(0.5, 0.5, 1.0, 1.0)

    def addActors(self, actors=None):
        """Add actors to the renderers."""
        if isinstance(actors, dict):
            renders = dict(enumerate(self.renderers))
            for ix, acts in actors.iteritems():
                rend = renders.get(ix)
                if rend:
                    for act in acts:
                        rend.AddActor(act)
        else:
            rend = self.renderers[0]
            for act in actors:
                rend.AddActor(act)

    def setCamera(self):
        """Set camera settings."""
        for ren in self.renderers:
            if self.zoomFactor:
                ren.ResetCamera()
                ren.GetActiveCamera().Zoom(self.zoomFactor)

    def get_renderers(self):
        """Return the current renderers."""
        return self.renderers

    def get_renderer(self):
        """Return the main renderer."""
        return self.renderers[0]

    def setInteractorStyle(self, interactorStyle='TrackBallCamera'):
        """Set a custom interactor style."""
        self.interactorStyle = interactorStyle
        self.style = INTERACTION_STYLES.get(
            interactorStyle, vtk.vtkInteractorStyleTrackballCamera())
        self._setInteractorStyle()

    def _setInteractorStyle(self):
        """Set the current interactor style."""
        if self.view and self.style and self.view.proxy.vtk_widget:
            self.view.proxy.vtk_widget.SetInteractorStyle(self.style)

    def _setupLogging(self):
        """Set up local logging."""
        if not self.logFile:
            import os
            import tempfile
            temp = tempfile.gettempdir()
            app = ''.join(c for c in self.appName
                          if 'a' <= c.lower() <= 'z') + '.log.'
            self.logFile = os.path.join(temp, app)
        fileOutputWindow = vtk.vtkFileOutputWindow()
        fileOutputWindow.SetFileName(self.logFile)
        outputWindow = vtk.vtkOutputWindow().GetInstance()
        if outputWindow:
            outputWindow.SetInstance(fileOutputWindow)

    @observe(('bgColor', ))
    def _onBGColorUpdated(self, change):
        """Update the background color."""
        if self.renderers and change:
            type_ = change.get('type')
            if type_ != 'create':
                bgColor = change.get('value')
                if bgColor:
                    self.bgColor = bgColor
                    for rend in self.renderers:
                        rend.SetBackground(self.bgColor)

    @observe(('customBackgrounds', ))
    def _onCustomColorsUpdated(self, change):
        """Update custom background colors."""
        if self.renderers and change:
            type_ = change.get('type')
            if type_ != 'create':
                customColors = change.get('value')
                if customColors:
                    self.customBackgrounds = customColors
                    renders = dict(enumerate(self.renderers))
                    for i, colors in self.customBackgrounds.iteritems():
                        rend = renders.get(i)
                        if rend:
                            rend.SetBackground(colors)
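
# A hedged usage sketch (values and the picked-list callback are illustrative):
# without an enaml view (view=None) activate() does nothing, so the renderers
# are created explicitly and actors added by renderer index.
picked = []
controller = VTKRenderController(
    numOfRenderers=2,
    bgColor=(0.1, 0.1, 0.15),
    callbacks={'OnLeftDown': lambda position, event: picked.append(position)},
    logToFile=False,
)
controller.make_local_renderers()

# Build a simple actor and attach it to the first renderer.
cone = vtk.vtkConeSource()
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(cone.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
controller.addActors({0: [actor]})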
class SequenceEditionSpace(Workspace):
    """Workspace dedicated to the edition of pulse sequences.

    """
    # --- Public API ----------------------------------------------------------

    #: Reference to the plugin.
    plugin = Value()

    #: Reference to the workspace state stored in the plugin.
    state = Typed(SequenceEditionSpaceState)

    #: Reference to the log panel model received from the log plugin.
    log_model = Value()

    #: Getter for the dock area linked to the workspace.
    dock_area = Property()

    window_title = set_default('Pulses')

    def start(self):
        """Add the workspace specific menu and create the content.

        """
        plugin = self.workbench.get_plugin(u'ecpy.pulses')
        plugin.workspace = self
        self.plugin = plugin
        if plugin.workspace_state:
            self.state = plugin.workspace_state
        else:
            state = SequenceEditionSpaceState()
            self.state = state
            plugin.workspace_state = state

        # Add handler to the root logger to display messages in panel.
        core = self.workbench.get_plugin(u'enaml.workbench.core')
        cmd = u'ecpy.app.logging.add_handler'
        self.log_model = core.invoke_command(cmd,
                                             {'id': LOG_ID, 'mode': 'ui'},
                                             self)[0]

        # Create content.
        self.content = SequenceSpaceContent(workspace=self)

        # Contribute menus.
        self.workbench.register(SequenceSpaceMenu())

    def stop(self):
        """Remove the menus and log handler.

        """
        # Remove handler from the root logger.
        core = self.workbench.get_plugin(u'enaml.workbench.core')
        cmd = u'ecpy.app.logging.remove_handler'
        core.invoke_command(cmd, {'id': LOG_ID}, self)

        self.workbench.unregister(u'ecpy.pulses.workspace.menus')

        plugin = self.workbench.get_plugin(u'ecpy.pulses')
        plugin.workspace = None

    def new_sequence(self):
        """Create a brand new empty sequence.

        """
        message = cleandoc("""Make sure you saved your modification to the
                           sequence you are editing before creating a new
                           one. Press Yes to confirm, or No to go back to
                           editing and get a chance to save it.""")

        result = question(self.content,
                          'Currently edited sequence replacement',
                          fill(message.replace('\n', ' '), 79),
                          )

        if result is not None and result.action == 'accept':
            self.state.sequence = RootSequence()
            logger = logging.getLogger(__name__)
            logger.info('New sequence created')

    def save_sequence(self, mode='default'):
        """Save the currently edited sequence.

        Parameters
        ----------
        mode : {'default', 'file', 'template'}
            - default : save the sequence by using the state to determine the
              procedure to use.
            - file : save the sequence as a standard sequence and prompt the
              user to select a file.
            - template : save the sequence as a template sequence, prompt the
              user to choose a template name and give a documentation.

        """
        if mode == 'default':
            state = self.state
            if state.sequence_type == 'Unknown':
                self.save_sequence('file')
                # TODO reactivate when templates are back
                # # Here ask question and call save_sequence with right kind.
                # dial = TypeSelectionDialog(self.content)
                # dial.exec_()
                # if dial.result:
                #     self.save_sequence(dial.type)
            elif state.sequence_type == 'Standard':
                self._save_sequence_to_file(state.sequence_path)
            # Use else here as sequence_type is an enum.
            else:
                raise NotImplementedError()
                # Here stuff is a bit more complex as compilation checks need
                # to be performed.
                # Could implement TemplateSaveDialog as a wizard using a stack
                # widget; here I would bypass the first item. This would avoid
                # code duplication and allow the user to change the
                # compilation vars (useful for loaded seq that will be
                # identified but will lack vars, might later implement a
                # cache for this using pickle).
                # The compilation part would win at being implemented with a
                # separate model and view, to be used in
                # time_sequence_compilation.
                # dial = TemplateSaveDialog(self.content, workspace=self,
                #                           step=1)
                # dial.exec_()
                # if dial.result:
                #     s_ = self.state
                #     self._save_sequence_to_template(s_.sequence_path,
                #                                     s_.sequence_doc)

        elif mode == 'file':
            factory = FileDialogEx.get_save_file_name
            path = ''
            if self.state.sequence_path:
                path = os.path.dirname(self.state.sequence_path)
            save_path = factory(self.content, current_path=path,
                                name_filters=['*.pulse.ini'])
            if save_path:
                self._save_sequence_to_file(save_path)

                self.state.sequence_type = 'Standard'
                self.state.sequence_path = save_path

                logger = logging.getLogger(__name__)
                logger.info('Correctly saved sequence in file.')

        # elif mode == 'template':
        #     # Here must check context is TemplateContext and compilation is
        #     # ok (as template cannot be re-edited if not merged). Variables
        #     # used for compilation are cached.
        #     dial = TemplateSaveDialog(self.content, workspace=self)
        #     dial.exec_()
        #     if dial.result:
        #         self._save_sequence_to_template(dial.path, dial.doc)
        #         self.state.sequence_type = 'Template'
        #         self.state.sequence_path = dial.path
        #         self.state.sequence_doc = dial.doc
        #         logger = logging.getLogger(__name__)
        #         logger.info('Correctly saved sequence as template.')

        else:
            mess = cleandoc('''Invalid mode for save sequence : {}. Admissible
                            values are 'default', 'file' and 'template'.''')
            raise ValueError(mess.format(mode))

    def load_sequence(self, mode='file'):
        """Load an existing sequence to edit it.

        Parameters
        ----------
        mode : {'file', 'template'}
            - file : load a sequence from a file chosen by the user.
            - template : load a sequence from a template chosen by the user.

        """
        if mode == 'file':
            factory = FileDialogEx.get_open_file_name
            path = ''
            if self.state.sequence_path:
                path = os.path.dirname(self.state.sequence_path)
            load_path = factory(self.content, current_path=path,
                                name_filters=['*.pulse.ini'])
            if load_path:
                try:
                    seq = self._load_sequence_from_file(load_path)
                except Exception:
                    core = self.workbench.get_plugin('enaml.workbench.core')
                    cmd = 'ecpy.app.errors.signal'
                    msg = 'Failed to rebuild sequence {} :\n\n{}'
                    core.invoke_command(cmd,
                                        dict(kind='error',
                                             message=msg.format(load_path,
                                                                format_exc())))
                else:
                    self.state.sequence = seq

                    self.state.sequence_type = 'Standard'
                    self.state.sequence_path = load_path

                    logger = logging.getLogger(__name__)
                    logger.debug('Sequence correctly loaded from %s.'
                                 % load_path)

        # elif mode == 'template':
        #     dial = TemplateLoadDialog(self.content, manager=self.plugin)
        #     dial.exec_()
        #     if dial.result:
        #         seq = self._load_sequence_from_template(dial.prefs)
        #         self.state.sequence = seq
        #         self.state.sequence_type = 'Template'
        #         self.state.sequence_path = dial.t_infos.metadata['path']
        #         self.state.sequence_doc = dial.t_infos.metadata['doc']
        #         logger = logging.getLogger(__name__)
        #         logger.info('Sequence correctly loaded from template.')

        else:
            mess = cleandoc('''Invalid mode for load sequence : {}.
                            Admissible values are 'file' and 'template'.''')
            raise ValueError(mess.format(mode))

    # --- Private API ----------------------------------------------------------

    def _get_dock_area(self):
        if self.content and self.content.children:
            return self.content.children[0]

    def _save_sequence_to_file(self, path):
        if not path.endswith('.pulse.ini'):
            path += '.pulse.ini'
        seq = self.state.sequence
        prefs = seq.preferences_from_members()
        empty_vars = OrderedDict.fromkeys(seq.external_vars, '')
        prefs['external_vars'] = repr(list(empty_vars.items()))
        save_sequence_prefs(path, prefs)

    # def _save_sequence_to_template(self, path, doc):
    #     seq = self.state.sequence
    #     prefs = seq.preferences_from_members()
    #     prefs['external_vars'] = repr(dict.fromkeys(seq.external_vars.keys(),
    #                                                 ''))
    #     prefs['template_vars'] = prefs.pop('external_vars')
    #     del prefs['item_id']
    #     del prefs['time_constrained']
    #     save_sequence_prefs(path, prefs, doc)

    def _load_sequence_from_file(self, path):
        core = self.workbench.get_plugin('enaml.workbench.core')
        cmd = 'ecpy.pulses.build_sequence'
        return core.invoke_command(cmd, {'path': path})
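
# Minimal illustration of the external_vars emptying done in
# _save_sequence_to_file above (variable names and values are illustrative):
# the variable names are preserved but their cached values are dropped before
# the preferences are written to disk.
from collections import OrderedDict

external_vars = OrderedDict([('t_start', 0.5), ('amp', 1.2)])
empty_vars = OrderedDict.fromkeys(external_vars, '')
assert list(empty_vars.items()) == [('t_start', ''), ('amp', '')]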
class SaveFileTask(SimpleTask):
    """Save the specified entries in a CSV file.

    Wait for any parallel operation before execution.

    Notes
    -----
    Currently only supports saving floats and arrays of floats (record arrays
    or simple arrays).

    """
    #: Folder in which to save the data.
    folder = Unicode('{default_path}').tag(pref=True)

    #: Name of the file in which to write the data.
    filename = Unicode().tag(pref=True)

    #: Currently opened file object. (File mode)
    file_object = Value()

    #: Header to write at the top of the file.
    header = Str().tag(pref=True)

    #: List of values to be saved, stored as (label, value) tuples.
    saved_values = ContainerList(Tuple()).tag(pref=True)

    #: Flag indicating whether or not initialisation has been performed.
    initialized = Bool(False)

    #: Column indices identified as arrays.
    array_values = Value()

    task_database_entries = set_default({'file': None})

    wait = set_default({'activated': True})  # Wait on all pools by default.

    def perform(self):
        """Collect all data and write them to file.

        """
        # Initialisation.
        if not self.initialized:

            full_folder_path = self.format_string(self.folder)
            filename = self.format_string(self.filename)
            full_path = os.path.join(full_folder_path, filename)
            try:
                self.file_object = open(full_path, 'wb')
            except IOError as e:
                log = logging.getLogger()
                mes = cleandoc('''In {}, failed to open the specified
                               file {}'''.format(self.task_name, e))
                log.error(mes)
                self.root_task.should_stop.set()

            self.root_task.files[full_path] = self.file_object
            if self.header:
                for line in self.header.split('\n'):
                    self.file_object.write('# ' + line + '\n')

            labels = []
            self.array_values = set()
            for i, s in enumerate(self.saved_values):
                value = self.format_and_eval_string(s[1])
                if isinstance(value, numpy.ndarray):
                    names = value.dtype.names
                    self.array_values.add(i)
                    if names:
                        labels.extend([s[0] + '_' + m for m in names])
                    else:
                        labels.append(s[0])
                else:
                    labels.append(s[0])
            self.file_object.write('\t'.join(labels) + '\n')
            self.file_object.flush()

            self.initialized = True

        lengths = set()
        values = []
        for i, s in enumerate(self.saved_values):
            value = self.format_and_eval_string(s[1])
            values.append(value)
            if i in self.array_values:
                lengths.add(value.shape[0])

        if lengths:
            if len(lengths) > 1:
                log = logging.getLogger()
                mes = cleandoc('''In {}, impossible to save simultaneously
                               arrays of different sizes
                               '''.format(self.task_name))
                log.error(mes)
                self.root_task.should_stop.set()
            else:
                length = lengths.pop()

        if not self.array_values:
            self.file_object.write('\t'.join([str(val)
                                              for val in values]) + '\n')
            self.file_object.flush()
        else:
            columns = []
            for i, val in enumerate(values):
                if i in self.array_values:
                    if val.dtype.names:
                        columns.extend([val[m] for m in val.dtype.names])
                    else:
                        columns.append(val)
                else:
                    columns.append(numpy.ones(length) * val)
            array_to_save = numpy.rec.fromarrays(columns)
            numpy.savetxt(self.file_object, array_to_save, delimiter='\t')
            self.file_object.flush()

    def check(self, *args, **kwargs):
        """Check that the target file can be created and the entries
        evaluated.

        """
        err_path = self.task_path + '/' + self.task_name
        traceback = {}
        try:
            full_folder_path = self.format_string(self.folder)
        except Exception as e:
            mess = 'Failed to format the folder path: {}'
            traceback[err_path] = mess.format(e)
            return False, traceback

        try:
            filename = self.format_string(self.filename)
        except Exception as e:
            mess = 'Failed to format the filename: {}'
            traceback[err_path] = mess.format(e)
            return False, traceback

        full_path = os.path.join(full_folder_path, filename)

        overwrite = False
        if os.path.isfile(full_path):
            overwrite = True
            traceback[err_path + '-file'] = \
                cleandoc('''File already exists, running the measure will
                         overwrite it.''')

        try:
            f = open(full_path, 'ab')
            f.close()
            if not overwrite:
                os.remove(full_path)
        except Exception as e:
            mess = 'Failed to open the specified file: {}'
            traceback[err_path] = mess.format(e)
            return False, traceback

        test = True
        for i, s in enumerate(self.saved_values):
            try:
                self.format_and_eval_string(s[1])
            except Exception as e:
                traceback[err_path + '-entry' + str(i)] = \
                    'Failed to evaluate entry {}: {}'.format(s[0], e)
                test = False

        return test, traceback
class CloneTest(Atom):

    v = Value().tag(test=True)

    @observe('v')
    def react(self, change):
        pass
class MeasureProcessor(Atom):
    """Object responsible for a measure execution.

    """
    #: Boolean indicating whether or not the processor is working.
    active = Bool()

    #: Reference to the measure plugin.
    plugin = ForwardTyped(plugin)

    #: Currently run measure or last measure run.
    running_measure = Typed(Measure)

    #: Instance of the currently used engine.
    engine = Typed(BaseEngine)

    #: Boolean indicating whether or not to process all enqueued measures.
    continuous_processing = Bool(True)

    #: Monitors window.
    monitors_window = Typed(Window)

    def start_measure(self, measure):
        """Start a new measure.

        """
        if self._thread and self._thread.is_alive():
            self._state.set('stop_processing')
            self._thread.join(5)
            if self._thread.is_alive():
                core = self.plugin.workbench.get_plugin('enaml.workbench.core')
                cmd = 'ecpy.app.errors.signal'
                msg = ("Can't stop the running execution thread. Please "
                       "restart the application and consider reporting this "
                       "as a bug.")
                core.invoke_command(cmd, dict(kind='error', message=msg))
                return

        if self.continuous_processing:
            self._state.set('continuous_processing')
        else:
            self._state.clear('continuous_processing')

        deferred_call(setattr, self, 'active', True)
        self._thread = Thread(target=self._run_measures,
                              args=(measure,))
        self._thread.daemon = True
        self._thread.start()

    def pause_measure(self):
        """Pause the currently active measure.

        """
        logger.info('Pausing measure {}.'.format(self.running_measure.name))
        self.running_measure.status = 'PAUSING'
        self._state.set('pause_attempt')
        if self._state.test('running_main'):
            self.engine.pause()
            self.engine.observe('status', self._watch_engine_state)
        else:
            if self._active_hook:
                self._active_hook.pause()
                self._active_hook.observe('paused', self._watch_hook_state)

    def resume_measure(self):
        """Resume the currently paused measure.

        """
        logger.info('Resuming measure {}.'.format(self.running_measure.name))
        self.running_measure.status = 'RESUMING'
        self._state.clear('paused')
        self._state.set('resuming')
        if self._state.test('running_main'):
            self.engine.resume()
            self.engine.observe('status', self._watch_engine_state)
        else:
            if self._active_hook:
                self._active_hook.resume()
                self._active_hook.observe('resumed', self._watch_hook_state)

    def stop_measure(self, no_post_exec=False, force=False):
        """Stop the currently active measure.

        """
        if no_post_exec or force:
            self._state.set('no_post_exec')
        self._state.set('stop_attempt')
        if self.running_measure:
            logger.info('Stopping measure %s.' % self.running_measure.name)
            self.running_measure.status = 'STOPPING'

        if self._state.test('running_main'):
            self.engine.stop(force)
        else:
            if self._active_hook:
                self._active_hook.stop(force)

    def stop_processing(self, no_post_exec=False, force=False):
        """Stop processing the enqueued measures.

        """
        if self.running_measure:
            logger.info('Stopping measure %s.' % self.running_measure.name)
        if no_post_exec or force:
            self._state.set('no_post_exec')
        self._state.set('stop_attempt', 'stop_processing')
        self._state.clear('processing')
        if self._state.test('running_main'):
            self.engine.stop(force)
        else:
            if self._active_hook:
                self._active_hook.stop(force)

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    #: Background thread handling the measure execution.
    _thread = Value()

    #: Internal flags used to keep track of the execution state.
    _state = Typed(BitFlag,
                   (('processing', 'running_pre_hooks', 'running_main',
                     'running_post_hooks', 'pause_attempt', 'paused',
                     'resuming', 'stop_attempt', 'stop_processing',
                     'no_post_exec', 'continuous_processing'),))

    #: Hook currently executed. The value is meaningful only when
    #: 'running_pre_hooks' or 'running_post_hooks' is set.
    _active_hook = Value()

    #: Lock to avoid race condition when pausing.
    _lock = Value(factory=RLock)

    def _run_measures(self, measure):
        """Run measures (either all enqueued or only one).

        This code is executed by a thread (stored in _thread).

        Parameters
        ----------
        measure : Measure
            First measure to run. Other measures will be run in their order of
            appearance in the queue if the user enables continuous processing.

        """
        # If the engine does not exist, create one.
        plugin = self.plugin
        if not self.engine:
            engine = plugin.create('engine', plugin.selected_engine)
            schedule_and_block(setattr, (self, 'engine', engine))

        # Mark that we started processing measures.
        self._state.set('processing')

        # Process enqueued measures as long as we are supposed to.
        while not self._state.test('stop_processing'):

            # Clear the internal state to start fresh.
            self._clear_state()

            # If we were provided with a measure use it, otherwise find the
            # next one.
            if measure:
                meas = measure
                measure = None
            else:
                meas = self.plugin.find_next_measure()

            # If there is a measure register it as the running one, update its
            # status and log its execution.
            if meas is not None:
                meas_id = meas.name + '_' + meas.id
                self._set_measure_state('RUNNING',
                                        'The measure is being run.', meas)
                msg = 'Starting execution of measure %s'
                logger.info(msg % meas.name + meas.id)
                status, infos = self._run_measure(meas)
                # Release runtime dependencies.
                meas.dependencies.release_runtimes()

            # If no measure remains, stop.
            else:
                break

            # Log the result.
            mess = 'Measure %s processed, status : %s' % (meas_id, status)
            if infos:
                mess += '\n' + infos
            logger.info(mess)

            # Update the status and infos.
            self._set_measure_state(status, infos, clear=True)

            # If we are supposed to stop, stop.
            if (not self._state.test('continuous_processing') or
                    self._state.test('stop_processing')):
                break

        if self.engine and self.plugin.engine_policy == 'stop':
            self._stop_engine()

        self._state.clear('processing')
        deferred_call(setattr, self, 'active', False)

    def _run_measure(self, measure):
        """Run a single measure.

        """
        # Switch to running state.
        measure.enter_running_state()

        meas_id = measure.name + '_' + measure.id

        # Collect runtime dependencies.
        res, msg, errors = measure.dependencies.collect_runtimes()
        if not res:
            status = 'SKIPPED' if 'unavailable' in msg else 'FAILED'
            return status, msg + '\n' + errors_to_msg(errors)

        # Record that we got access to all the runtimes.
        mess = ('The use of all runtime resources has been granted to the '
                'measure %s' % meas_id)
        logger.info(mess.replace('\n', ' '))

        # Run checks now that we have all the runtimes.
        if not measure.forced_enqueued:
            res, errors = measure.run_checks()
            if not res:
                msg = 'Measure %s failed to pass the checks :\n' % meas_id
                return 'FAILED', msg + errors_to_msg(errors)

        # Now that we know the measure is going to run, save it.
        default_filename = meas_id + '.meas.ini'
        path = os.path.join(measure.root_task.default_path, default_filename)
        measure.save(path)

        logger.info('Starting measure {}.'.format(meas_id))

        # Execute all pre-execution hooks.
        result, errors = self._run_pre_execution(measure)
        if not result:
            msg = ('Measure %s failed to run pre-execution hooks :\n' %
                   meas_id)
            return 'FAILED', msg + errors_to_msg(errors)

        result = True
        errors = {}
        if self._check_for_pause_or_stop():

            # Connect new monitors, and start them.
            logger.debug('Connecting monitors for measure %s', meas_id)
            self._start_monitors(measure)

            # Assemble the task infos for the engine to run the main task.
            deps = measure.dependencies
            infos = ExecutionInfos(
                id=meas_id + '-main',
                task=measure.root_task,
                build_deps=deps.get_build_dependencies().dependencies,
                runtime_deps=deps.get_runtime_dependencies('main'),
                observed_entries=measure.collect_monitored_entries(),
                checks=not measure.forced_enqueued,
                )

            # Ask the engine to perform the main task.
            logger.debug('Passing measure %s to the engine.', meas_id)
            self._state.set('running_main')
            execution_result = self.engine.perform(infos)
            self._state.clear('running_main')

            # Record the result and store the engine return value in the
            # measure for the post-execution hooks.
            result &= execution_result.success
            errors.update(execution_result.errors)
            measure.task_execution_result = execution_result

            # Disconnect monitors.
            logger.debug('Disconnecting monitors for measure %s', meas_id)
            self._stop_monitors(measure)

        # Save the stop_attempt state to allow to run post execution if we
        # are supposed to do so.
        state = self._state.test('stop_attempt')
        self._state.clear('stop_attempt')

        # Execute all post-execution hooks if pertinent.
        if not self._state.test('no_post_exec'):
            res, errors = self._run_post_execution(measure)
            result &= res

        if state:
            self._state.set('stop_attempt')

        if self._state.test('stop_attempt'):
            return ('INTERRUPTED',
                    'The measure has been interrupted by the user.')

        if not result:
            if not execution_result.success:
                msg = 'Execution of the main task failed :\n'
            else:
                msg = 'Some post-execution hook failed to run :\n'
            return 'FAILED', msg + errors_to_msg(errors)

        return 'COMPLETED', 'The measure successfully completed.'

    def _run_pre_execution(self, measure):
        """Run pre measure execution operations.

        Returns
        -------
        result : bool
            Boolean indicating whether or not the operations succeeded.

        report : dict
            Dict storing the errors (as dict) by id of the operation in which
            they occurred.

        """
        result = True
        full_report = {}
        self._state.set('running_pre_hooks')
        meas_id = measure.name + '_' + measure.id
        for id, hook in measure.pre_hooks.items():
            if not self._check_for_pause_or_stop():
                break
            logger.debug('Calling pre-measure hook %s for measure %s',
                         id, meas_id)
            with self._lock:
                self._active_hook = hook

            try:
                hook.run(self.plugin.workbench, self.engine)
            except Exception:
                result = False
                full_report[id] = format_exc()

            # Prevent issues with pausing/resuming.
            with self._lock:
                self._active_hook.unobserve('paused', self._watch_hook_state)
                self._active_hook = None

        self._state.clear('running_pre_hooks')

        return result, full_report

    def _run_post_execution(self, measure):
        """Run post measure operations.

        Parameters
        ----------
        measure : Measure

        Returns
        -------
        result : bool
            Boolean indicating whether or not the operations succeeded.

        report : dict
            Dict storing the errors (as dict) by id of the operation in which
            they occurred.
""" result = True full_report = {} self._state.set('running_post_hooks') meas_id = measure.name + '_' + measure.id for id, hook in measure.post_hooks.items(): if not self._check_for_pause_or_stop(): break logger.debug('Calling post-measure hook %s for measure %s', id, meas_id) with self._lock: self._active_hook = hook try: hook.run(self.plugin.workbench, self.engine) except Exception: result = False full_report[id] = format_exc() # Prevent issues with pausing/resuming with self._lock: self._active_hook.unobserve('paused', self._watch_hook_state) self._active_hook = None self._state.clear('running_post_hooks') return result, full_report def _start_monitors(self, measure): """Start the monitors attached to a measure and display them. If no dedicated window exists one will be created. For monitors for which a dockitem already exists it is re-used. """ def start_monitors(self, measure): """Start the monitors attached to a measure. Called in the main thread. """ workbench = self.plugin.workbench if not self.monitors_window: with enaml.imports(): from .workspace.monitors_window import MonitorsWindow self.monitors_window = MonitorsWindow() else: self.monitors_window.send_to_front() self.monitors_window.measure = measure dock_area = self.monitors_window.dock_area anchor = '' for dock_item in dock_area.dock_items(): if dock_item.name not in measure.monitors: dock_item.destroy() elif not anchor: anchor = dock_item.name # We show the window now because otherwise the layout ops are not # properly executed. if self.plugin.auto_show_monitors: self.monitors_window.show() ops = [] for monitor in measure.monitors.values(): decl = monitor.declaration dock_item = dock_area.find(decl.id) if dock_item is None: try: dock_item = decl.create_item(workbench, dock_area) except Exception: msg = 'Failed to create widget for monitor %s :\n %s' logger.error(msg, decl.id, format_exc()) continue if dock_item is not None: if dock_item.float_default: ops.append(FloatItem(item=decl.id)) else: ops.append(InsertTab(item=decl.id, target=anchor)) self.engine.observe('progress', monitor.process_news) if dock_item: dock_item.monitor = monitor monitor.start() if ops: dock_area.update_layout(ops) # Executed in the main thread to avoid GUI update issues. schedule_and_block(start_monitors, (self, measure), priority=100) def _stop_monitors(self, measure): """Disconnect the monitors from the engine and stop them. The monitors windows is not hidden as the user may want to check it later. """ def stop_monitors(engine, measure): """Stop the monitors. Executed on the main thread. """ if engine: engine.unobserve('news') for monitor in measure.monitors.values(): monitor.stop() # Executed in the main thread to avoid GUI update issues. schedule_and_block(stop_monitors, (self.engine, measure), priority=100) def _check_for_pause_or_stop(self): """Check if a pause or stop request is pending and process it. Returns ------- should_stop : bool Booelan indicating whether or not the execution of the measure should stop. 
""" flag = self._state if flag.test('stop_attempt'): return False if flag.test('pause_attempt'): flag.clear('pause_attempt') self._set_measure_state('PAUSED', 'The measure is paused.') flag.set('paused') while True: if flag.wait(0.1, 'resuming'): flag.clear('resuming') self._set_measure_state('RUNNING', 'The measure has resumed.') return True if flag.test('stop_attempt'): return False return True # Those must post update of measure.status and remove observers def _watch_engine_state(self, change): """Observe engine state to notify that the engine paused or resumed. """ if change['value'] == 'Paused': self._state.clear('pause_attempt') self.engine.unobserve('status', self._watch_engine_state) self._set_measure_state('PAUSED', 'The measure is paused.') self._state.set('paused') elif change['value'] == 'Running': self._state.clear('resuming') self.engine.unobserve('status', self._watch_engine_state) self._set_measure_state('RUNNING', 'The measure has resumed.') def _watch_hook_state(self, change): """Observe hook paused/resumed events to validate pausing/resuming. """ if change['name'] == 'paused': self._active_hook.unobserve('status', self._watch_hook_state) self._set_measure_state('PAUSED', 'The measure is paused.') self._state.clear('pause_attempt') self._state.set('paused') elif change['name'] == 'resumed': self._state.clear('resuming') self._active_hook.unobserve('status', self._watch_hook_state) self._set_measure_state('RUNNING', 'The measure has resumed.') def _set_measure_state(self, status, infos, measure=None, clear=False): """Set the measure status and infos in the main thread. """ def set_state(processor, status, infos, meas, clear): if meas: processor.running_measure = meas measure = processor.running_measure measure.status = status measure.infos = infos if clear: processor.running_measure = None # Executed in the main thread to avoid GUI update issues. schedule_and_block(set_state, (self, status, infos, measure, clear), priority=100) def _stop_engine(self): """Stop the engine. """ logger.debug('Stopping engine') engine = self.engine engine.shutdown() i = 0 while engine and engine.status != 'Stopped': sleep(0.5) i += 1 if i > 10: engine.shutdown(force=True) def _clear_state(self): """Clear the state when starting while preserving persistent settings. """ flags = list(self._state.flags) flags.remove('processing') flags.remove('continuous_processing') self._state.clear(*flags) def _post_setattr_continuous_processing(self, old, new): """Make sure the internal bit flag does reflect the real setting. """ if new: self._state.set('continuous_processing') else: self._state.clear('continuous_processing')
class ContainerChange(Atom):
    """Payload to use when notifying the system about a container change.

    """
    #: Reference to the object from which this event originates.
    obj = Value()

    #: Name of the modified container.
    name = Unicode()

    #: List of added entries. Should not be manipulated directly by user code.
    #: Use the add_operation method to add operations.
    added = List()

    #: List of moved entries with their old and new positions. Should not be
    #: manipulated directly by user code. Use the add_operation method to add
    #: operations.
    moved = List()

    #: List of removed entries. Should not be manipulated directly by user
    #: code. Use the add_operation method to add operations.
    removed = List()

    #: List of ContainerChange representing an ordered sequence of changes.
    collapsed = List()

    #: Private member used to store the kind of the last added operation.
    _last_added = Value()

    def add_operation(self, typ, op_desc):
        """Add an operation.

        If operations of different types are added, they are represented by
        separate ContainerChange instances appended to the collapsed list.
        Using this method ensures that only one list is non-empty. Consumers
        should always check the collapsed list first.

        Parameters
        ----------
        typ : {'added', 'moved', 'removed'}
            The type of operation to add to the change set.

        op_desc : tuple
            Tuple describing the operation; it should be of the form:

            - 'added' : (index, obj)
            - 'moved' : (old_index, new_index, obj)
            - 'removed' : (index, obj)

        """
        # If we are already working with a collapsed change simply check the
        # last one to see if we can append to its changes or create a new
        # entry.
        if self.collapsed:
            if typ != self.collapsed[-1]._last_added:
                self.collapsed.append(ContainerChange(obj=self.obj,
                                                      name=self.name))

            self.collapsed[-1].add_operation(typ, op_desc)
            return

        if self._last_added and typ != self._last_added:
            # Clone ourselves and clean all lists.
            clone = ContainerChange(obj=self.obj, name=self.name,
                                    added=self.added, moved=self.moved,
                                    removed=self.removed,
                                    _last_added=self._last_added)
            del self.added, self.moved, self.removed
            self.collapsed.append(clone)

            # We are now in a collapsed state so add_operation will do its
            # job.
            self.add_operation(typ, op_desc)
            return

        if typ not in ('moved', 'added', 'removed'):
            msg = "typ argument must be in 'moved', 'added', 'removed' not {}"
            raise ValueError(msg.format(typ))

        if typ == 'moved':
            if not len(op_desc) == 3:
                raise ValueError('Moved operation should be described by :'
                                 '(old, new, obj) not {}'.format(op_desc))
        elif typ in ('added', 'removed'):
            if not len(op_desc) == 2:
                t = typ.capitalize()
                raise ValueError(t + ' operation should be described by :'
                                 '(index, obj) not {}'.format(op_desc))

        # Otherwise simply append the operation.
        getattr(self, typ).append(op_desc)
        self._last_added = typ
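
# A short usage sketch (obj and entries are illustrative): mixing operation
# types automatically switches the change to its collapsed representation.
change = ContainerChange(obj=None, name='items')

# Homogeneous operations accumulate in a single flat list.
change.add_operation('added', (0, 'a'))
change.add_operation('added', (1, 'b'))
assert change.added == [(0, 'a'), (1, 'b')] and not change.collapsed

# A different kind of operation collapses the change: consumers should then
# iterate over ``collapsed`` instead of the flat lists.
change.add_operation('removed', (0, 'a'))
assert not change.added
assert [c.added or c.removed for c in change.collapsed] == \
    [[(0, 'a'), (1, 'b')], [(0, 'a')]]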
class BaseInstrumentView(GroupBox):
    """Base class for instrument task views.

    This class handles internally the access to the profiles.

    """
    #: Reference to the task being edited by this view.
    task = d_(Instance(InstrumentTask))

    #: List of drivers which can be used with that task.
    drivers = d_(List(Str()))

    #: List of profiles matching the currently selected driver.
    profiles = d_(List(Str()))

    #: Reference to the core plugin of the application.
    core = d_(Typed(CorePlugin))

    #: Reference to the InstrManager state.
    instr_man_state = Value()

    #: References to the currently instantiated interface views.
    i_views = Tuple(default=())

    padding = set_default((0, 2, 2, 2))

    def initialize(self):
        """Overridden initializer to get a ref to the instr manager state on
        start up.

        """
        super(BaseInstrumentView, self).initialize()
        cmd = 'hqc_meas.state.get'
        state = self.core.invoke_command(
            cmd, {'state_id': 'hqc_meas.states.instr_manager'})
        self.instr_man_state = state

        if isinstance(self.task, InterfaceableTaskMixin):
            cmd = 'hqc_meas.task_manager.interfaces_request'
            inter, _ = self.core.invoke_command(
                cmd, {'tasks': [self.task.task_class]})

            # Get the drivers defined on the task, i.e. using the default
            # interface implemented through i_perform.
            drivers = self.task.driver_list[:]
            interfaces = {driver: type(None) for driver in drivers}

            # Map drivers to their interface.
            for i in inter.get(self.task.task_class, []):
                drivers.extend(i.driver_list)
                interfaces.update({d: i for d in i.driver_list})

            self.drivers = drivers
            self._interfaces = interfaces

        else:
            self.drivers = self.task.driver_list

        if getattr(self.task, 'interface', None):
            self._insert_interface_views(self.task.interface)

        self._update_profiles({})
        self._bind_observers()

    def destroy(self):
        """Overridden destroyer to remove observers from the instr manager
        state.

        """
        self._unbind_observers()
        super(BaseInstrumentView, self).destroy()

    # --- Private API ---------------------------------------------------------

    #: Map between driver and interface.
    _interfaces = Dict(Str())

    def _update_interface(self, change):
        """Update the interface when the selected driver changes.

        """
        driver = self.task.selected_driver
        interface = self._interfaces[driver]

        # The or clause handles the absence of an interface (i.e. None for
        # both interface and task.interface).
        if type(self.task.interface) != interface:
            # Destroy the views associated with the old interface.
            for i_v in self.i_views:
                i_v.destroy()

            # If no interface is used simply assign None.
            if type(None) == interface:
                self.task.interface = None
                return

            # Otherwise create the interface and insert its views.
            self.task.interface = interface()
            self._insert_interface_views(self.task.interface)

    def _insert_interface_views(self, interface):
        """Insert the views associated with an interface instance.

        """
        cmd = 'hqc_meas.task_manager.interface_views_request'
        i_c_name = type(interface).__name__
        views, _ = self.core.invoke_command(cmd,
                                            {'interface_classes': [i_c_name]})

        if interface.has_view:
            i_views = [v(self, interface=self.task.interface)
                       for v in views[i_c_name]]
            # TODO handle more complex insertions.
            if hasattr(i_views[0], 'index'):
                self.insert_children(i_views[0].index, i_views)
            else:
                self.insert_children(None, i_views)

            self.i_views = tuple(i_views)

        else:
            self.i_views = ()

    def _update_profiles(self, change):
        """Update the list of matching profiles for the selected driver.

""" driver = self.task.selected_driver if driver: cmd = 'hqc_meas.instr_manager.matching_profiles' self.profiles = self.core.invoke_command(cmd, {'drivers': [driver]}) def _bind_observers(self): """ Bind the observers at widget initialisation. """ self.instr_man_state.observe('all_profiles', self._update_profiles) self.task.observe('selected_driver', self._update_profiles) if isinstance(self.task, InterfaceableTaskMixin): self.task.observe('selected_driver', self._update_interface) def _unbind_observers(self): """ Unbind the observers at widget destruction. """ self.instr_man_state.unobserve('all_profiles', self._update_profiles) self.task.unobserve('selected_driver', self._update_profiles) if isinstance(self.task, InterfaceableTaskMixin): self.task.unobserve('selected_driver', self._update_interface)
class Matplotlib2DRectangularMeshProxy(Plot2DRectangularMeshProxy):
    """Matplotlib proxy for a mesh plot.

    If the grid can be identified as regular we use imshow, otherwise we use
    pcolormesh.

    """

    def activate(self):
        super().activate()
        axes_mapping = self.element.axes_mapping
        axes = (axes_mapping["x"], axes_mapping["y"])
        if axes_mapping["x"] in ("left", "right"):
            self._invert = True
            axes = axes[::-1]
        self._mpl_axes = self.element.axes.proxy._axes[axes]
        self._display_data()

    def finalize(self):
        # Remove the mesh artist before deactivating.
        if self._mesh:
            self._mesh.remove()
        super().deactivate()

    def set_data(self, data):
        if self._mesh:
            self._mesh.remove()
        self._display_data()

    # --- Private API

    #: Matplotlib axes in which to draw
    _mpl_axes = Typed(Axes)

    #: Do we need to invert x and y due to the axes mapping
    _invert = Bool()

    #: Reference to the currently displayed mesh
    _mesh = Value()

    def _display_data(self):
        data = self.element.data
        use_imshow = False
        x, y, c = data.x, data.y, data.c
        if self._invert:
            x, y = y, x

        if len(c.shape) == 2:
            pass  # No reshaping needed
        elif (len(x.shape) == 1 and len(y.shape) == 1
              and len(x) * len(y) == len(c)):
            c = np.reshape(c, (len(x), len(y)))
        elif len(x) == len(c) and len(y) == len(c):
            # Ravel to avoid weird issue with N-D array
            x, y, c = np.ravel(x), np.ravel(y), np.ravel(c)
            shape = (len(np.unique(x)), len(np.unique(y)))
            index = np.lexsort((y, x))
            x, y, c = x[index], y[index], c[index]
            if len(c) < np.prod(shape):
                to_add = np.ones(np.prod(shape) - len(c))
                x = np.append(x, x[-1] * to_add)
                y = np.append(y, y[-1] * to_add)
                c = np.append(c, c[-1] * to_add)
            elif len(c) > np.prod(shape):
                to_add = np.ones(shape[0] - (len(c) % shape[0]))
                x = np.append(x, x[-1] * to_add)
                y = np.append(y, y[-1] * to_add)
                c = np.append(c, c[-1] * to_add)
                shape = (shape[0], -1)
            x, y, c = (np.reshape(x, shape),
                       np.reshape(y, shape),
                       np.reshape(c, shape))
        else:
            raise RuntimeError(
                f"Cannot reshape c {c.shape} to plot it "
                f"(x: {x.shape}, y: {y.shape})"
            )

        if use_imshow:
            self._mesh = self._mpl_axes.imshow(
                c,
                origin="lower",
                aspect="auto",
            )
        else:
            # Need matplotlib > 3.3
            self._mesh = self._mpl_axes.pcolormesh(
                x,
                y,
                c,
                shading="nearest",
                cmap=self.element.colormap,
                zorder=self.element.zorder,
            )

        if self.element.axes.colorbar:
            self.element.axes.colorbar.proxy.connect_mappable(self._mesh)

        # FIXME ugly but the automatic manner does not work.
        self._mpl_axes.set_xlim((x.min(), x.max()))
        self._mpl_axes.set_ylim((y.min(), y.max()))

        self.element.axes.figure.proxy.request_redraw()
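
# Standalone illustration (plain numpy, hypothetical values) of the
# scattered-points branch of _display_data above: matching-length 1-D x, y, c
# arrays are lexsorted into row-major order and reshaped onto the inferred
# grid before being handed to pcolormesh.
import numpy as np

x = np.array([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
y = np.array([0.0, 0.0, 1.0, 1.0, 2.0, 2.0])
c = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0])

shape = (len(np.unique(x)), len(np.unique(y)))   # (2, 3)
index = np.lexsort((y, x))                       # sort by x, then by y
x, y, c = x[index], y[index], c[index]
x, y, c = (a.reshape(shape) for a in (x, y, c))

print(c)
# [[1. 3. 5.]
#  [2. 4. 6.]]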
class FalseAtom(object): v = Value()
class InstrumentTask(SimpleTask): """Base class for all tasks calling instruments. """ #: Selected instrument as (profile, driver, collection, settings) tuple selected_instrument = Tuple(default=('', '', '', '')).tag(pref=True) #: Instance of instrument driver. driver = Value() # HINT done this way so that classes overriding this one does not # forget to preserve it. def __init__(self, **kwargs): super(InstrumentTask, self).__init__(**kwargs) de = self.database_entries.copy() de['instrument'] = '' self.database_entries = de def check(self, *args, **kwargs): """Chech that the provided informations allows to establish the connection to the instrument. """ # TODO add a check that the same profile is not used by different tasks # with different infos (need a way to share states, could use the # errors member of the root or similar, to avoid modifying the way # this method is called. test, traceback = super(InstrumentTask, self).check(*args, **kwargs) err_path = self.get_error_path() + '-instrument' run_time = self.root.run_time profile = None if self.selected_instrument and len(self.selected_instrument) == 4: p_id, d_id, c_id, s_id = self.selected_instrument self.write_in_database('instrument', p_id) if PROFILE_DEPENDENCY_ID in run_time: # Here use .get() to avoid errors if we were not granted the # use of the profile. In that case config won't be used. profile = run_time[PROFILE_DEPENDENCY_ID].get(p_id) else: msg = ('No instrument was selected or not all informations were ' 'provided. The instrument selected should be specified as ' '(profile_id, driver_id, connection_id, settings_id). ' 'settings_id can be None') traceback[err_path] = msg return False, traceback if run_time and d_id in run_time[DRIVER_DEPENDENCY_ID]: d_cls, starter = run_time[DRIVER_DEPENDENCY_ID][d_id] else: msg = ('Failed to get the specified driver : %s. Collected drivers' ' are %s.') traceback[err_path] = msg % (d_id, run_time[DRIVER_DEPENDENCY_ID]) return False, traceback if profile: if c_id not in profile['connections']: traceback[err_path] = ('The selected profile does not contain ' 'the %s connection') % c_id return False, traceback elif s_id is not None and s_id not in profile['settings']: traceback[err_path] = ('The selected profile does not contain ' 'the %s settings') % s_id return False, traceback if kwargs.get('test_instr', True): s = profile['settings'].get(s_id, {}) res, msg = starter.check_infos(d_cls, profile['connections'][c_id], s) if not res: traceback[err_path] = msg return False, traceback return test, traceback def prepare(self): """Always start the driver. """ super(InstrumentTask, self).prepare() self.write_in_database('instrument', self.selected_instrument[0]) self.start_driver() def start_driver(self): """Create an instance of the instrument driver and connect it. """ run_time = self.root.run_time instrs = self.root.resources['instrs'] p_id, d_id, c_id, s_id = self.selected_instrument if self.selected_instrument in instrs: self.driver = instrs[self.selected_instrument][0] else: profile = run_time[PROFILE_DEPENDENCY_ID][p_id] d_cls, starter = run_time[DRIVER_DEPENDENCY_ID][d_id] # Profile do not always contain a settings. self.driver = starter.start(d_cls, profile['connections'][c_id], profile['settings'].get(s_id, {})) # HINT allow something dangerous as the same instrument can be # accessed using multiple settings. 
            # User should be careful about this (and should be warned).
            instrs[self.selected_instrument] = (self.driver, starter)

    @contextmanager
    def test_driver(self):
        """Safe temporary access to the driver to run some checks.

        Yield either a fully initialized driver or None.

        """
        try:
            run_time = self.root.run_time
            p_id, d_id, c_id, s_id = self.selected_instrument
            profile = run_time[PROFILE_DEPENDENCY_ID][p_id]
            d_cls, starter = run_time[DRIVER_DEPENDENCY_ID][d_id]
            # Settings may be absent from the profile (see start_driver).
            driver = starter.start(d_cls, profile['connections'][c_id],
                                   profile['settings'].get(s_id, {}))
        except Exception:
            driver = None

        yield driver

        if driver:
            starter.stop(driver)
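# Generic version of the pattern used by test_driver above: acquire a resource
# inside a try block, always yield (possibly None), and release it only if the
# acquisition succeeded. The connect/close callables below are placeholders
# invented for the sketch, not part of the instrument API.
from contextlib import contextmanager


@contextmanager
def temporary_resource(connect, close):
    try:
        resource = connect()
    except Exception:
        resource = None

    yield resource

    # (A hardened version would wrap the yield in try/finally so the cleanup
    # also runs when the with-body raises.)
    if resource is not None:
        close(resource)


# Usage sketch: callers only have to test for None.
with temporary_resource(lambda: {"connected": True},
                        lambda r: r.update(connected=False)) as res:
    if res is not None:
        assert res["connected"]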
class SaveTask(SimpleTask): """ Save the specified entries either in a CSV file or an array. The file is closed when the line number is reached. Wait for any parallel operation before execution. Notes ----- Currently only support saving floats. """ #: Kind of object in which to save the data. saving_target = Enum('File', 'Array', 'File and array').tag(pref=True) #: Folder in which to save the data. folder = Unicode().tag(pref=True) #: Name of the file in which to write the data. filename = Unicode().tag(pref=True) #: Currently opened file object. (File mode) file_object = Value() #: Opening mode to use when saving to a file. file_mode = Enum('New', 'Add') #: Header to write at the top of the file. header = Str().tag(pref=True) #: Numpy array in which data are stored (Array mode) array = Value() # Array #: Size of the data to be saved. (Evaluated at runtime) array_size = Str().tag(pref=True) #: Computed size of the data (post evaluation) array_length = Int() #: Index of the current line. line_index = Int(0) #: List of values to be saved store as (label, value). saved_values = ContainerList(Tuple()).tag(pref=True) #: Flag indicating whether or not initialisation has been performed. initialized = Bool(False) task_database_entries = set_default({'file': None}) wait = set_default({'activated': True}) # Wait on all pools by default. def perform(self): """ Collect all data and write them to array or file according to mode. On first call initialise the system by opening file and/or array. Close file when the expected number of lines has been written. """ # Initialisation. if not self.initialized: self.line_index = 0 size_str = self.array_size if size_str: self.array_length = self.format_and_eval_string(size_str) else: self.array_length = -1 if self.saving_target != 'Array': full_folder_path = self.format_string(self.folder) filename = self.format_string(self.filename) full_path = os.path.join(full_folder_path, filename) mode = 'wb' if self.file_mode == 'New' else 'ab' try: self.file_object = open(full_path, mode) except IOError as e: log = logging.getLogger() mes = cleandoc('''In {}, failed to open the specified file {}'''.format(self.task_name, e)) log.error(mes) self.root_task.should_stop.set() self.root_task.files[full_path] = self.file_object if self.header: for line in self.header.split('\n'): self.file_object.write('# ' + line + '\n') labels = [s[0] for s in self.saved_values] self.file_object.write('\t'.join(labels) + '\n') self.file_object.flush() if self.saving_target != 'File': # TODO add more flexibilty on the dtype (possible complex # values) array_type = numpy.dtype([(str(s[0]), 'f8') for s in self.saved_values]) self.array = numpy.empty((self.array_length), dtype=array_type) self.write_in_database('array', self.array) self.initialized = True # Writing values = [self.format_and_eval_string(s[1]) for s in self.saved_values] if self.saving_target != 'Array': self.file_object.write('\t'.join([str(val) for val in values]) + '\n') self.file_object.flush() if self.saving_target != 'File': self.array[self.line_index] = tuple(values) self.line_index += 1 # Closing if self.line_index == self.array_length: if self.file_object: self.file_object.close() self.initialized = False def check(self, *args, **kwargs): """ """ err_path = self.task_path + '/' + self.task_name traceback = {} if self.saving_target != 'Array': try: full_folder_path = self.format_string(self.folder) except Exception as e: mess = 'Failed to format the folder path: {}' traceback[err_path] = mess.format(e) return False, 
traceback try: filename = self.format_string(self.filename) except Exception as e: mess = 'Failed to format the filename: {}' traceback[err_path] = mess.format(e) return False, traceback full_path = os.path.join(full_folder_path, filename) overwrite = False if self.file_mode == 'New' and os.path.isfile(full_path): overwrite = True traceback[err_path + '-file'] = \ cleandoc('''File already exists, running the measure will override it.''') try: f = open(full_path, 'ab') f.close() if self.file_mode == 'New' and not overwrite: os.remove(full_path) except Exception as e: mess = 'Failed to open the specified file : {}'.format(e) traceback[err_path] = mess.format(e) return False, traceback if self.array_size: try: self.format_and_eval_string(self.array_size) except Exception as e: mess = 'Failed to compute the array size: {}' traceback[err_path] = mess.format(e) return False, traceback elif self.saving_target != 'File': traceback[err_path] = 'A size for the array must be provided.' return False, traceback test = True for i, s in enumerate(self.saved_values): try: self.format_and_eval_string(s[1]) except Exception as e: traceback[err_path + '-entry' + str(i)] = \ 'Failed to evaluate entry {}: {}'.format(s[0], e) test = False if self.saving_target != 'File': data = [numpy.array([0.0, 1.0]) for s in self.saved_values] names = str(','.join([s[0] for s in self.saved_values])) final_arr = numpy.rec.fromarrays(data, names=names) self.write_in_database('array', final_arr) return test, traceback @observe('saving_target') def _update_database_entries(self, change): """ """ new = change['value'] if new != 'File': self.task_database_entries = {'array': numpy.array([1.0])} else: self.task_database_entries = {}
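# Small sketch of the structured-array layout SaveTask builds for its 'Array'
# target: one 'f8' field per saved entry, then one row assigned as a tuple per
# perform() call. The labels and values below are made up for the example.
import numpy

saved_values = [('amplitude', '2*1.5'), ('phase', '0.1')]
array_type = numpy.dtype([(name, 'f8') for name, _ in saved_values])
data = numpy.empty((3,), dtype=array_type)

for line_index, row in enumerate([(3.0, 0.1), (2.9, 0.2), (2.8, 0.3)]):
    data[line_index] = row

assert data.dtype.names == ('amplitude', 'phase')
assert data['amplitude'][0] == 3.0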
class TreeNode(Declarative): """Represents a tree node. This declaration is used to help the system determine how to extract informations from the underlying object to populate the node. Note that a Menu can be contributed as a child and will be used when right clicking a node. It will be passed a 'context' describing the node being right-clicked. The context will be a dictionary with the following keys : - 'copyable': bool, can the node be copied - 'cutable': bool, can the node be cut - 'pasteable': bool, can node be pasted here - 'renamable': bool, can the node be renamed - 'deletable': bool, can the node be deleted - 'not_root': bool, is the node the root node of the tree - 'data': tuple, (tree, TreeNode instance, object, id of the node) """ #: List of object classes and/or interfaces that the node applies to node_for = d_(List()) #: Either the name of a member containing a label, or a constant label, if #: the string starts with '='. label = d_(Unicode()) #: Either the name of a member containing a tooltip, or constant tooltip, #: if the string starts with '='. tooltip = d_(Unicode()) #: Name of the member containing children (if '', the node is a leaf). children_member = d_(Unicode()) #: Name of the signal use to notify changes to the children. The payload of #: the signal should be a ContainerChange instance. children_changed = d_(Unicode()) #: List of object classes than can be added or copied add = d_(List()) #: List of object classes that can be moved move = d_(List()) #: Name to use for a new instance name = d_(Unicode()) #: Can the object's children be renamed? rename = d_(Bool(True)) #: Can the object be renamed? rename_me = d_(Bool(True)) #: Can the object's children be copied? copy = d_(Bool(True)) #: Can the object's children be deleted? delete = d_(Bool(True)) #: Can the object be deleted (if its parent allows it)? delete_me = d_(Bool(True)) #: Can children be inserted (vs. appended)? insert = d_(Bool(True)) #: Should tree nodes be automatically opened (expanded)? auto_open = d_(Bool(False)) #: Automatically close sibling tree nodes? auto_close = d_(Bool(False)) #: Tuple of object classes that the node applies to node_for_class = Property() #: Name of leaf item icon icon_item = d_(Unicode('<item>')) #: Name of group item icon icon_group = d_(Unicode('<group>')) #: Name of opened group item icon icon_open = d_(Unicode('<open>')) #: Resource path used to locate the node icon icon_path = d_(Unicode('Icon')) #: Selector or name for background color background = Value('white') #: Selector or name for foreground color foreground = Value('black') _py_data = Value() _menu = Value() # --- Declarative functions ----------------------------------------------- @d_func def insert_child(self, obj, index, child): """Inserts a child into the object's children. """ getattr(obj, self.children_member)[index:index] = [child] @d_func def confirm_delete(self, obj): """Checks whether a specified object can be deleted. Returns ------- - **True** if the object should be deleted with no further prompting. - **False** if the object should not be deleted. - Anything else: Caller should take its default action (which might include prompting the user to confirm deletion). """ return None @d_func def delete_child(self, obj, index): """Deletes a child at a specified index from the object's children. """ del getattr(obj, self.children_member)[index] @d_func def move_child(self, obj, old, new): """Move a child of the object's children. 
""" child = getattr(obj, self.children_member)[old] del getattr(obj, self.children_member)[old] getattr(obj, self.children_member)[new:new] = [child] @d_func def enter_rename(self, obj): """Start renaming an object. This method can be customized in case the renaming operation should not occur directly on the label. Parameters ---------- obj : Refrence to the object the tree node being renamed is representing. Returns ------- name : unicode String on which to perform the renaming. """ return self.get_label(obj) @d_func def exit_rename(self, obj, label): """Sets the label for a specified object after a renaming operation. """ label_name = self.label if label_name[:1] != '=': setattr(obj, label_name, label) @d_func def get_label(self, obj): """Gets the label to display for a specified object. """ label = self.label if label[:1] == '=': return label[1:] label = getattr(obj, label) return label # ========================================================================= # --- Initializes the object ---------------------------------------------- # ========================================================================= def initialize(self): """Collect the Menu provided as a child. """ for ch in self.children: if isinstance(ch, Menu): self._menu = ch break # ========================================================================= # --- Property Implementations -------------------------------------------- # ========================================================================= @node_for_class.getter def _get_node_for_class(self): return tuple([klass for klass in self.node_for]) # ========================================================================= # --- Overridable Methods: ------------------------------------------------ # ========================================================================= def allows_children(self, obj): """Returns whether this object can have children. """ return self.children_member != '' def has_children(self, obj): """Returns whether the object has children. """ return len(self.get_children(obj)) > 0 def get_children(self, obj): """Gets the object's children. """ return getattr(obj, self.children_member) def get_children_id(self, obj): """Gets the object's children identifier. """ return self.children_member def append_child(self, obj, child): """Appends a child to the object's children. """ self.insert_child(obj, len(getattr(obj, self.children_member)), child) def get_tooltip(self, obj): """Gets the tooltip to display for a specified object. """ tooltip = self.tooltip if tooltip == '': return tooltip if tooltip[:1] == '=': return tooltip[1:] tooltip = getattr(obj, tooltip) if not tooltip: tooltip = '' if self.tooltip_formatter is None: return tooltip return self.tooltip_formatter(obj, tooltip) def get_icon(self, obj, is_expanded): """Returns the icon for a specified object. """ if not self.allows_children(obj): return self.icon_item if is_expanded: return self.icon_open return self.icon_group def get_icon_path(self, obj): """Returns the path used to locate an object's icon. """ return self.icon_path def get_name(self, obj): """Returns the name to use when adding a new object instance (displayed in the "New" submenu). """ return self.name def get_menu(self, context): """Returns the right-click context menu for an object. """ if self._menu: self._menu.context = context return self._menu else: return None def get_background(self, obj): """Returns the background color for the item. 
""" background = self.background if isinstance(background, basestring): background = getattr(obj, background, background) return background def get_foreground(self, obj): """Returns the foreground color for the item. """ foreground = self.foreground if isinstance(foreground, basestring): foreground = getattr(obj, foreground, foreground) return foreground def can_rename(self, obj): """Returns whether the object's children can be renamed. """ return self.rename def can_rename_me(self, obj): """Returns whether the object can be renamed. """ return self.rename_me def can_copy(self, obj): """Returns whether the object's children can be copied. """ return self.copy def can_delete(self, obj): """Returns whether the object's children can be deleted. """ return self.delete def can_delete_me(self, obj): """Returns whether the object can be deleted. """ return self.delete_me def can_insert(self, obj): """Returns whether the object's children can be inserted (vs. appended). """ return self.insert def can_auto_open(self, obj): """Returns whether the object's children should be automatically opened. """ return self.auto_open def can_auto_close(self, obj): """Returns whether the object's children should be automatically closed. """ return self.auto_close def is_node_for(self, obj): """Returns whether this is the node that handles a specified object. """ return isinstance(obj, self.node_for_class) def can_add(self, obj, add_object): """Returns whether a given object is droppable on the node. """ klass = self._class_for(add_object) if self.is_addable(klass): return True for item in self.move: if type(item) in (List, Dict): item = item[0] if issubclass(klass, item): return True return False def get_add(self, obj): """Returns the list of classes that can be added to the object. """ return self.add def get_drag_object(self, obj): """Returns a draggable version of a specified object. """ return obj def drop_object(self, obj, dropped_object): """Returns a droppable version of a specified object. """ klass = self._class_for(dropped_object) if self.is_addable(klass): return dropped_object for item in self.move: if type(item) in (List, Dict): if issubclass(klass, item[0]): return item[1](obj, dropped_object) elif issubclass(klass, item): return dropped_object return dropped_object def select(self, obj): """Handles an object being selected. """ return True def is_addable(self, klass): """Returns whether a specified object class can be added to the node. """ for item in self.add: if type(item) in (List, Dict): item = item[0] if issubclass(klass, item): return True return False def when_label_changed(self, obj, listener, remove): """Sets up or removes a listener for the label being changed on a specified object. """ label = self.label if label[:1] != '=': if remove: obj.unobserve(label, listener) else: obj.observe(label, listener) def _class_for(self, obj): """Returns the class of an object. """ if isinstance(obj, type): return obj return obj.__class__
class ThreadDispatcher(Atom): """Dispatch calling a function to a thread. """ #: Flag set when the thread is ready to accept new jobs. inactive = Value(factory=Event) def __init__(self, perform, pool): self._func = smooth_crash(perform) self._pool = pool self.inactive.set() def dispatch(self, task, *args, **kwargs): """Dispatch the work to the background thread. """ if self._thread is None: pools = task.root.resources['threads'] with pools.safe_access(self._pool) as threads: threads.append(self) self._thread = Thread(group=None, target=self._background_loop) self._thread.start() # Make sure the background thread is done processing the previous work. self.inactive.wait() # Mark the thread as active. self.inactive.clear() task.root.active_threads_counter.increment() pools = task.root.resources['active_threads'] with pools.safe_access(self._pool) as threads: threads.append(self) # Pass the arguments self._args_kwargs = task, args, kwargs self._new_args.set() def stop(self): """Stop the background thread. """ if self._thread is None: return while self._new_args.is_set(): sleep(1e-3) self.inactive.wait() self._args_kwargs = (None, None, None) self._new_args.set() self._thread.join() del self._thread self.inactive.set() # --- Private API --------------------------------------------------------- #: Thread to which the work is dispatched. _thread = Value() #: Flag set when the new arguments are available.. _new_args = Value(factory=Event) #: Arguments and keywords arguments for the next dispatch. _args_kwargs = Value() #: Reference to the function to call on each dispatch. _func = Callable() #: Pool id to which this dispatcher belongs. _pool = Str() def _background_loop(self): """Background function executed by the thread. """ while True: self._new_args.wait() task, args, kwargs = self._args_kwargs if task is None: break self._func(task, *args, **kwargs) self._new_args.clear() self.inactive.set() task.root.active_threads_counter.decrement()
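# Reduced model of the two-Event handshake ThreadDispatcher relies on:
# 'inactive' signals that the worker is idle, 'new_args' signals that work is
# available, and a None payload asks the worker to exit. Only the
# synchronization logic is kept; the task/pool bookkeeping is dropped.
from threading import Event, Thread

inactive = Event()
new_args = Event()
payload = [None]
results = []


def background_loop():
    while True:
        new_args.wait()
        job = payload[0]
        if job is None:
            break
        results.append(job * 2)
        new_args.clear()
        inactive.set()


inactive.set()
worker = Thread(target=background_loop)
worker.start()

for job in (1, 2, 3):
    inactive.wait()      # previous job finished
    inactive.clear()     # mark the worker as busy
    payload[0] = job
    new_args.set()       # hand over the new arguments

inactive.wait()
payload[0] = None        # sentinel: ask the worker to stop
new_args.set()
worker.join()
assert results == [2, 4, 6]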
class BorderGuide(GuideHandler): """ A guide handler which manages the border guide. """ _guides = Value( factory=lambda: { QGuideRose.Guide.BorderNorth: GuideImage('thin_horizontal'), QGuideRose.Guide.BorderExNorth: GuideImage('bar_horizontal'), QGuideRose.Guide.BorderEast: GuideImage('thin_vertical'), QGuideRose.Guide.BorderExEast: GuideImage('bar_vertical'), QGuideRose.Guide.BorderSouth: GuideImage('thin_horizontal'), QGuideRose.Guide.BorderExSouth: GuideImage('bar_horizontal'), QGuideRose.Guide.BorderWest: GuideImage('thin_vertical'), QGuideRose.Guide.BorderExWest: GuideImage('bar_vertical'), }) _boxes = Value( factory=lambda: { QGuideRose.Guide.BorderNorth: GuideImage('guide_box'), QGuideRose.Guide.BorderEast: GuideImage('guide_box'), QGuideRose.Guide.BorderSouth: GuideImage('guide_box'), QGuideRose.Guide.BorderWest: GuideImage('guide_box'), }) def iterguides(self): """ Iterate the guides managed by the handler. Returns ------- result : iterable An iterable of (Guide, GuideImage) pairs which are the guides managed by the handler. """ return iter(self._guides.items()) def iterboxes(self): """ Iterate the boxes which lie under the guides. Returns ------- result : iterable An iterable of GuideImage instances which are the boxes to be painted under the guides. """ return iter(self._boxes.values()) def layout(self, rect): """ Layout the guides for the given rect. Parameters ---------- rect : QRect The rectangle in which to layout the border guides. """ boxes = self._boxes guides = self._guides w = rect.width() h = rect.height() cx = rect.left() + w / 2 cy = rect.top() + h / 2 Guide = QGuideRose.Guide guides[Guide.BorderNorth].rect = QRect(cx - 15, 27, 31, 19) guides[Guide.BorderExNorth].rect = QRect(cx - 15, 15, 31, 10) boxes[Guide.BorderNorth].rect = QRect(cx - 20, 10, 41, 41) guides[Guide.BorderEast].rect = QRect(w - 45, cy - 15, 19, 31) guides[Guide.BorderExEast].rect = QRect(w - 24, cy - 15, 10, 31) boxes[Guide.BorderEast].rect = QRect(w - 50, cy - 20, 41, 41) guides[Guide.BorderSouth].rect = QRect(cx - 15, h - 45, 31, 19) guides[Guide.BorderExSouth].rect = QRect(cx - 15, h - 24, 31, 10) boxes[Guide.BorderSouth].rect = QRect(cx - 20, h - 50, 41, 41) guides[Guide.BorderWest].rect = QRect(27, cy - 15, 19, 31) guides[Guide.BorderExWest].rect = QRect(15, cy - 15, 10, 31) boxes[Guide.BorderWest].rect = QRect(10, cy - 20, 41, 41)
class TransferAWGFileTask(InstrumentTask): """Build and transfer a pulse sequence to an instrument through generation of an .awg file """ #: Sequence path for the case of sequence simply referenced. sequence_path = Str().tag(pref=True) #: Time stamp of the last modification of the sequence file. sequence_timestamp = Float().tag(pref=True) #: Sequence of pulse to compile and transfer to the instrument. sequence = Value() #: Global variable to use for the sequence. sequence_vars = Typed(OrderedDict, ()).tag(pref=(ordered_dict_to_pref, ordered_dict_from_pref)) #: Loop variables: channels on which the loop will be done, loop parameters #: names, start value, stop value and number of points per loop loop_name = Str('pulse_rabi_length').tag(pref=True) loop_start = Str('0').tag(pref=True) loop_stop = Str('1').tag(pref=True) loop_points = Str('2').tag(pref=True) #: wait for trigger before playing each sequence wait_trigger = Bool(False).tag(pref=True) #: internal or external trigger internal_trigger = Bool(False).tag(pref=True) #: Internal trigger period in mus trigger_period = Str('20').tag(pref=True) #: Take an external event to enter/exit the loop start_with_event = Bool(False).tag(pref=True) #: AWG Channel Config Dict awg_configuration = Str('').tag(pref=True) def check(self, *args, **kwargs): """Check that the sequence can be compiled. """ test, traceback = super(TransferAWGFileTask, self).check(*args, **kwargs) err_path = self.path + '/' + self.name + '-' msg = 'Failed to evaluate {} ({}): {}' seq = self.sequence for k, v in self.sequence_vars.items(): try: seq.external_vars[k] = self.format_and_eval_string(v) except Exception: test = False traceback[err_path + k] = msg.format(k, v, format_exc()) if not test: return test, traceback context = seq.context res, infos, errors = context.compile_and_transfer_sequence(seq) if not res: traceback[err_path + 'compil'] = errors return False, traceback for k, v in infos.items(): self.write_in_database(k, v) if self.sequence_path: if not (self.sequence_timestamp == os.path.getmtime( self.sequence_path)): msg = 'The sequence is outdated, consider refreshing it.' traceback[err_path + 'outdated'] = msg return test, traceback def _pack_record(self, name, value, dtype): """ packs awg_file record into a struct in the folowing way: struct.pack(fmtstring, namesize, datasize, name, data) where fmtstring = '<IIs"dtype"' The file record format is as follows: Record Name Size: (32-bit unsigned integer) Record Data Size: (32-bit unsigned integer) Record Name: (ASCII) (Include NULL.) Record Data < denotes little-endian encoding, I and other dtypes are format characters denoted in the documentation of the struct package Args: name (str): Name of the record (Example: 'MAGIC' or 'SAMPLING_RATE') value (Union[int, str]): The value of that record. dtype (str): String specifying the data type of the record. Allowed values: 'h', 'd', 's'. 
""" if len(dtype) == 1: record_data = struct.pack('<' + dtype, value) else: if dtype[-1] == 's': record_data = value.encode('ASCII') else: record_data = struct.pack('<' + dtype, *value) # the zero byte at the end the record name is the "(Include NULL.)" record_name = name.encode('ASCII') + b'\x00' record_name_size = len(record_name) record_data_size = len(record_data) size_struct = struct.pack('<II', record_name_size, record_data_size) packed_record = size_struct + record_name + record_data return packed_record def generate_awg_file(self, packed_waveforms, wfname_l, nrep, trig_wait, goto_state, jump_to, channel_cfg): """ This function generates an .awg-file for uploading to the AWG. The .awg-file contains a waveform list, full sequencing information and instrument configuration settings. Args: packed_waveforms (dict): dictionary containing packed waveforms with keys wfname_l wfname_l (numpy.ndarray): array of waveform names, e.g. array([[segm1_ch1,segm2_ch1..], [segm1_ch2,segm2_ch2..],...]) nrep (list): list of len(segments) of integers specifying the no. of repetions per sequence element. Allowed values: 1 to 65536. trig_wait (list): list of len(segments) of integers specifying the trigger wait state of each sequence element. Allowed values: 0 (OFF) or 1 (ON). goto_state (list): list of len(segments) of integers specifying the goto state of each sequence element. Allowed values: 0 to 65536 (0 means next) jump_to (list): list of len(segments) of integers specifying the logic jump state for each sequence element. Allowed values: 0 (OFF) or 1 (ON). channel_cfg (dict): dictionary of valid channel configuration records. """ timetuple = tuple( np.array(time.localtime())[[0, 1, 8, 2, 3, 4, 5, 6, 7]]) # general settings head_str = BytesIO() bytes_to_write = (self._pack_record('MAGIC', 5000, 'h') + self._pack_record('VERSION', 1, 'h')) head_str.write(bytes_to_write) sequence_cfg = { 'SAMPLING_RATE': 1e9, 'CLOCK_SOURCE': 1, 'REFERENCE_SOURCE': 2, 'EXTERNAL_REFERENCE_TYPE': 1, 'REFERENCE_CLOCK_FREQUENCY_SELECTION': 1, 'TRIGGER_SOURCE': int(self.internal_trigger + 1), 'INTERNAL_TRIGGER_RATE': float(self.trigger_period) * 1e-6, 'TRIGGER_INPUT_IMPEDANCE': 1, 'TRIGGER_INPUT_SLOPE': 1, 'TRIGGER_INPUT_POLARITY': 1, 'TRIGGER_INPUT_THRESHOLD': 1.4, 'EVENT_INPUT_IMPEDANCE': 1, 'EVENT_INPUT_POLARITY': 1, 'EVENT_INPUT_THRESHOLD': 1.4, 'JUMP_TIMING': 1, 'RUN_MODE': 4, # Continuous | Triggered | Gated | Sequence 'RUN_STATE': 1, # Off | On } for k in list(sequence_cfg.keys()): head_str.write( self._pack_record(k, sequence_cfg[k], self.driver.AWG_FILE_FORMAT_HEAD[k])) # channel settings ch_record_str = BytesIO() for k in list(channel_cfg.keys()): ch_k = k[:-1] + 'N' if ch_k in self.driver.AWG_FILE_FORMAT_CHANNEL: pack = self._pack_record( k, channel_cfg[k], self.driver.AWG_FILE_FORMAT_CHANNEL[ch_k]) ch_record_str.write(pack) # waveforms ii = 21 wf_record_str = BytesIO() wlist = list(packed_waveforms.keys()) wlist.sort() for wf in wlist: wfdat = packed_waveforms[wf] lenwfdat = len(wfdat) wf_record_str.write( self._pack_record('WAVEFORM_NAME_{}'.format(ii), wf + '\x00', '{}s'.format(len(wf + '\x00'))) + self._pack_record('WAVEFORM_TYPE_{}'.format(ii), 1, 'h') + self._pack_record('WAVEFORM_LENGTH_{}'.format(ii), lenwfdat, 'l') + self._pack_record('WAVEFORM_TIMESTAMP_{}'.format(ii), timetuple[:-1], '8H') + self._pack_record('WAVEFORM_DATA_{}'.format(ii), wfdat, '{}H'.format(lenwfdat))) ii += 1 # sequence seq_record_str = BytesIO() for i, t in enumerate(trig_wait): seq_record_str.write( 
self._pack_record('SEQUENCE_WAIT_{}'.format( i + 1), trig_wait[i], 'h') + self._pack_record( 'SEQUENCE_LOOP_{}'.format(i + 1), int(nrep[i]), 'l') + self._pack_record('SEQUENCE_JUMP_{}'.format( i + 1), jump_to[i], 'h') + self._pack_record( 'SEQUENCE_GOTO_{}'.format(i + 1), goto_state[i], 'h')) for ch_id in list(wfname_l.keys()): wfname = wfname_l[ch_id][i] seq_record_str.write( self._pack_record( 'SEQUENCE_WAVEFORM_NAME_CH_' + str(ch_id) + '_{}'.format(i + 1), wfname + '\x00', '{}s'.format(len(wfname + '\x00')))) awg_file = (head_str.getvalue() + ch_record_str.getvalue() + wf_record_str.getvalue() + seq_record_str.getvalue()) return awg_file def prepare_sequences(self, loop_points, loop_values, wait_trigger, start_with_event): seq = self.sequence context = seq.context packed_waveforms = {} wfname_l = {} nrep = [] trig_wait = [] goto_state = [] jump_to = [] for nn in range(loop_points): self.sequence_vars[self.loop_name] = str(loop_values[nn]) for k, v in self.sequence_vars.items(): seq.external_vars[k] = self.format_and_eval_string(v) context.sequence_name = '{}'.format(nn + 1) res, byteseq, repeat, infos, errors = context.compile_loop( seq, for_file=True) already_added = {} is_first_ch = True for ch_id in self.driver.defined_channels: if ch_id in byteseq: used_pos = [] for pos, waveform in enumerate(byteseq[ch_id]): addr = id(waveform) if addr not in already_added: seq_name_transfered = context.sequence_name + '_Ch{}'.format(ch_id) +\ '_' + str(pos) packed_waveforms[seq_name_transfered] = waveform already_added[addr] = seq_name_transfered else: seq_name_transfered = already_added[addr] if ch_id not in list(wfname_l.keys()): wfname_l[ch_id] = [seq_name_transfered] else: wfname_l[ch_id].append(seq_name_transfered) if (pos not in used_pos) and is_first_ch: nrep.append(repeat[pos]) if wait_trigger and used_pos == []: trig_wait.append(1) else: trig_wait.append(0) goto_state.append(0) if start_with_event: jump_to.append(1) else: jump_to.append(0) used_pos.append(pos) is_first_ch = False if start_with_event: trig_wait = [0] + trig_wait goto_state = [0] + goto_state jump_to = [2] + jump_to nrep = [0] + nrep for ch_id in byteseq: seq_name_standby = 'Standby_Ch{}'.format(ch_id) wfname_l[ch_id] = [seq_name_standby] + wfname_l[ch_id] packed_waveforms[seq_name_standby] = np.ones( 256, dtype=np.uint16) * (2**13) goto_state[-1] = 2 else: goto_state[-1] = 1 if not res: raise Exception('Failed to compile sequence') for k, v in infos.items(): self.write_in_database(k, v) return packed_waveforms, wfname_l, nrep, trig_wait, goto_state, jump_to def perform(self): """Compile the sequence. 
""" loop_start = float(self.format_and_eval_string(self.loop_start)) loop_stop = float(self.format_and_eval_string(self.loop_stop)) loop_points = int(self.format_and_eval_string(self.loop_points)) channel_cfg = self.format_and_eval_string(self.awg_configuration) loop_values = np.linspace(loop_start, loop_stop, loop_points) packed_waveforms, wfname_l, nrep, trig_wait, goto_state, \ jump_to = self.prepare_sequences(loop_points, loop_values, self.wait_trigger, self.start_with_event) awg_file = self.generate_awg_file(packed_waveforms, wfname_l, nrep, trig_wait, goto_state, jump_to, channel_cfg) self.driver.send_load_awg_file(awg_file) # Difficulty: the awg doesn't confirm the end of the loading timeout = 100 start_time = time.clock() while time.clock() - start_time <= timeout: try: for ch_id in range(1, 5): if ch_id in list(wfname_l.keys()): ch = self.driver.get_channel(ch_id) ch.output_state = 'ON' break except: pass def register_preferences(self): """Register the task preferences into the preferences system. """ super(TransferAWGFileTask, self).register_preferences() if self.sequence: self.preferences['sequence'] =\ self.sequence.preferences_from_members() update_preferences_from_members = register_preferences def traverse(self, depth=-1): """Reimplemented to also yield the sequence """ infos = super(TransferAWGFileTask, self).traverse(depth) for i in infos: yield i if self.sequence: for item in self.sequence.traverse(): yield item @classmethod def build_from_config(cls, config, dependencies): """Rebuild the task and the sequence from a config file. """ builder = cls.mro()[1].build_from_config.__func__ task = builder(cls, config, dependencies) if 'sequence' in config: pulse_dep = dependencies['exopy.pulses.item'] builder = pulse_dep['exopy_pulses.RootSequence'] conf = config['sequence'] seq = builder.build_from_config(conf, dependencies) task.sequence = seq return task def _post_setattr_sequence(self, old, new): """Set up n observer on the sequence context to properly update the database entries. """ entries = self.database_entries.copy() if old: old.unobserve('context', self._update_database_entries) if old.context: for k in old.context.list_sequence_infos(): del entries[k] if new: new.observe('context', self._update_database_entries) if new.context: entries.update(new.context.list_sequence_infos()) if entries != self.database_entries: self.database_entries = entries def _update_database_entries(self, change): """Reflect in the database the sequence infos of the context. """ entries = self.database_entries.copy() if change.get('oldvalue'): for k in change['oldvalue'].list_sequence_infos(): del entries[k] if change['value']: context = change['value'] entries.update(context.list_sequence_infos()) self.database_entries = entries
class CompassExGuide(GuideHandler): """ A class which renders the extended compass guide. """ _guides = Value( factory=lambda: { QGuideRose.Guide.CompassNorth: GuideImage('arrow_north'), QGuideRose.Guide.CompassEast: GuideImage('arrow_east'), QGuideRose.Guide.CompassSouth: GuideImage('arrow_south'), QGuideRose.Guide.CompassWest: GuideImage('arrow_west'), QGuideRose.Guide.CompassCenter: GuideImage('center'), QGuideRose.Guide.CompassExNorth: GuideImage('bar_horizontal'), QGuideRose.Guide.CompassExEast: GuideImage('bar_vertical'), QGuideRose.Guide.CompassExSouth: GuideImage('bar_horizontal'), QGuideRose.Guide.CompassExWest: GuideImage('bar_vertical'), }) _box = Value(factory=lambda: GuideImage('cross_ex_box')) def iterguides(self): """ Iterate the guides for the extented compass. Returns ------- result : generator A generator which yields 2-tuples of (enum, guide) for the relevant guides in the compass. """ return iter(self._guides.items()) def iterboxes(self): """ Iterate the boxes which lie under the guides. Returns ------- result : iterable An iterable of GuideImage instances which are the boxes to be painted under the guides. """ yield self._box def layout(self, pos): """ Layout the guides for the extended compass. Parameters ---------- pos : QPoint The center point of the compass. """ x = pos.x() y = pos.y() Guide = QGuideRose.Guide guides = self._guides guides[Guide.CompassNorth].rect = QRect(x - 15, y - 64, 31, 31) guides[Guide.CompassEast].rect = QRect(x + 34, y - 15, 31, 31) guides[Guide.CompassSouth].rect = QRect(x - 15, y + 34, 31, 31) guides[Guide.CompassWest].rect = QRect(x - 64, y - 15, 31, 31) guides[Guide.CompassCenter].rect = QRect(x - 15, y - 15, 31, 31) guides[Guide.CompassExNorth].rect = QRect(x - 15, y - 29, 31, 10) guides[Guide.CompassExEast].rect = QRect(x + 20, y - 15, 10, 31) guides[Guide.CompassExSouth].rect = QRect(x - 15, y + 20, 31, 10) guides[Guide.CompassExWest].rect = QRect(x - 29, y - 15, 10, 31) self._box.rect = QRect(x - 69, y - 69, 139, 139)
class ManufacturersHolder(Atom):
    """Container class for manufacturers.

    """
    #: Reference to the instrument plugin.
    plugin = Value()

    #: Filtered list of manufacturers.
    manufacturers = List()

    #: Expose the known instruments by series.
    use_series = Bool(True)

    #: Expose only the known instruments of the matching kind.
    kind = Str('All')

    def update_manufacturers(self, drivers, removed=False):
        """Update a manufacturer's infos and create it if it does not exist
        yet.

        Parameters
        ----------
        drivers : list
            List of drivers.

        """
        aliases = {a: o
                   for o, m_a in self.plugin._aliases.contributions.items()
                   for a in m_a.aliases}

        manufacturers = defaultdict(list)
        for d in drivers:
            m = d.infos['manufacturer']
            alias = aliases.get(m, m)
            d.infos['manufacturer'] = alias
            manufacturers[alias].append(d)

        for m, ds in manufacturers.items():
            if m not in self._manufacturers:
                if removed:
                    continue
                self._manufacturers[m] = \
                    ManufacturerInfos(name=m, kind=self.kind,
                                      use_series=self.use_series)

            manufacturer = self._manufacturers[m]
            manufacturer.update_series_and_models(ds, removed)
            if (removed and
                    not manufacturer._series and not manufacturer._models):
                del self._manufacturers[m]

        self._list_manufacturers()

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    #: All known manufacturers.
    _manufacturers = Dict()

    def _post_setattr_kind(self, old, new):
        """Regenerate the list of models.

        """
        for m in self._manufacturers.values():
            m.kind = new
        self._list_manufacturers()

    def _post_setattr_use_series(self, old, new):
        """Regenerate the list of models.

        """
        for m in self._manufacturers.values():
            m.use_series = new
        self._list_manufacturers()

    def _list_manufacturers(self):
        """Make available only the manufacturers for which at least one model
        fits the search criteria.

        """
        ms = [m for m in self._manufacturers.values() if m.instruments]
        ms.sort(key=attrgetter('name'))
        self.manufacturers = ms
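# Minimal sketch of the alias resolution and grouping done at the start of
# update_manufacturers: map vendor spellings to a canonical alias, then bucket
# drivers per manufacturer with a defaultdict. The aliases and drivers below
# are invented; real drivers are objects exposing an 'infos' dict.
from collections import defaultdict

aliases = {'Keysight': 'Agilent', 'HP': 'Agilent'}
drivers = [
    {'id': 'd1', 'manufacturer': 'Keysight'},
    {'id': 'd2', 'manufacturer': 'Agilent'},
    {'id': 'd3', 'manufacturer': 'Tektronix'},
]

manufacturers = defaultdict(list)
for d in drivers:
    alias = aliases.get(d['manufacturer'], d['manufacturer'])
    d['manufacturer'] = alias          # store the canonical name back
    manufacturers[alias].append(d['id'])

assert manufacturers == {'Agilent': ['d1', 'd2'], 'Tektronix': ['d3']}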
class DummyHook(Atom):
    """Base class for dummy measure hooks used for testing.

    """
    fail_check = Bool().tag(pref=True)

    fail_run = Bool()

    should_pause = Bool()

    accept_pause = Bool(True)

    should_resume = Bool()

    stop_called = Bool()

    #: Flags set by run() when the hook actually pauses and resumes.
    paused = Bool()

    resumed = Bool()

    waiting = Value(factory=Event)

    go_on = Value(factory=Event)

    signal_resuming = Value(factory=Event)

    go_on_resuming = Value(factory=Event)

    signal_resumed = Value(factory=Event)

    go_on_resumed = Value(factory=Event)

    def run(self, workbench, engine):
        """Run method executing the hook.

        """
        self.waiting.set()
        self.go_on.wait()
        if self.fail_run:
            raise RuntimeError()

        if self.accept_pause and self.should_pause:
            self.paused = True
            while True:
                sleep(0.001)
                if self.should_resume:
                    self.signal_resuming.set()
                    self.go_on_resuming.wait()
                    self.resumed = True
                    break
            self.signal_resumed.set()
            self.go_on_resumed.wait()

        self.waiting.clear()
        self.go_on.clear()

    def pause(self):
        """Method to call to pause execution.

        """
        self.should_pause = True

    def resume(self):
        """Method to call to resume execution.

        """
        self.should_resume = True

    def stop(self, force=False):
        """Method to call to stop execution.

        """
        self.stop_called = True
class QtListStrWidget(RawWidget): """A list widget for Enaml displaying objects as strings. Objects that are not string should be convertible to str and hashable. """ #: The list of str being viewed items = d_(List()) #: The list of the currently selected str selected_item = d_(Value()) selected_items = d_(List()) #: Whether or not the user can select multiple lines multiselect = d_(Bool(False)) #: Callable to use to build a unicode representation of the objects #: (one at a time). to_string = d_(Callable(ustr)) #: Whether or not to sort the items before inserting them. sort = d_(Bool(True)) hug_width = set_default(str('strong')) hug_height = set_default(str('ignore')) # PySide requires weakrefs for using bound methods as slots. # PyQt doesn't, but executes unsafe code if not using weakrefs. __slots__ = '__weakref__' def initialize(self): """Ensures that the selected members always have meaningful values. """ self._build_mapping(self.items) if self.items: self._do_default_selection() super(QtListStrWidget, self).initialize() def refresh_items(self): """Refresh the items displayed in the list. This is useful after an inplace operation on the list which is not notified. """ self._post_setattr_items([], self.items) def clear_selection(self): """Make no item be selected. """ # HINT : this only gives a visual hint to the user the selected value # is not updated. widget = self.get_widget() if widget is not None: widget.clearSelection() def create_widget(self, parent): """ Create the QListView widget. """ # Create the list widget. widget = QtWidgets.QListWidget(parent) # Populate the widget. self._set_widget_items(widget) # Set the selection mode. if self.multiselect: mode = QtWidgets.QAbstractItemView.ExtendedSelection selected = self.selected_items else: mode = QtWidgets.QAbstractItemView.SingleSelection selected = [self.selected_item] widget.setSelectionMode(mode) self.proxy.widget = widget # Anticipated so that selection works # Make sure the widget selection reflects the members. if self.items: self._select_on_widget(selected, widget) widget.itemSelectionChanged.connect(self.on_selection) return widget def on_selection(self): """ The signal handler for the index changed signal. """ if not self._guard & INDEX_GUARD: self._guard ^= INDEX_GUARD widget = self.get_widget() selected = [ self._rmap[index.row()] for index in widget.selectedIndexes() ] if selected: if self.multiselect: self.selected_items = selected else: self.selected_item = selected[0] self._guard ^= INDEX_GUARD # ========================================================================= # --- Private API --------------------------------------------------------- # ========================================================================= #: Guard bit field. _guard = Int(0) #: Mapping between user list objects and widget list indexes. _map = Dict() #: Mapping between the widget list indexes and the user list objects. _rmap = Dict() #: String representation of the objects in the widget order. _items = List() def _post_setattr_items(self, old, new): """Update the widget content when the items changes. """ self._build_mapping(new) self._set_widget_items(self.get_widget()) if new: self._do_default_selection() else: if self.multiselect: self.selected_items = [] else: self.selected_item = None def _post_setattr_multiselect(self, old, new): """Update the widget selection mode. 
""" widget = self.get_widget() if widget is None: return if new: mode = QtWidgets.QAbstractItemView.ExtendedSelection if self.items: self.selected_items = [self.selected_item] else: mode = QtWidgets.QAbstractItemView.SingleSelection if self.items: self.selected_item = self.selected_items[0] widget.setSelectionMode(mode) if self.items: self._select_on_widget( self.selected_items if new else [self.selected_item]) def _post_setattr_selected_item(self, old, new): """Update the widget when the selected item is changed externally. """ if not self._guard & INDEX_GUARD and self.items: self._guard ^= INDEX_GUARD self._select_on_widget([new]) self._guard ^= INDEX_GUARD def _post_setattr_selected_items(self, old, new): """Update the widget when the selected items are changed externally. """ if not self._guard & INDEX_GUARD and self.items: self._guard ^= INDEX_GUARD self._select_on_widget(new) self._guard ^= INDEX_GUARD def _build_mapping(self, items): """Build the mapping between user objects and widget indexes. """ items_map = {self.to_string(o): o for o in items} items = sorted(items_map) if self.sort else list(items_map) self._rmap = {i: items_map[item] for i, item in enumerate(items)} self._map = {v: k for k, v in self._rmap.items()} self._items = items def _set_widget_items(self, widget): """Set the list items sorting if necessary. """ if widget is not None: widget.clearSelection() widget.clear() for i in self._items: widget.addItem(i) def _do_default_selection(self): """Determine the items that should be selected. This method also ensures that the widget state reflects the member values. """ items = self.items if not self.multiselect: if self.selected_item not in items: self.selected_item = self._rmap[0] else: self._post_setattr_selected_item(None, self.selected_item) else: if not any(i in items for i in self.selected_items): self.selected_items = [self._rmap[0]] else: items_selected = [i for i in self.selected_items if i in items] if len(items_selected) == len(self.selected_item): self._post_setattr_selected_items(None, items) else: self.selected_items = items_selected def _select_on_widget(self, items, widget=None): """Select the specified items on the widget. """ if widget is None: widget = self.get_widget() if widget is not None: widget.setCurrentItem(widget.item(0), QtCore.QItemSelectionModel.Clear) item_map = self._map for n in items: widget.setCurrentItem(widget.item(item_map[n]), QtCore.QItemSelectionModel.Select)
class DummyEngine(BaseEngine): """Dummy engine used for testing. """ fail_perform = Bool() waiting = Value(factory=Event) go_on = Value(factory=Event) should_pause = Bool() accept_pause = Bool(True) should_resume = Bool() measurement_force_enqueued = Bool() signal_resuming = Value(factory=Event) go_on_resuming = Value(factory=Event) signal_resumed = Value(factory=Event) go_on_resumed = Value(factory=Event) _stop = Bool() def perform(self, exec_infos): """Simply return the exec_infos. """ self.measurement_force_enqueued = not exec_infos.checks self.waiting.set() self.progress(('test', True)) self.go_on.wait() if self.accept_pause and self.should_pause: self.status = 'Pausing' sleep(0.001) self.status = 'Paused' while True: if self.should_resume: self.signal_resuming.set() self.status = 'Resuming' self.go_on_resuming.wait() self.status = 'Running' break sleep(0.001) self.signal_resumed.set() self.go_on_resumed.wait() if self._stop: return exec_infos self.waiting.clear() self.go_on.clear() exec_infos.success = False if self.fail_perform else True return exec_infos def pause(self): self.should_pause = True def resume(self): self.should_resume = True def stop(self, force=False): """Stop the execution. """ self._stop = True def shutdown(self, force=False): if force: self.status = 'Stopped'
class ItemPreferences(_AutoPreferences):

    item = d_(Value())

    def get_object(self, workbench):
        return self.item
class ExecutionEditorModel(Atom): """Model for the execution editor. Walk all the tasks to determine which pool of tasks are defined and keep a counter. """ #: Reference to the root task of the hierarchy. root = Value() #: List of already existing execution pools. pools = List() def bind_observers(self): """Set up the observers on the task hierarchy. """ counter = Counter() self._bind_observers(self.root, counter) self._counter = counter self.pools = list(set(counter.elements())) def unbind_observers(self): """Remove all the observer from all tasks. """ self._unbind_observers(self.root, Counter()) # ========================================================================= # --- Private API --------------------------------------------------------- # ========================================================================= #: Counter keeping track of how many times each pool appear. _counter = Typed(Counter, ()) def _bind_observers(self, task, counter): """Bind the observer to a specific task and its children. """ if isinstance(task, ComplexTask): for m in tagged_members(task, 'child'): task.observe(m, self._child_observer) for m in tagged_members(task, 'child_notifier'): task.observe(m, self._child_notifier_observer) for child in task.gather_children(): self._bind_observers(child, counter) pools = [] parallel = task.parallel if parallel.get('activated'): pool = parallel['pool'] if pool: pools.append(pool) wait = task.wait if wait.get('activated'): pools.extend(wait.get('wait', [])) pools.extend(wait.get('no_wait', [])) counter.update(pools) task.observe('parallel', self._task_observer) task.observe('wait', self._task_observer) def _unbind_observers(self, task, counter): """Remove the observer linked to a specific task. """ if isinstance(task, ComplexTask): for m in tagged_members(task, 'child'): task.unobserve(m, self._child_observer) for m in tagged_members(task, 'child_notifier'): task.unobserve(m, self._child_notifier_observer) for child in task.gather_children(): self._unbind_observers(child, counter) pools = [] parallel = task.parallel if parallel.get('activated'): pool = parallel['pool'] if pool: pools.append(pool) wait = task.wait if wait.get('activated'): pools.extend(wait.get('wait', [])) pools.extend(wait.get('no_wait', [])) counter.subtract(pools) task.unobserve('parallel', self._task_observer) task.unobserve('wait', self._task_observer) def _post_setattr_root(self, old, new): """Make sure we always observe the right root. """ if old: self._unbind_observers(old, self._counter) if new: self.bind_observers() def _task_observer(self, change): """Observer handler reacting to task change. """ if change['name'] == 'parallel': activated = change['value'].get('activated') pool = change['value'].get('pool') if not activated and pool: self._counter[pool] -= 1 elif activated and pool: self._counter[pool] += 1 self._update_pools() else: activated = change['value'].get('activated') wait = change['value'].get('wait', []) no_wait = change['value'].get('no_wait', []) counter = Counter(wait + no_wait) if not activated and counter: self._counter.subtract(counter) elif activated and counter: self._counter.update(counter) self._update_pools() def _child_observer(self, change): """Observe rtracking a member tagged with child. 
""" counter = Counter() value = change['value'] if isinstance(value, Iterable): for c in value: self._bind_observers(c, counter) elif value: self._bind_observers(value, counter) if 'oldvalue' in change: value = change['oldvalue'] if isinstance(value, Iterable): for c in value: self._unbind_observers(c, counter) elif value: self._unbind_observers(value, counter) self._counter.update(counter) self._update_pools() def _child_notifier_observer(self, change): """Keep track of children addition and removal. """ if change.collapsed: for c in change.collapsed: self._child_notifier_observer(c) counter = Counter() for _, child in change.removed: self._unbind_observers(child, counter) for _, child in change.added: self._bind_observers(child, counter) self._counter.update(counter) self._update_pools() def _update_pools(self, counter=None): """Update the pool with the elements having a positive count. """ c = counter or self._counter self.pools = list(set(c.elements()))
class AndroidFragment(AndroidToolkitObject, ProxyFragment): """An Android implementation of an Enaml ProxyFragment.""" #: A reference to the fragment created by the proxy. fragment = Typed(BridgedFragment) #: Future set when ready ready = Value() def _default_ready(self): return self.get_context().create_future() # ------------------------------------------------------------------------- # Initialization API # ------------------------------------------------------------------------- def create_widget(self): """Create the underlying widget.""" self.fragment = BridgedFragment() def init_widget(self): """Initialize the underlying widget.""" super().init_widget() f = self.fragment f.setFragmentListener(f.getId()) f.onCreateView.connect(self.on_create_view) f.onDestroyView.connect(self.on_destroy_view) def destroy(self): """Custom destructor that deletes the fragment and removes itself from the adapter it was added to. """ #: Destroy fragment fragment = self.fragment if fragment: del self.fragment super().destroy() # ------------------------------------------------------------------------- # FragmentListener API # ------------------------------------------------------------------------- def on_create_view(self): """Trigger the click""" d = self.declaration changed = not d.condition if changed: d.condition = True view = self.get_view() if changed: self.ready.set_result(True) return view def on_destroy_view(self): d = self.declaration #: Destroy if we don't want to cache it if not d.cached: d.condition = False #: Delete the reference if self.widget: del self.widget #: Clear the ready state again! self.ready = self._default_ready() # ------------------------------------------------------------------------- # ProxyFragment API # ------------------------------------------------------------------------- def get_view(self): """Get the page to display. If a view has already been created and is cached, use that otherwise initialize the view and proxy. If defer loading is used, wrap the view in a FrameLayout and defer add view until later. """ d = self.declaration if d.cached and self.widget: return self.widget if d.defer_loading: self.widget = FrameLayout(self.get_context()) app = self.get_context() app.deferred_call(lambda: self.widget.addView(self.load_view(), 0)) else: self.widget = self.load_view() return self.widget def load_view(self): d = self.declaration for view in d.items: if not view.is_initialized: view.initialize() if not view.proxy_is_active: view.activate_proxy() return view.proxy.widget def set_cached(self, cached): pass def set_defer_loading(self, defer): pass
class MetadataTest(Atom): m = Value().tag(pref=True)
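# The .tag(pref=True) call above stores its keywords in the member's metadata
# dict, which is how a preference system can discover which members to save.
# A minimal check of that behaviour against the class defined just above:
assert MetadataTest.m.metadata == {'pref': True}

# Collect every member tagged with 'pref', the way a preference plugin would.
prefs = {name: member for name, member in MetadataTest.members().items()
         if member.metadata and member.metadata.get('pref')}
assert list(prefs) == ['m']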
class NoOpValAtom(Atom):

    v = Value()
    v.set_validate_mode(Validate.NoOp, None)
class SlotTest(Atom): v = Value()
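# SlotTest presumably exercises the low-level slot storage behind Value.
# Assuming atom's Member.set_slot/get_slot API (which bypasses validation and
# notifications), a sketch of what such a test could check looks like this.
def test_slot_access():
    a = SlotTest()
    member = SlotTest.v
    member.set_slot(a, 42)
    assert member.get_slot(a) == 42
    assert a.v == 42  # the default GetAttr.Slot mode reads the same storage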