class GPIB_Instrument(Instrument):
    """Extends Instrument definition to GPIB Instruments.

    Members tagged with ``GPIB_writes``/``GPIB_asks`` are wired up in
    ``extra_setup`` so that setting/getting them issues the corresponding
    GPIB command (the tag metadata is consumed by the Instrument framework --
    see ``get_tag``/``set_tag``).
    """
    #session=Typed(visa.Instrument).tag(private=True, desc="visa session of the instrument")

    # Base name used by the framework to identify this instrument type.
    base_name="GPIB_Instrument"

    @tag_Callable(sub=True, desc="function for test page which sends a commands, waits and gets a response")
    def command_response(self):
        """Send `command`, wait `resp_delay` seconds, then read `response`.

        Each step only runs when the corresponding member's "do" tag is set.
        """
        if get_tag(self, "command", "do", False):
            self.send("command")
        if get_tag(self, "resp_delay", "do", False):
            sleep(self.resp_delay)
        if get_tag(self, "response", "do", False):
            self.receive("response")

    #: Raw command string; GPIB_writes tag makes setting it send the command.
    command=Unicode().tag(sub=True, GPIB_writes="{command}", send_now=False, do=True)
    #: Wait between sending `command` and reading `response` (seconds).
    resp_delay=Float(0.0).tag(sub=True, unit=" s", desc="delay between command and response", do=True)
    #: Response text; getting it performs a GPIB read (possibly multiline).
    response=Unicode().tag(sub=True, get_cmd=GPIB_read, spec="multiline", do=True)

    @booter
    def booter(self, address, delay, timeout, reset, selftest, lock, send_end, identify, clear):
        """Open the GPIB session.

        NOTE(review): the parameters are ignored in favor of the same-named
        instance attributes -- presumably the @booter decorator requires this
        signature; confirm against the framework.
        """
        start_GPIB(self, self.address, self.delay, self.timeout, self.reset, self.selftest,
                   self.lock, self.send_end, self.identify, self.clear)

    #: VISA resource string of the instrument.
    address=Unicode("GPIB0::22::INSTR").tag(sub=True, label = "GPIB Address")
    #: Delay between successive GPIB commands (seconds).
    delay=Float(0).tag(sub=True, unit=" s", desc="delay between GPIB commands")
    #: Communication timeout (seconds).
    timeout=Float(5).tag(sub=True, unit=" s", desc="timeout")

    @tag_Callable(sub=True)
    def reset(self):
        """Send the IEEE-488.2 reset command *RST."""
        GPIB_write(self, "*RST")

    #: Whether to lock the instrument for exclusive access.
    lock = Bool(False).tag(sub=True)
    #: Whether to assert EOI on the last byte of each write.
    send_end = Bool(True).tag(sub=True)

    @tag_Callable(sub=True)
    def clear(self):
        """Call the visa GPIB device clear on the open session."""
        self.session.clear()
    #clear=Callable(GPIB_clear).tag(value=True)

    #: Identification string; GPIB_asks tag makes reading it send *IDN?.
    identify = Unicode().tag(sub=True, GPIB_asks="*IDN?", do=True, read_only=True)

    @tag_Callable(sub=True)
    def selftest(self):
        """Perform the selftest via the IEEE-488.2 command *TST?.

        Raises
        ------
        InstrumentError
            If the instrument returns a nonzero (failing) selftest code.
        """
        tst=GPIB_ask(self, "*TST?")
        if int(tst):
            raise InstrumentError("Instrument {0} did not pass the selftest. CODE: {1}".format(self.name, tst))
    #GPIB_selftest.GPIB_string="*TST?"

    @closer
    def closer(self):
        """Default GPIB stop: close the visa session."""
        self.session.close()

    def extra_setup(self, param, typer):
        """Hook run per member: translate GPIB_writes/GPIB_asks tags.

        A GPIB_writes tag becomes a set_cmd (writing the member sends the
        formatted command); a GPIB_asks tag becomes a get_cmd (reading the
        member queries the instrument).
        """
        super(GPIB_Instrument, self).extra_setup(param, typer)
        GPIB_string=get_tag(self, param, 'GPIB_writes')
        if GPIB_string!=None:
            do=get_tag(self, param, "do", False)
            set_tag(self, param, set_cmd=GPIB_write_it(GPIB_string, param), do=do)
        GPIB_string=get_tag(self, param, 'GPIB_asks')
        if GPIB_string!=None:
            do=get_tag(self, param, "do", False)
            set_tag(self, param, get_cmd=GPIB_ask_it(GPIB_string, param), do=do)

    @private_property
    def view_window(self):
        """Lazily build the enaml view for this instrument.

        The enaml import is local because .enaml files can only be imported
        inside an `imports()` context.
        """
        from enaml import imports
        with imports():
            from taref.instruments.instrument_e import GPIB_InstrumentView
        return GPIB_InstrumentView(instr=self)
class Extension(Declarative):
    """ A declarative representation of a plugin extension.

    An Extension must be declared as a child of a PluginManifest.

    """
    #: The globally unique identifier for the extension.
    id = d_(Unicode())

    #: The fully qualified id of the target extension point.
    point = d_(Unicode())

    #: An optional rank to use for order the extension among others.
    rank = d_(Int())

    #: A callable which will create the implementation object for the
    #: extension point. The call signature and return type are defined
    #: by the extension point plugin which invokes the factory.
    factory = d_(Callable())

    #: An optional description of the extension.
    description = d_(Unicode())

    @property
    def plugin_id(self):
        """ The id of the parent plugin manifest. """
        return self.parent.id

    @property
    def qualified_id(self):
        """ The fully qualified extension identifier.

        A bare id is prefixed with the parent plugin id; an id which
        already contains a dot is returned unchanged.
        """
        ext_id = self.id
        if '.' in ext_id:
            return ext_id
        return '%s.%s' % (self.plugin_id, ext_id)

    def get_child(self, kind, reverse=False):
        """ Return the first child of the given declarative type.

        Parameters
        ----------
        kind : type
            The declarative type of the child of interest.

        reverse : bool, optional
            Whether to search in reversed order. The default is False.

        Returns
        -------
        result : child or None
            The first matching child, or None if there is none.

        """
        ordered = reversed(self.children) if reverse else self.children
        return next((c for c in ordered if isinstance(c, kind)), None)

    def get_children(self, kind):
        """ Return all children of the given declarative type.

        Parameters
        ----------
        kind : type
            The declarative type of the children of interest.

        Returns
        -------
        result : list
            The list of children of the requested type.

        """
        return [child for child in self.children if isinstance(child, kind)]
class TemplateConfig(AbstractConfig):
    """ Config used to insert a template into a sequence.

    The template can either be inserted as a TemplateSequence or merged.
    In the first case it will appear as a single item and the only inputs will
    be the declared template vars and the mapping between the true context of
    execution channels and the ones from the template context. The id of the
    template will be kept and the template will be re-used each time the
    sequence is rebuilt.
    In the second the template will be unraveled and inserted as many items,
    the user will be allowed to choose where the template vars should appear
    and to give a mapping between the contexts channels.

    """
    #: Name of the sequence used to make the sequence easier to read.
    template_name = Unicode()

    #: Docstring of the sequence.
    template_doc = Unicode()

    #: Configobj object describing the template.
    template_config = Value()

    #: Flag indicating whether the Template should be merged as a standard
    #: sequence or included as a TemplateSequence. In the first case all
    #: reference to the template is lost, in the second the template sequence
    #: remembers its template and uses it when rebuilding itself.
    merge = Bool()

    #: When merging should the template vars be added as local_vars or
    #: external_vars in the root.
    t_vars_as_root = Bool()

    #: False template context used to determine the mapping between the
    #: template context channels and the ones from the root.
    #: Only used in merge mode.
    context = Typed(TemplateContext)

    def build_sequence(self):
        """ Build sequence using the selected template.

        Returns the built sequence, or None when dependency analysis or
        collection fails (the failure is recorded in self.errors).
        """
        # Work on a copy so the stored template config is never mutated.
        config = deepcopy(self.template_config)

        #: Here we set the item id of the "root" of the sequence (that needed
        #: to collect the right dependency). If we are NOT merging, then the
        #: template will be added as a TemplateSequence (and therefore we need
        #: a TemplateSequence as dependency). If we ARE merging, then the root
        #: is a BaseSequence holding everything and we must collect it.
        if not self.merge:
            config['item_id'] = "ecpy_pulses.__template__"
        else:
            config['item_id'] = "ecpy_pulses.BaseSequence"

        #: Set the rest of the config variables that are needed.
        config['name'] = self.template_name
        config['template_id'] = '__template__'
        config['template_doc'] = self.template_doc

        #: Collect Dependencies
        core = self.manager.workbench.get_plugin('enaml.workbench.core')
        cmd = 'ecpy.app.dependencies.analyse'
        cont = core.invoke_command(cmd, {'obj': config})
        if cont.errors:
            msg = 'Failed to analyse dependencies :\n%s'
            self.errors['dependencies'] = msg % pformat(cont.errors)
            return None

        cmd = 'ecpy.app.dependencies.collect'
        cont = core.invoke_command(cmd, {'kind': 'build',
                                         'dependencies': cont.dependencies})
        if cont.errors:
            msg = 'Failed to collect dependencies :\n%s'
            self.errors['dependencies'] = msg % pformat(cont.errors)
            return None

        #: Shorthand
        build_dep = cont.dependencies

        if not self.merge:
            seq = TemplateSequence.build_from_config(config, build_dep)
            return seq

        else:
            # template_vars is stored as a repr'd dict in the config.
            t_vars = literal_eval(config.pop('template_vars'))
            if not self.t_vars_as_root:
                loc_vars = literal_eval(config['local_vars'])
                loc_vars.update(t_vars)
                config['local_vars'] = repr(loc_vars)
            else:
                self.root.external_vars.update(t_vars)

            # Don't want to alter the dependencies dict in case somebody else
            # uses the same template.
            # NOTE(review): merging a deepcopy with its own source looks like
            # a no-op -- confirm whether the second argument was meant to be a
            # different config (e.g. user overrides).
            t_config = deepcopy(config)
            t_config.merge(config)
            config = t_config

            seq = BaseSequence.build_from_config(t_config, build_dep)
            self._apply_mapping(seq)
            return seq

    # --- Private API ---------------------------------------------------------

    def _post_setattr_template_name(self, old, new):
        """ Observer notifying that the configurer is ready to build. """
        self.ready = bool(new)

    def _apply_mapping(self, seq):
        """ Apply the user defined mapping of channels for the pulses.

        Recurses into nested BaseSequences; for nested TemplateSequences the
        inner channel mapping values are remapped instead.
        """
        c_mapping = self.context.channel_mapping
        for item in seq.items:
            if isinstance(item, Pulse):
                # Unmapped channels fall back to the empty string.
                item.channel = c_mapping.get(item.channel, '')
            elif isinstance(item, TemplateSequence):
                mapping = item.context.channel_mapping
                for channel in mapping:
                    # NOTE(review): `item.channel` is read here although item
                    # is a TemplateSequence and `channel` is the loop
                    # variable -- this looks like it should be
                    # `mapping[channel]` (or `channel`); confirm intent.
                    mapping[channel] = c_mapping.get(item.channel, '')
            elif isinstance(item, BaseSequence):
                self._apply_mapping(item)

    def _default_context(self):
        """ Initialize the context using the config. """
        config = self.template_config
        context = TemplateContext()
        update_members_from_preferences(context, config['context'])
        return context
class DockPane(Widget):
    """ A widget which can be docked in a MainWindow.

    A DockPane is a widget which can be docked in designated dock areas
    in a MainWindow. It can have at most a single child widget which is
    an instance of Container.

    """
    #: The title to use in the title bar.
    title = d_(Unicode())

    #: Whether or not the title bar is visible.
    title_bar_visible = d_(Bool(True))

    #: The orientation of the title bar.
    title_bar_orientation = d_(Enum('horizontal', 'vertical'))

    #: Whether or not the dock pane is closable via a close button.
    closable = d_(Bool(True))

    #: Whether or not the dock pane is movable by the user.
    movable = d_(Bool(True))

    #: Whether or not the dock can be floated as a separate window.
    floatable = d_(Bool(True))

    #: A boolean indicating whether or not the dock pane is floating.
    floating = d_(Bool(False))

    #: The dock area in the MainWindow where the pane is docked.
    dock_area = d_(Enum('left', 'right', 'top', 'bottom'))

    #: The dock areas in the MainWindow where the pane can be docked
    #: by the user. Note that this does not preclude the pane from
    #: being docked programmatically via the 'dock_area' attribute.
    allowed_dock_areas = d_(
        List(
            Enum('left', 'right', 'top', 'bottom', 'all'),
            ['all'],
        ))

    #: An event fired when the user closes the pane by clicking on the
    #: dock pane's close button.
    closed = d_(Event(), writable=False)

    #: A reference to the ProxyDockPane object.
    proxy = Typed(ProxyDockPane)

    def dock_widget(self):
        """ Return the dock widget defined for the dock pane.

        The last child Container is considered the dock widget; None is
        returned when no Container child exists.
        """
        containers = (c for c in reversed(self.children)
                      if isinstance(c, Container))
        return next(containers, None)

    #--------------------------------------------------------------------------
    # Observers
    #--------------------------------------------------------------------------
    @observe('title', 'title_bar_visible', 'title_bar_orientation', 'closable',
        'movable', 'floatable', 'floating', 'dock_area', 'allowed_dock_areas')
    def _update_proxy(self, change):
        """ Forward state changes to the proxy widget. """
        # The base class handler already routes the change correctly.
        super(DockPane, self)._update_proxy(change)

    # TODO spend some time thinking about the open/close api
    # I would rather everything be consistent, which likely
    # means destroy-on-close behavior should be the norm.
    def open(self):
        """ Open the pane. Alias for show(); slated for removal. """
        self.show()

    def close(self):
        """ Close the pane. Alias for hide(); slated for removal. """
        self.hide()
class AndroidApplication(BridgedApplication):
    """ An Android implementation of an Enaml Native BridgedApplication.

    A AndroidApplication uses the native Android widget toolkit to implement
    an Enaml UI that runs in the local process.

    """
    #: Attributes so it can be serialized over the bridge as a reference
    __nativeclass__ = Unicode('android.content.Context')

    #: Bridge widget (reference to the MainActivity across the bridge).
    widget = Typed(Activity)

    #: Application Window
    window = Typed(Window)

    #: Android Activity (jnius class)
    activity = Value()

    #: Pixel density of the device
    #: Loaded immediately as this is used often.
    dp = Float()

    #: Build info from
    #: https://developer.android.com/reference/android/os/Build.VERSION.html
    build_info = Dict()

    #: SDK version
    #: Loaded immediately
    api_level = Int()

    #: Triggered when the back button is pressed. This can be observed
    #: to handle back presses.
    back_pressed = Event(dict)

    #: Permission code increments on each request
    _permission_code = Int()

    #: Pending permission request listeners keyed by request code.
    _permission_requests = Dict()

    # -------------------------------------------------------------------------
    # Defaults
    # -------------------------------------------------------------------------
    def _default_widget(self):
        """ Return a bridge object reference to the MainActivity.

        __id__=-1 is presumably a reserved bridge id for the main activity
        -- confirm against the bridge implementation.
        """
        return Activity(__id__=-1)

    # -------------------------------------------------------------------------
    # AndroidApplication Constructor
    # -------------------------------------------------------------------------
    def __init__(self, *args, **kwargs):
        """ Initialize a AndroidApplication.

        Uses jnius to retrieve an instance of the activity and installs the
        Android widget factories on the proxy resolver.
        """
        super(AndroidApplication, self).__init__(*args, **kwargs)
        self.resolver = ProxyResolver(factories=factories.ANDROID_FACTORIES)

    def init_widget(self):
        """ Initialize on the first call.

        Registers the lifecycle, back-press and configuration-change
        listeners on the activity and connects them to the corresponding
        handlers, then asynchronously fetches the window.
        """
        #: Add a ActivityLifecycleListener to update the application state
        activity = self.widget
        activity.addActivityLifecycleListener(activity.getId())
        activity.onActivityLifecycleChanged.connect(
            self.on_activity_lifecycle_changed)

        #: Add BackPressedListener to trigger the event
        activity.addBackPressedListener(activity.getId())
        activity.onBackPressed.connect(self.on_back_pressed)

        #: Add ConfigurationChangedListener to trigger the event
        activity.addConfigurationChangedListener(activity.getId())
        activity.onConfigurationChanged.connect(self.on_configuration_changed)

        activity.getWindow().then(self.init_window)

    def init_window(self, window):
        """ Store the bridge Window reference and apply window settings.

        Called asynchronously once getWindow() resolves. `keep_screen_on`
        and `statusbar_color` are presumably members declared on the base
        class -- confirm.
        """
        self.window = Window(__id__=window)
        self.set_keep_screen_on(self.keep_screen_on)
        if self.statusbar_color:
            self.set_statusbar_color(self.statusbar_color)

    # -------------------------------------------------------------------------
    # App API Implementation
    # -------------------------------------------------------------------------
    def has_permission(self, permission):
        """ Return a future that resolves with the result of the permission
        check (True when granted).
        """
        f = self.create_future()

        #: Old versions of android did permissions at install time
        if self.api_level < 23:
            f.set_result(True)
            return f

        def on_result(allowed):
            result = allowed == Activity.PERMISSION_GRANTED
            self.set_future_result(f, result)

        self.widget.checkSelfPermission(permission).then(on_result)

        return f

    def request_permissions(self, permissions):
        """ Return a future that resolves with the results of the permission
        requests as a dict mapping permission name to a granted bool.
        """
        f = self.create_future()

        #: Old versions of android did permissions at install time
        if self.api_level < 23:
            f.set_result({p: True for p in permissions})
            return f

        w = self.widget
        request_code = self._permission_code
        self._permission_code += 1  #: So next call has a unique code

        #: On first request, setup our listener, and request the permission
        if request_code == 0:
            w.setPermissionResultListener(w.getId())
            w.onRequestPermissionsResult.connect(self._on_permission_result)

        def on_results(code, perms, results):
            #: Check permissions
            f.set_result({
                p: r == Activity.PERMISSION_GRANTED
                for (p, r) in zip(perms, results)
            })

        #: Save a reference so _on_permission_result can dispatch by code.
        self._permission_requests[request_code] = on_results

        #: Send out the request
        self.widget.requestPermissions(permissions, request_code)

        return f

    def show_toast(self, msg, long=True):
        """ Show a toast message for the given duration.
        This is an android specific api.

        Parameters
        -----------
        msg: str
            Text to display in the toast message
        long: bool
            Display for a long or short (system defined) duration

        """
        from .android_toast import Toast

        def on_toast(ref):
            t = Toast(__id__=ref)
            t.show()

        # 1/0 presumably map to Toast.LENGTH_LONG/LENGTH_SHORT -- confirm.
        Toast.makeText(self, msg, 1 if long else 0).then(on_toast)

    def on_activity_lifecycle_changed(self, state):
        """ Update the state when the android app is paused, resumed, etc..

        Widgets can observe this value for changes if they need to react
        to app lifecycle changes.

        """
        self.state = state

    def on_back_pressed(self):
        """ Fire the `back_pressed` event with a dictionary with a 'handled'
        key when the back hardware button is pressed

        If 'handled' is set to any value that evaluates to True the default
        event implementation will be ignored.

        """
        try:
            event = {'handled': False}
            self.back_pressed(event)
            return bool(event.get('handled', False))
        except Exception as e:
            #: Must return a boolean or we will cause android to abort
            return False

    def on_configuration_changed(self, config):
        """ Handles a screen configuration change.

        Updates width/height and maps the integer orientation code to a
        human readable name.
        """
        self.width = config['width']
        self.height = config['height']
        # config['orientation'] is an index: 0=square, 1=portrait, 2=landscape
        self.orientation = ('square', 'portrait',
                            'landscape')[config['orientation']]

    # --------------------------------------------------------------------------
    # Bridge API Implementation
    # --------------------------------------------------------------------------
    def show_view(self):
        """ Show the current `app.view`. This will fade out the previous
        with the new view.
        """
        if not self.build_info:
            def on_build_info(info):
                """ Make sure the build info is ready before we
                display the view; cache density, screen size, orientation
                and SDK level for later use.
                """
                self.dp = info['DISPLAY_DENSITY']
                self.width = info['DISPLAY_WIDTH']
                self.height = info['DISPLAY_HEIGHT']
                self.orientation = ('square', 'portrait',
                                    'landscape')[info['DISPLAY_ORIENTATION']]
                self.api_level = info['SDK_INT']
                self.build_info = info
                self._show_view()
                self.init_widget()

            self.widget.getBuildInfo().then(on_build_info)
        else:
            self._show_view()

    def _show_view(self):
        """ Show the view """
        self.widget.setView(self.get_view())

    def dispatch_events(self, data):
        """ Send the data to the Native application for processing """
        nativehooks.publish(data)

    # -------------------------------------------------------------------------
    # Android utilities API Implementation
    # -------------------------------------------------------------------------
    def _on_permission_result(self, code, perms, results):
        """ Handles a permission request result by passing it to the
        handler with the given code.

        """
        #: Get the handler for this request
        handler = self._permission_requests.get(code, None)
        if handler is not None:
            del self._permission_requests[code]

            #: Invoke that handler with the permission request response
            handler(code, perms, results)

    def _observe_keep_screen_on(self, change):
        """ Apply the keep-screen-on flag whenever the member changes. """
        self.set_keep_screen_on(self.keep_screen_on)

    def set_keep_screen_on(self, keep_on):
        """ Set or clear the window flag to keep the screen on.

        No-op until init_window has resolved the window reference.
        """
        window = self.window
        if not window:
            return
        if keep_on:
            window.addFlags(Window.FLAG_KEEP_SCREEN_ON)
        else:
            window.clearFlags(Window.FLAG_KEEP_SCREEN_ON)

    def _observe_statusbar_color(self, change):
        """ Apply the statusbar color whenever the member changes. """
        self.set_statusbar_color(self.statusbar_color)

    def set_statusbar_color(self, color):
        """ Set the color of the system statusbar.

        No-op until init_window has resolved the window reference.
        """
        window = self.window
        if not window:
            return
        window.setStatusBarColor(color)

    def get_system_service(self, service):
        """ Wrapper for getSystemService. You MUST
        wrap the class with the appropriate object.
        """
        return self.widget.getSystemService(service)

    # -------------------------------------------------------------------------
    # Plugin API Implementation
    # -------------------------------------------------------------------------
    def load_plugin_factories(self):
        """ Add any plugin toolkit widgets to the ANDROID_FACTORIES """
        for plugin in self.get_plugins(group='enaml_native_android_factories'):
            get_factories = plugin.load()
            PLUGIN_FACTORIES = get_factories()
            factories.ANDROID_FACTORIES.update(PLUGIN_FACTORIES)
class JDF_Pattern(Atom):
    """A single pattern entry -- presumably from a JDF job file; the
    coordinates and number coerce string input to int."""

    #: Pattern number.
    num = Coerced(int)

    #: X coordinate.
    x = Coerced(int)

    #: Y coordinate.
    y = Coerced(int)

    #: Pattern name.
    name = Unicode()
class MicropydeWorkbench(UIWorkbench):
    """ UI workbench for the Micropython IDE.

    Holds the singleton instance, exposes shortcuts for the common Qt
    message dialogs, and drives the main application event loop.
    """

    #: Singleton instance, set by run().
    _instance = None

    #: Application name used as the prefix of dialog titles.
    app_name = Unicode('Micropython IDE')

    @classmethod
    def instance(cls):
        """ Return the singleton workbench instance (None before run()). """
        return cls._instance

    @property
    def application(self):
        """ Return the enaml application owned by the ui plugin. """
        ui = self.get_plugin('enaml.workbench.ui')
        return ui._application

    @property
    def window(self):
        """ Return the main UI window or a dialog if it wasn't made yet
        (during loading)

        """
        try:
            ui = self.get_plugin('enaml.workbench.ui')
            return ui.window.proxy.widget
        except Exception:
            # The ui plugin or its window may not exist yet during startup;
            # fall back to a throwaway dialog so message boxes have a parent.
            # QDialog lives in QtWidgets in Qt5-style bindings (QtGui has no
            # QDialog there), matching the QtWidgets usage below.
            return QtWidgets.QDialog()

    # -------------------------------------------------------------------------
    # Message API
    # -------------------------------------------------------------------------
    def message_critical(self, title, message, *args, **kwargs):
        """ Shortcut to display a critical popup dialog. """
        return QtWidgets.QMessageBox.critical(
            self.window,
            "{0} - {1}".format(self.app_name, title), message,
            *args, **kwargs)

    def message_warning(self, title, message, *args, **kwargs):
        """ Shortcut to display a warning popup dialog. """
        return QtWidgets.QMessageBox.warning(
            self.window,
            "{0} - {1}".format(self.app_name, title), message,
            *args, **kwargs)

    def message_information(self, title, message, *args, **kwargs):
        """ Shortcut to display an info popup dialog. """
        return QtWidgets.QMessageBox.information(
            self.window,
            "{0} - {1}".format(self.app_name, title), message,
            *args, **kwargs)

    def message_about(self, title, message, *args, **kwargs):
        """ Shortcut to display an about popup dialog. """
        return QtWidgets.QMessageBox.about(
            self.window,
            "{0} - {1}".format(self.app_name, title), message,
            *args, **kwargs)

    def message_question(self, title, message, *args, **kwargs):
        """ Shortcut to display a question popup dialog. """
        return QtWidgets.QMessageBox.question(
            self.window,
            "{0} - {1}".format(self.app_name, title), message,
            *args, **kwargs)

    # -------------------------------------------------------------------------
    # Workbench API
    # -------------------------------------------------------------------------
    def run(self):
        """ Run the UI workbench application.

        This method will load the core and ui plugins and start the
        main application event loop. This is a blocking call which
        will return when the application event loop exits.

        """
        MicropydeWorkbench._instance = self
        with enaml.imports():
            from enaml.workbench.core.core_manifest import CoreManifest
            from enaml.workbench.ui.ui_manifest import UIManifest

        self.register(CoreManifest())
        self.register(UIManifest())

        #: Init the ui
        ui = self.get_plugin('enaml.workbench.ui')
        ui.show_window()

        #: Start the core plugin (loading it is the side effect we need).
        plugin = self.get_plugin('micropyde.core')

        ui.start_application()
class Map(Tag):
    """An HTML <map> element (image map container)."""

    #: Name of the map, referenced by an <img usemap="#name"> attribute.
    name = d_(Unicode())

    @observe('name')
    def _update_proxy(self, change):
        """Push name changes to the proxy element."""
        super(Map, self)._update_proxy(change)
class BaseCollector(Atom):
    """Base class for automating extension collection.

    """
    #: Reference to the application workbench.
    workbench = Typed(Workbench)

    #: Id of the extension point to observe.
    point = Unicode()

    #: Expected class(es) of the object generated by the extension.
    ext_class = Coerced(ClassTuple)

    #: Dictionary storing the contributions of the observed extension point.
    #: This should not be altered by user code. It is never modified in place
    #: so observers always get reliable notifications.
    contributions = Dict()

    def start(self):
        """Collect the initial contributions and set up the observers.

        Call this from the start method of the plugin owning this object.

        """
        self._refresh_contributions()
        self._bind_observers()

    def stop(self):
        """Unbind the observers and release resources.

        Call this from the stop method of the plugin owning this object.

        """
        self._unbind_observers()
        self.unobserve('contributions')  # Disconnect all observers
        self.contributions.clear()
        self._extensions.clear()

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    #: Private storage keeping track of which extension declared which object.
    _extensions = Typed(defaultdict, (list,))

    def _refresh_contributions(self):
        """Refresh the extensions contributions.

        Subclasses must implement this; it runs once from start().

        """
        raise NotImplementedError()

    def _on_contribs_updated(self, change):
        """Handle a change of the observed extension point's extensions.

        Subclasses must implement this.

        """
        raise NotImplementedError()

    def _bind_observers(self):
        """Attach the observer to the extension point.

        """
        ext_point = self.workbench.get_extension_point(self.point)
        ext_point.observe('extensions', self._on_contribs_updated)

    def _unbind_observers(self):
        """Detach the observer from the extension point.

        """
        ext_point = self.workbench.get_extension_point(self.point)
        ext_point.unobserve('extensions', self._on_contribs_updated)
class Blockquote(Tag):
    """An HTML <blockquote> element."""

    #: URL of the quotation source (the HTML "cite" attribute).
    cite = d_(Unicode())

    @observe('cite')
    def _update_proxy(self, change):
        """Push cite changes to the proxy element."""
        super(Blockquote, self)._update_proxy(change)
class Bdo(Tag):
    """An HTML <bdo> (bi-directional text override) element."""

    #: Text direction ("ltr"/"rtl" per the HTML dir attribute).
    dir = d_(Unicode())
class Tag(ToolkitObject):
    """ Base declarative class for an HTML element.

    Holds the common attributes of every element, keeps the proxy in sync
    with declarative state, and notifies the websocket client (via the Html
    root) when the tree or attributes change.
    """
    #: Reference to the proxy object
    proxy = Typed(ProxyTag)

    #: Object ID
    id = d_(Unicode())

    #: Object reference used to look the node up over the websocket.
    ref = d_(Unicode())

    #: Tag name (leave blank for class name)
    tag = d_(Unicode()).tag(attr=False)

    #: CSS classes
    cls = d_(Instance((list, object))).tag(attr=False)

    #: CSS styles
    style = d_(Instance((dict, object))).tag(attr=False)

    #: Node text
    text = d_(Unicode()).tag(attr=False)

    #: Node tail text
    tail = d_(Unicode()).tag(attr=False)

    #: Alt attribute
    alt = d_(Unicode())

    #: Custom attributes not explicitly defined
    attrs = d_(Dict()).tag(attr=False)

    #: Event from JS
    onclick = d_(Unicode())

    #: Whether this is clickable via websockets
    clickable = d_(Bool())

    #: Event from JS
    clicked = d_(Event())

    def _default_tag(self):
        """ The tag name defaults to the lowercased class name. """
        return self.__class__.__name__.lower()

    def _default_ref(self):
        """ Default node reference: the CPython object id (unique per live
        object).
        """
        return u"{}".format(id(self))

    @observe('id', 'tag', 'cls', 'style', 'text', 'tail', 'alt', 'attrs',
             'onclick', 'clickable')
    def _update_proxy(self, change):
        """ Update the proxy widget when the Widget data changes. """
        if change['type'] == 'update' and self.proxy_is_active:
            # Prefer a dedicated proxy setter; fall back to the generic
            # attribute setter when none exists.
            handler = getattr(self.proxy, 'set_' + change['name'], None)
            if handler is not None:
                handler(change['value'])
            else:
                self.proxy.set_attribute(change['name'], change['value'])
            self._notify_modified(change)

    def _notify_modified(self, change):
        """ If a change occurs when we have a websocket connection active
        notify the websocket client of the change.
        """
        root = self.root_object()
        if isinstance(root, Html):
            # Only the fields the client needs are forwarded.
            root.modified({
                'ref': self.ref,
                'type': change['type'],
                'name': change['name'],
                'value': change['value'],
            })

    def child_added(self, child):
        """ Notify the client when a Tag child is added to an active tree. """
        super(Tag, self).child_added(child)
        if isinstance(child, Tag) and self.proxy_is_active:
            change = {
                'type': 'added',
                'name': 'children',
                #'before':self.ch #: TODO: Handle placement?
                'value': child.render().decode()
            }
            self._notify_modified(change)

    def child_removed(self, child):
        """ Notify the client when a Tag child is removed. """
        super(Tag, self).child_removed(child)
        if isinstance(child, Tag) and self.proxy_is_active:
            change = {
                'type': 'removed',
                'name': 'children',
                'value': child.ref,
            }
            self._notify_modified(change)

    # NOTE: a second, broken definition of xpath (referencing an undefined
    # `nodes` and shadowed by this one) was removed.
    def xpath(self, query, **kwargs):
        """ Find nodes matching the given xpath query.

        Returns a (possibly empty) list of declarations; an empty list is
        returned when the proxy does not exist yet.
        """
        if not self.proxy:
            return []
        nodes = self.proxy.find(query, **kwargs)
        return [n.declaration for n in nodes]

    def prepare(self, **kwargs):
        """ Prepare for rendering: apply kwargs as attributes and make sure
        the declaration and proxy are initialized and active.
        """
        for k, v in kwargs.items():
            setattr(self, k, v)
        if not self.is_initialized:
            self.initialize()
        if not self.proxy_is_active:
            self.activate_proxy()

    def render(self, **kwargs):
        """ Render this node (and its subtree) to a string. """
        self.prepare(**kwargs)
        return self.proxy.render()
class ImportedSymbol(Symbol):
    """A symbol that is resolved by importing a python module."""

    #: Dotted import path of the module to load.
    module = d_(Unicode())

    def get_object(self):
        """Import and return the module named by ``module``.

        Raises ImportError when the module cannot be imported.
        """
        return importlib.import_module(self.module)
class SlopeShape(AbstractShape):
    """Shape whose amplitude varies linearly with time.

    The two free parameters def1/def2 are interpreted according to `mode`:
    (start, stop), (start, slope) or (slope, stop).
    """
    #: Interpretation of the input values.
    #: Note that the slope is interpreted with respect to the context time
    #: unit.
    mode = Enum('Start/Stop', 'Start/Slope', 'Slope/Stop').tag(pref=True)

    #: First input parameter, will be interpreted based on the selected mode.
    def1 = Unicode('0.5').tag(pref=True, feval=Feval(types=Real))

    #: Second input parameter, will be interpreted based on the selected mode.
    def2 = Unicode('1.0').tag(pref=True, feval=Feval(types=Real))

    def eval_entries(self, root_vars, sequence_locals, missing, errors):
        """ Evaluate the parameters of the pulse shape.

        Parameters
        ----------
        root_vars : dict
            Global variables. As shapes and modulation cannot update them
            an empty dict is passed.

        sequence_locals : dict
            Known locals variables for the pulse sequence.

        missing : set
            Set of variables missing to evaluate some entries in the
            sequence.

        errors : dict
            Errors which occurred when trying to compile the pulse sequence.

        Returns
        -------
        result : bool
            Flag indicating whether or not the evaluation succeeded.

        """
        res = super(SlopeShape, self).eval_entries(root_vars, sequence_locals,
                                                   missing, errors)

        # Validate that the resulting amplitudes stay inside [-1, 1]; every
        # violation is recorded in errors and flips res to False.
        if res:
            if self.mode in ('Start/Stop', 'Start/Slope'):
                # In both these modes def1 is the start amplitude.
                start = self._cache['def1']
                if not -1.0 <= start <= 1.0:
                    msg = 'Shape start must be between -1 and 1 (got %s).'
                    errors[self.format_error_id('start')] = msg % start
                    res = False
            if self.mode in ('Start/Stop', 'Slope/Stop'):
                # In both these modes def2 is the stop amplitude.
                stop = self._cache['def2']
                if not -1.0 <= stop <= 1.0:
                    msg = 'Shape stop must be between -1 and 1 (got %s).'
                    errors[self.format_error_id('stop')] = msg % stop
                    res = False
            if self.mode == 'Start/Slope':
                # Derive the stop amplitude from start + slope * duration.
                duration = sequence_locals['{}_duration'.format(self.index)]
                stop = self._cache['def1'] + self._cache['def2'] * duration
                if not -1.0 <= stop <= 1.0:
                    msg = ('For the given slope and pulse duration, the stop '
                           'is not between -1 and 1 (got %s).')
                    errors[self.format_error_id('slope')] = msg % stop
                    res = False
            elif self.mode == 'Slope/Stop':
                # Derive the start amplitude from stop - slope * duration.
                duration = sequence_locals['{}_duration'.format(self.index)]
                start = self._cache['def2'] - self._cache['def1'] * duration
                if not -1.0 <= start <= 1.0:
                    msg = ('For the given slope and pulse duration, the start '
                           'is not between -1 and 1 (got %s).')
                    errors[self.format_error_id('slope')] = msg % start
                    res = False

        return res

    def compute(self, time, unit):
        """ Computes the shape of the pulse at a given time.

        Parameters
        ----------
        time : ndarray
            Times at which to compute the modulation.

        unit : str
            Unit in which the time is expressed.

        Returns
        -------
        shape : ndarray
            Amplitude of the pulse.

        """
        # NOTE(review): time[-1] is used as the pulse duration below, which
        # assumes `time` starts at 0 (pulse-local time) -- confirm, since
        # eval_entries uses the '{}_duration' sequence local instead.
        if self.mode == 'Start/Stop':
            start = self._cache['def1']
            stop = self._cache['def2']
        elif self.mode == 'Start/Slope':
            start = self._cache['def1']
            stop = start + self._cache['def2'] * time[-1]
        else:
            # 'Slope/Stop': def1 is the slope, def2 the stop amplitude.
            stop = self._cache['def2']
            start = stop - self._cache['def1'] * time[-1]
        return np.linspace(start, stop, len(time))
class DeclaracadPlugin(Plugin):
    """ Core plugin: collects the DockItem and SettingsPage extensions
    contributed by other plugins and builds the dock layout from them.

    """
    #: Project site.
    #: BUGFIX: scheme separator was "https;//" (semicolon) -- invalid URL.
    wiki_page = Unicode("https://www.codelv.com/projects/declaracad")

    #: Dock items to add
    dock_items = List(DockItem)

    #: Layout built from the registered dock items
    dock_layout = Instance(AreaLayout)

    #: Settings pages to add
    settings_pages = List(extensions.SettingsPage)

    #: Current settings page
    settings_page = Instance(extensions.SettingsPage)

    #: Internal settings models
    settings_typemap = Dict()
    settings_model = Instance(Atom)

    def start(self):
        """ Load all the plugins declaracad is dependent on """
        # (removed an unused local alias of self.workbench)
        super(DeclaracadPlugin, self).start()
        self._refresh_dock_items()
        self._refresh_settings_pages()

    def start_default_workspace(self):
        """ Switch the UI to the declaracad workspace. """
        ui = self.workbench.get_plugin('enaml.workbench.ui')
        ui.select_workspace('declaracad.workspace')

    def _bind_observers(self):
        """ Setup the observers for the plugin.

        """
        super(DeclaracadPlugin, self)._bind_observers()
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)
        point.observe('extensions', self._refresh_dock_items)

        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)
        point.observe('extensions', self._refresh_settings_pages)

    def _unbind_observers(self):
        """ Remove the observers for the plugin.

        """
        super(DeclaracadPlugin, self)._unbind_observers()
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)
        point.unobserve('extensions', self._refresh_dock_items)

        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)
        point.unobserve('extensions', self._refresh_settings_pages)

    # -------------------------------------------------------------------------
    # Dock API
    # -------------------------------------------------------------------------
    def create_new_area(self):
        """ Create the dock area """
        with enaml.imports():
            from .dock import DockView
        area = DockView(
            workbench=self.workbench,
            plugin=self
        )
        return area

    def get_dock_area(self):
        """ Get the dock area

        Returns
        -------
            area: DockArea

        """
        ui = self.workbench.get_plugin('enaml.workbench.ui')
        if not ui.workspace or not ui.workspace.content:
            ui.select_workspace('declaracad.workspace')
        return ui.workspace.content.find('dock_area')

    def _refresh_dock_items(self, change=None):
        """ Reload all DockItems registered by any Plugins

        Any plugin can add to this list by providing a DockItem extension
        in their PluginManifest.

        """
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)

        #: Layout spec
        layout = {
            'main': [],
            'left': [],
            'right': [],
            'bottom': [],
            'top': []
        }

        dock_items = []
        for extension in sorted(point.extensions, key=lambda ext: ext.rank):
            for declaration in extension.get_children(extensions.DockItem):
                #: Create the item
                DockItem = declaration.factory()
                item = DockItem(
                    plugin=workbench.get_plugin(declaration.plugin_id),
                )
                #: Add to our layout
                layout[declaration.layout].append(item.name)

                #: Save it
                dock_items.append(item)

        #: Update items
        log.debug("Updating dock items: {}".format(dock_items))
        self.dock_items = dock_items
        self._refresh_layout(layout)

    def _refresh_layout(self, layout):
        """ Create the layout for all the plugins

        """
        if not self.dock_items:
            # NOTE(review): the return value is discarded by the caller --
            # this simply skips rebuilding the layout when no items exist.
            return AreaLayout()

        items = layout.pop('main')
        if not items:
            raise EnvironmentError("At least one main layout item must be "
                                   "defined!")

        main = (HSplitLayout(TabLayout(*items[1:]), items[0])
                if len(items) > 1 else items[0])

        # BUGFIX(style): inner loop variable no longer shadows `items`.
        dockbars = [DockBarLayout(*bar_items, position=side)
                    for side, bar_items in layout.items() if bar_items]

        #: Update layout
        self.dock_layout = AreaLayout(
            main,
            dock_bars=dockbars
        )

    # -------------------------------------------------------------------------
    # Settings API
    # -------------------------------------------------------------------------
    def _default_settings_page(self):
        return self.settings_pages[0]

    def _observe_settings_page(self, change):
        log.debug("Settings page: {}".format(change))

    def _refresh_settings_pages(self, change=None):
        """ Reload all SettingsPages registered by any Plugins

        Any plugin can add to this list by providing a SettingsPage extension
        in their PluginManifest.

        """
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)

        settings_pages = []
        typemap = {}
        for extension in sorted(point.extensions, key=lambda ext: ext.rank):
            for d in extension.get_children(extensions.SettingsPage):
                #: Save it
                settings_pages.append(d)

                #: Update the type map
                plugin = self.workbench.get_plugin(d.plugin_id)
                t = type(getattr(plugin, d.model) if d.model else plugin)
                typemap[t] = d.factory()

        #: Update items
        log.debug("Updating settings pages: {}".format(settings_pages))
        self.settings_typemap = typemap
        self.settings_pages = settings_pages
class RuntimeDependencyCollector(Declarative):
    """Runtime dependencies are resources needed at runtime by some
    structure (ex: tasks using instrument need at runtime the driver class
    and the instrument profile to work correctly).

    """
    #: Unique id for this extension.
    id = d_(Unicode())

    @d_func
    def validate(self, workbench, dependencies, errors):
        """Validate that all the dependencies exist.

        This method should try to access the dependencies but simply assert
        that they exist.

        This method should never raise an error but rather use the errors
        dictionary to signal any issue.

        Parameters
        ----------
        workbench : enaml.workbench.api.Workbench
            Reference to the application workbench.

        dependencies : set
            Set of dependencies to validate.

        errors : dict
            Dictionary in which to write the errors that occurred during
            collection.

        """
        raise NotImplementedError()

    @d_func
    def collect(self, workbench, owner, dependencies, unavailable, errors):
        """Collect the identified runtime dependencies.

        This method should never raise an error but rather use the errors
        dictionary to signal any issue.

        If some of them require some kind of permission, this permission
        should be requested here.

        Parameters
        ----------
        workbench : enaml.workbench.api.Workbench
            Reference to the application workbench.

        owner : unicode
            Calling plugin id. Used for some runtime dependencies needing to
            know the resource owner.

        dependencies : dict
            Dictionary whose values are initialised to None listing the
            dependencies to collect.

        unavailable : set
            Set of resources that could not be provided because they are
            currently unavailable.

        errors : dict
            Dictionary in which to write the errors that occurred during
            collection.

        """
        raise NotImplementedError()

    @d_func
    def release(self, workbench, owner, dependencies):
        """Release resources previously collected.

        This makes sense only if the resource requires some kind of
        permissions.

        Parameters
        ----------
        workbench :
            Reference to the application workbench.

        owner : unicode
            Id of the plugin releasing the resources.

        dependencies : iterable
            Iterable of dependencies that are no longer needed.

        """
        pass
class Pattern(Atom): name = Unicode() shot_mod = Unicode()
class BuildDependency(Declarative):
    """Build dependencies are used to rebuild ecpy structures.

    If a plugin manage objects used to build a structure that can be saved to
    a config file it should declare a BuildDependency extension and contribute
    it  to the 'build-dependencies' extensions point of the
    DependenciesPlugin (ecpy.app.dependencies).

    """
    #: Unique id for this extension. Should match the dep_type attribute value
    #: of the object it is meant for.
    id = d_(Unicode())

    @d_func
    def analyse(self, workbench, obj, getter, dependencies, errors):
        """Analyse the identified build dependencies and list runtime ones.

        This method should never raise an error but rather use the errors
        dictionary to signal any issue.

        Parameters
        ----------
        workbench : enaml.workbench.api.Workbench
            Reference to the application workbench.

        obj :
            Object whose build dependencies should be analysed and runtime
            ones identified.

        getter : callable(obj, name)
            Callable to use to access obj attribute. Attribute must be accessed
            using this function rather than the usual '.' syntax as the passed
            object might be a dictionary like object.

        dependencies : set
            Set in which to list the dependencies.

        errors : dict
            Dictionary in which to write the errors that occurred during
            collection.

        Returns
        -------
        runtime_collectors : list
            List of runtime dependencies that this object have.

        """
        raise NotImplementedError()

    @d_func
    def validate(self, workbench, dependencies, errors):
        """Validate that all the dependencies exist.

        This method is not intended to query the actual dependencies but
        simply to assert that they are theoretically available from the
        manager plugin.

        This method should never raise an error but rather use the errors
        dictionary to signal any issue.

        Parameters
        ----------
        workbench : enaml.workbench.api.Workbench
            Reference to the application workbench.

        dependencies : set
            Set of dependencies to validate.

        errors : dict
            Dictionary in which to write the errors that occurred during
            collection.

        """
        raise NotImplementedError()

    @d_func
    def collect(self, workbench, dependencies, errors):
        """Collect build dependencies.

        This method should never raise an error but rather use the errors
        dictionary to signal any issue.

        Parameters
        ----------
        workbench : enaml.workbench.api.Workbench
            Reference to the application workbench.

        dependencies : dict
            Dictionary whose values are initialised to None listing the
            dependencies to collect.

        errors : dict
            Dictionary in which to write the errors that occurred during
            collection.

        """
        raise NotImplementedError()
class JDF_Top(Atom): plot = Typed(Plotter, ()) agents = List() pattern_dict = Dict() quarter_wafer = Enum("A", "B", "C", "D") def distribute_coords(self, num=None): self.comments = [ "distributed main array for quarter wafer {}".format( self.quarter_wafer) ] self.Px, self.Py, self.Qx, self.Qy = get_GLM(self.quarter_wafer) if num is None: num = len(self.patterns) coords = distribute_coords(num, self.quarter_wafer) for n, c in enumerate(coords): self.arrays[0].assigns[n].pos_assign = c (self.arrays[0].x_start, self.arrays[0].x_num, self.arrays[0].x_step, self.arrays[0].y_start, self.arrays[0].y_num, self.arrays[0].y_step) = get_Array(self.quarter_wafer) def show(self): show(*self.agents) def pre_plot(self): for p in self.agents: p.verts = [] p.make_polylist() self.pattern_dict[p.name] = dict(verts=p.verts[:], color=p.color, layer=p.layer, plot_sep=p.plot_sep) for key in self.pattern_dict: if self.pattern_dict[key]["plot_sep"]: self.plot.set_data(key, self.pattern_dict[key]["verts"], self.pattern_dict[key]["color"]) xmin = min(b.xmin for b in self.agents) xmax = max(b.xmax for b in self.agents) ymin = min(b.ymin for b in self.agents) ymax = max(b.ymax for b in self.agents) self.plot.set_xlim(xmin, xmax) self.plot.set_ylim(ymin, ymax) self.plot.draw() @property def show_all(self): return True text = Unicode() output_jdf = Unicode() comments = List() #Unicode() Px = Coerced(int, (-40000, )) Py = Coerced(int, (4000, )) Qx = Coerced(int, (-4000, )) Qy = Coerced(int, (40000, )) mgn_name = Unicode("IDT") wafer_diameter = Coerced(int, (4, )) write_diameter = Coerced(float, (-4.2, )) stdcur = Coerced(int, (2, )) shot = Coerced(int, (8, )) resist = Coerced(int, (165, )) arrays = ContainerList() #.tag(width='max', inside_type=jdf_array) patterns = ContainerList() #.tag(width='max', inside_type=jdf_pattern) jdis = ContainerList() def do_plot(self, a=None): if a == None: a = self.arrays[0] for s in a.assigns: for p in s.assign_type: #generate verts if p[0] == "P": verts = [ 
t.name for t in self.patterns if t.num == int(p[2]) ][0] elif p[0] == "A": pass #do_plot array given by num for o in s.pos_assign: x_ref = a.x_start + (int(o[0]) - 1) * a.x_step y_ref = a.y_start + (int(o[1]) - 1) * a.x_step print x_ref, y_ref #offset vertices by x_ref, y_ref def _observe_text(self, change): self.jdf_parse(self.text) self.output_jdf = self.jdf_produce() @property def view_window(self): with imports(): from e_Show import JDFView return JDFView(jdf=self) def clear_JDF(self): self.comments = [] self.arrays = [] self.patterns = [] self.jdis = [] def jdf_parse(self, jdf_data): jdf_list = jdf_data.split("\n") inside_path = False inside_layer = False self.clear_JDF() array_num = 0 for n, line in enumerate(jdf_list): tempstr, comment = parse_comment(line) if tempstr == "" and comment != "": self.comments.append(comment) if tempstr.startswith('GLMPOS'): self.Px, self.Py, self.Qx, self.Qy = xy_string_split( tempstr) #get P and Q mark positions elif tempstr.startswith('JOB'): mgn_name, self.wafer_diameter, self.write_diameter = tempstr.split( ",") #magazine name and wafer size self.mgn_name = mgn_name.split("'")[1].strip() elif tempstr.startswith("PATH"): inside_path = True elif "LAYER" in tempstr: inside_layer = True if inside_path: if 'ARRAY' in tempstr: if ":" in tempstr: array_num = tempstr.split(":")[0] #for subarrays x_start, x_num, x_step, y_start, y_num, y_step = xy_string_split( tempstr) self.arrays.append( JDF_Array(array_num=array_num, x_start=x_start, x_num=x_num, x_step=x_step, y_start=y_start, y_num=y_num, y_step=y_step)) else: x_start, x_num, x_step, y_start, y_num, y_step = xy_string_split( tempstr) self.arrays.append( JDF_Main_Array(x_start=x_start, x_num=x_num, x_step=x_step, y_start=y_start, y_num=y_num, y_step=y_step)) elif 'ASSIGN' in tempstr: self.arrays[-1].add_assign(tempstr, comment) elif 'CHMPOS' in tempstr: M1x, M1y = tuple_split(tempstr) self.arrays[-1].M1x = M1x self.arrays[-1].M1y = M1y elif "PEND" in tempstr: inside_path = False 
elif inside_layer: if 'END' in tempstr: inside_layer = False elif 'STDCUR' in tempstr: stdcur = tempstr.split("STDCUR")[1] self.stdcur = stdcur elif 'SHOT' in tempstr: shot = tempstr.split(',')[1] self.shot = shot elif 'RESIST' in tempstr: resist = tempstr.split('RESIST')[1] self.resist = resist elif 'P(' in tempstr: pattern_name = tempstr.split("'")[1].split(".")[0] pattern_num = tempstr.split("(")[1].split(")")[0] pattern_x = tempstr.split("(")[2].split(")")[0].split( ",")[0] pattern_y = tempstr.split("(")[2].split(")")[0].split( ",")[0] self.patterns.append( JDF_Pattern(num=pattern_num, x=pattern_x, y=pattern_y, name=pattern_name)) elif tempstr.startswith('@'): jdi_str = tempstr.split("'")[1].split(".jdi")[0] self.jdis.append(jdi_str) def jdf_produce(self): jl = [] #jdf_data.split("\n") jl.append("JOB/W '{name}', {waf_diam}, {write_diam}\n".format( name=self.mgn_name, waf_diam=self.wafer_diameter, write_diam=self.write_diameter)) jl.append(";{comment}\n".format(comment=self.comments[0])) jl.append("GLMPOS P=({Px}, {Py}), Q=({Qx},{Qy})".format(Px=self.Px, Py=self.Py, Qx=self.Qx, Qy=self.Qy)) jl.append("PATH") for n, item in enumerate(self.arrays): if item.array_num == 0: jl.append( "ARRAY ({x_start}, {x_num}, {x_step})/({y_start}, {y_num}, {y_step})" .format(x_start=self.arrays[0].x_start, x_num=self.arrays[0].x_num, x_step=self.arrays[0].x_step, y_start=self.arrays[0].y_start, y_num=self.arrays[0].y_num, y_step=self.arrays[0].y_step)) jl.append("\tCHMPOS M1=({M1x}, {M1y})".format( M1x=self.arrays[0].M1x, M1y=self.arrays[0].M1y)) else: jl.append( "{arr_num}: ARRAY ({x_start}, {x_num}, {x_step})/({y_start}, {y_num}, {y_step})" .format(arr_num=item.array_num, x_start=item.x_start, x_num=item.x_num, x_step=item.x_step, y_start=item.y_start, y_num=item.y_num, y_step=item.y_step)) for item in item.assigns: asgn_type = "+".join(item.assign_type) pos_asgn = "" for pos in item.pos_assign: pos_asgn += "({x},{y}),".format(x=pos[0], y=pos[1]) pos_asgn = pos_asgn[:-1] if 
item.shot_assign == "": shot_asgn = "" else: shot_asgn = ", {sa}".format(sa=item.shot_assign) if item.assign_comment == "": asgn_comment = "" else: asgn_comment = ";{ac}".format(ac=item.assign_comment) jl.append( "\tASSIGN {asgn_type} -> ({pos_asgn}{shot_asgn}) {asgn_comment}" .format(asgn_type=asgn_type, pos_asgn=pos_asgn, shot_asgn=shot_asgn, asgn_comment=asgn_comment)) jl.append("AEND\n") jl.append("PEND\n\nLAYER 1") for n, item in enumerate(self.patterns): jl.append("P({pnum}) '{pname}.v30' ({px},{py})".format( pnum=item.num, pname=item.name, px=item.x, py=item.y)) jl.append("\nSTDCUR {0}".format(self.stdcur)) jl.append("SHOT A,{0}".format(self.shot)) jl.append("RESIST {}\n".format(self.resist)) for item in self.jdis: jl.append("@ '{jdi_name}.jdi'".format(jdi_name=item)) jl.append("\nEND 1") return "\n".join(jl)
class Plotter(SubAgent):
    """Matplotlib-backed plotting agent.

    Keeps a dict of named matplotlib collections (``self.clts``) so data can
    be updated in place, plus cross-section / drag-measurement state.
    """
    base_name = "plot"
    #: Palette; `mycolors` is defined at module level.
    plt_colors = mycolors
    title = Unicode()
    alldata = Array()
    # Drag-measurement state (distances are derived, hence read_only).
    xdist = Float().tag(read_only=True)
    ydist = Float().tag(read_only=True)
    xcoord = Float()
    ycoord = Float()
    xind = Int()
    yind = Int()
    show_cs = Bool(False)
    drawline = Bool(False)
    xstart = Float()
    ystart = Float()

    @tag_Property()
    def total_dist(self):
        """Euclidean length of the (xdist, ydist) drag vector."""
        return sqrt(self.xdist**2 + self.ydist**2)

    @observe("xdist", "ydist")
    def xydist_calc(self, change):
        # Invalidate the cached total_dist when either component changes.
        if change["type"] == "update":
            self.get_member("total_dist").reset(self)

    def activated(self):
        # Hook for mpl canvas event connections; intentionally a no-op.
        pass

    xyfs = Typed(OrderedDict)

    def _default_xyfs(self):
        xyfs = OrderedDict()
        xyfs["All"] = AllXYFormat(plotter=self, name="All")
        return xyfs

    @private_property
    def xyfs_keys(self):
        return self.xyfs.keys()

    @private_property
    def xyfs_items(self):
        return self.xyfs.values()

    fig = Typed(Figure).tag(private=True)
    axe = Typed(Axes).tag(private=True)

    plot_type_list = [
        "Line plot", "Scatter plot", "Colormap", "Polygon", "Text"
    ]

    @private_property
    def plot_type_map(self):
        return {
            "Line plot": self.line_plot,
            "Scatter plot": self.scatter_plot,
            "Colormap": self.colormap,
            "Polygon": self.poly_plot,
            "Text": self.add_text
        }

    def set_data(self, zname=None, zdata=None, zcolor=None, plot_type="poly"):
        """Create or update the named collection with new data.

        BUGFIX: string comparisons used ``is`` (identity) instead of ``==``;
        dead ``if 0:`` debug blocks were removed (never executed).
        """
        if zdata is None:
            return
        if plot_type == "poly":
            if zname not in self.clts:
                # Create the collection once, then only update its verts.
                clt = PolyCollection([], alpha=0.5, antialiased=True)
                if zcolor is not None:
                    clt.set_color(colorConverter.to_rgba(zcolor))
                self.clts[zname] = clt
                self.axe.add_collection(self.clts[zname])
            self.clts[zname].set_verts(zdata)
        elif plot_type == "line":
            if zname not in self.clts:
                clt = LineCollection(zdata)
                if zcolor is not None:
                    clt.set_color(zcolor)
                else:
                    clt.set_array(arange(len(zdata)))
                # NOTE(review): the new LineCollection is neither stored in
                # self.clts nor added to the axes (preserved from the
                # original) -- confirm whether that is intended.
            else:
                self.clts[zname].set_verts(zdata)
        elif plot_type == "scatter":
            self.axe.scatter(zdata, zdata)
        elif plot_type == "colormap":
            # NOTE(review): x, y, z are undefined here; this branch raises
            # NameError if ever taken (behavior preserved from the original).
            self.axe.pcolormesh(x, y, z)

    @private_property
    def view_window(self):
        with imports():
            from taref.core.plotter_e import PlotMain
        return PlotMain(plotr=self)
class IniConfigTask(AbstractConfigTask):
    """Config task building a task hierarchy from an INI template file.

    """
    # Path to the .ini template describing the task to build.
    template_path = Unicode()
    # Documentation extracted from the template's initial comment block.
    template_doc = Str()
    template_content = Str()

    def __init__(self, *args, **kwargs):
        super(IniConfigTask, self).__init__(*args, **kwargs)
        doc_list = ConfigObj(self.template_path).initial_comment
        # Strip the '#' comment markers to get plain documentation text.
        doc = ''
        for line in doc_list:
            doc += line.replace('#', '')
        self.template_doc = doc

    @observe('task_name')
    def check_parameters(self):
        # BUGFIX: the original compared with `is not ''` (identity); string
        # identity is implementation-defined -- use equality instead.
        if self.task_name != '':
            self.config_ready = True
        else:
            self.config_ready = False

    def build_task(self):
        """Build the task described by the template.

        Returns
        -------
        built_task :
            Root of the built hierarchy.
        """
        config = ConfigObj(self.template_path)
        # Handle the case of an attempt to make a root task of a task which is
        # not a ComplexTask. built_task will be returned but task will be the
        # object used for the following manipulations.
        if self.task_class == RootTask and\
                config['task_class'] != 'ComplexTask':
            built_task = RootTask()
            task = getattr(tasks,
                           config['task_class'])(task_name=config['task_name'])
            built_task.children.append(task)
        else:
            task = self.task_class(task_name=self.task_name)
            built_task = task

        parameters = self._prepare_parameters(config)
        task.update_members_from_preferences(**parameters)
        return built_task

    @classmethod
    def build_task_from_config(cls, config):
        """Build a root task hierarchy directly from a config section."""
        built_task = RootTask(task_name='Root')
        parameters = cls._prepare_parameters(config)
        built_task.update_members_from_preferences(**parameters)
        return built_task

    @classmethod
    def _build_child(cls, section):
        """Instantiate one child task from its config section."""
        task = getattr(tasks,
                       section['task_class'])(task_name=section['task_name'])
        parameters = cls._prepare_parameters(section)
        task.update_members_from_preferences(**parameters)
        return task

    @classmethod
    def _prepare_parameters(cls, section):
        """
        Parameters
        ----------
        section : instance of Section
            Section describing the parameters which must be sent to the task.

        Returns
        -------
        parameters : dict
            Dictionary holding the parameters to be passed to a task.
        """
        # First get the non-task traits as strings.
        parameters = {}
        if section.scalars:
            for entry in section.scalars:
                if entry != 'task_class' and entry != 'task_name':
                    parameters[entry] = section[entry]

        # Second create all the necessary children, grouping numbered
        # sections (e.g. 'children_1', 'children_2') under one key.
        if section.sections:
            for entry in section.sections:
                key = entry
                if any(i in entry for i in '0123456789'):
                    key = ''.join(c for c in entry if not c.isdigit())
                    if key.endswith('_'):
                        key = key[:-1]
                # FIX: `in` replaces dict.has_key(), which Python 3 removed.
                if key in parameters:
                    parameters[key].append(cls._build_child(section[entry]))
                else:
                    parameters[key] = [cls._build_child(section[entry])]

        return parameters
class QtNodeItem(QtGraphicsItem, ProxyNodeItem):
    """ A Qt implementation of an Enaml NodeItem.

    Mirrors the declaration's geometry and color attributes onto a QNodeItem
    graphics widget and paints the node (title bar, content, outline).
    """
    # Geometry and styling state mirrored from the declaration.
    id = Unicode()
    width = Int(180)
    height = Int(240)
    position = Typed(Point2D)
    edge_size = Float(10.0)
    title_height = Float(24.0)
    padding = Float(4.0)
    font_title = Typed(QtGui.QFont)
    color_default = Typed(QtGui.QColor)
    color_selected = Typed(QtGui.QColor)
    color_title = Typed(QtGui.QColor)
    color_title_background = Typed(QtGui.QColor)
    color_background = Typed(QtGui.QColor)
    show_content_inline = Bool(False)
    content = Instance(QtNodeContent)

    #: A reference to the widget created by the proxy.
    widget = Typed(QNodeItem)

    #--------------------------------------------------------------------------
    # Initialization API
    #--------------------------------------------------------------------------
    def create_widget(self):
        """ Create the QComboBox widget.

        """
        item = QNodeItem(self)
        self.widget = item

    def init_widget(self):
        """ Create and initialize the underlying widget.

        """
        super(QtNodeItem, self).init_widget()
        d = self.declaration
        # Push every declared attribute to the widget before enabling the
        # interactive item flags.
        self.set_id(d.id)
        self.set_width(d.width)
        self.set_height(d.height)
        self.set_position(d.position)
        self.set_edge_size(d.edge_size)
        self.set_title_height(d.title_height)
        self.set_padding(d.padding)
        self.set_show_content_inline(d.show_content_inline)
        self.set_content(d.content)
        self.set_name(d.name)
        self.set_color_default(d.color_default)
        self.set_color_selected(d.color_selected)
        self.set_color_title(d.color_title)
        self.set_color_title_background(d.color_title_background)
        self.set_color_background(d.color_background)
        self.widget.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable)
        self.widget.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable)
        self.widget.setFlag(QtWidgets.QGraphicsItem.ItemSendsGeometryChanges)

    def activate_bottom_up(self):
        """ Activate the proxy tree for the bottom-up pass.

        """
        super(QtNodeItem, self).activate_bottom_up()
        if self.content is not None and self.content.is_active:
            self.setup_content()

    #--------------------------------------------------------------------------
    # observers
    #--------------------------------------------------------------------------
    @observe('padding', 'width')
    def _update_elements(self, change):
        # NOTE(review): only 'padding' changes re-layout the title; a 'width'
        # change alone does not update setTextWidth -- confirm intended.
        if change['name'] == 'padding':
            self.widget.title_item.setPos(self.padding, 0)
            self.widget.title_item.setTextWidth(self.width - 2 * self.padding)

    def _observe_position(self, change):
        # Propagate widget-driven position changes back to the declaration.
        self.declaration.position = change['value']

    #--------------------------------------------------------------------------
    # Signal Handlers
    #--------------------------------------------------------------------------
    def on_context_menu(self, event):
        """ The signal handler for the 'context_menu' signal.

        """
        self.declaration.context_menu_event()

    #--------------------------------------------------------------------------
    # Private API
    #--------------------------------------------------------------------------
    def on_paint(self, painter, style_option, widget=None):
        # Paints the node in three passes: title bar, content area, outline.
        lod = style_option.levelOfDetailFromTransform(painter.worldTransform())

        # title
        path_title = QtGui.QPainterPath()
        path_title.setFillRule(QtCore.Qt.WindingFill)
        path_title.addRoundedRect(0, 0, self.width, self.title_height,
                                  self.edge_size, self.edge_size)
        # Square off the bottom corners of the title bar.
        path_title.addRect(0, self.title_height - self.edge_size,
                           self.edge_size, self.edge_size)
        path_title.addRect(self.width - self.edge_size,
                           self.title_height - self.edge_size,
                           self.edge_size, self.edge_size)
        painter.setPen(QtCore.Qt.NoPen)
        painter.setBrush(self.color_title_background)
        painter.drawPath(path_title.simplified())

        # content
        path_content = QtGui.QPainterPath()
        path_content.setFillRule(QtCore.Qt.WindingFill)
        path_content.addRoundedRect(0, self.title_height, self.width,
                                    self.height - self.title_height,
                                    self.edge_size, self.edge_size)
        # Square off the top corners of the content area.
        path_content.addRect(0, self.title_height, self.edge_size,
                             self.edge_size)
        path_content.addRect(self.width - self.edge_size, self.title_height,
                             self.edge_size, self.edge_size)
        painter.setPen(QtCore.Qt.NoPen)
        painter.setBrush(self.color_background)
        painter.drawPath(path_content.simplified())

        # outline
        path_outline = QtGui.QPainterPath()
        path_outline.addRoundedRect(0, 0, self.width, self.height,
                                    self.edge_size, self.edge_size)
        painter.setPen(self.color_default
                       if not self.widget.isSelected() else
                       self.color_selected)
        painter.setBrush(QtCore.Qt.NoBrush)
        painter.drawPath(path_outline.simplified())

    def setup_content(self):
        self.widget.content_item = self.content.widget

    #--------------------------------------------------------------------------
    # ProxyNodeItem API
    #--------------------------------------------------------------------------
    def set_id(self, id):
        self.id = id

    def set_name(self, name):
        self.widget.title_item.setPlainText(name)

    def set_width(self, width):
        self.width = width

    def set_height(self, height):
        self.height = height

    def set_position(self, position):
        self.position = position
        self.widget.setPos(QtCore.QPointF(position.x, position.y))

    def set_edge_size(self, edge_size):
        self.edge_size = edge_size

    def set_title_height(self, title_height):
        self.title_height = title_height

    def set_padding(self, padding):
        self.padding = padding

    def set_color_default(self, color_default):
        self.color_default = get_cached_qcolor(color_default)

    def set_color_selected(self, color_selected):
        self.color_selected = get_cached_qcolor(color_selected)

    def set_color_title(self, color_title):
        self.widget.title_item.setDefaultTextColor(
            get_cached_qcolor(color_title))

    def set_color_title_background(self, color_title_background):
        self.color_title_background = get_cached_qcolor(color_title_background)

    def set_color_background(self, color_background):
        self.color_background = get_cached_qcolor(color_background)

    def set_font_title(self, font):
        # Fall back to a default font when none is declared.
        if font is not None:
            self.font_title = get_cached_qfont(font)
        else:
            self.font_title = QtGui.QFont("Ubuntu", 10)
        self.widget.title_item.setFont(self.font_title)

    def set_show_content_inline(self, show):
        self.show_content_inline = show

    def set_content(self, content):
        if isinstance(content, NodeContent) and self.show_content_inline:
            self.content = content.proxy
            if self.content.is_active:
                self.setup_content()
class AgilentNetworkAnalyzer(COM_Instrument): base_name = "E8354B" @private_property def S_names(self): return ('S11', 'S21', 'S12', 'S22') @private_property def main_params(self): return [ "doS11", "doS21", "doS12", "doS22", "trigger_mode", "VNA_abort", "start_freq", "stop_freq", "points", "averaging", "averages", 'timeout', "power", 'clear_average', 'acquire_data', 'error_query' ] #::inst0::INSTR" #enable SICL in system settings #"TCPIP::129.16.115.134::5025::SOCKET" address = Unicode("TCPIP::129.16.115.134").tag(sub=True, no_spacer=True) simulate = Bool(False).tag(sub=True) VNA = Value().tag(private=True, desc="a link to the session of the instrument.") ch1 = Value().tag(private=True, desc="link to main instrument channel") measS11 = Value().tag(private=True, desc="link to measurement S11") measS12 = Value().tag(private=True, desc="link to measurement S12") measS21 = Value().tag(private=True, desc="link to measurement S21") measS22 = Value().tag(private=True, desc="link to measurements S22") trace_plot = Typed(Plotter).tag(private=True) def update_trace_plot(self): self.trace_plot.plot_dict["trace_mag S21"].clt.set_xdata(self.freq) S21dB = absolute(self.S21) #20.0*log10(absolute(self.S21)) print shape(self.freq) print shape(S21dB) if self.simulate: S21dB = absolute(self.S21) self.trace_plot.plot_dict["trace_mag S21"].clt.set_ydata(S21dB) if min(self.freq) != max(self.freq): self.trace_plot.set_xlim(min(self.freq), max(self.freq)) if min(S21dB) != max(S21dB): self.trace_plot.set_ylim(min(S21dB), max(S21dB)) self.trace_plot.draw() def _default_trace_plot(self): tp = Plotter(name=self.name + ' trace plot') print self.freq print absolute(self.S21) print shape(self.freq) print shape(self.S21) tp.line_plot('trace_mag S21', self.freq, absolute(self.S21)) #S20.0*log10(absolute(self.S21))) return tp doS11 = Bool(False) doS21 = Bool(False) doS12 = Bool(False) doS22 = Bool(False) do_freq = Bool(False) timeout = Int(10000) #clear_average=Bool(True).tag(sub=True) 
@observe("doS11", "doS21", "doS21", "doS22") def observe_doSs(self, change): log_debug(change) if change['type'] == 'update': Sname = change["name"][2:] if change.get("oldvalue", False): log_debug('del old meas') log_debug(getattr(self, 'meas' + Sname).Delete()) self.error_query() elif change["value"]: ReceiverPort = int(Sname[1]) SourcePort = int(Sname[2]) log_debug(ReceiverPort, SourcePort) if Sname not in self.query_measurements().values(): self.writer("CALC:PAR:DEF:EXT MEAS{0},{0}".format(Sname)) log_debug( getattr(self, 'meas' + Sname).Create(ReceiverPort, SourcePort)) self.error_query() print self.query_measurements() sleep(1) getattr(self, 'meas' + Sname).Format = 0 #self.error_query() def query_measurements(self): sAll = self.asker("CALC:PAR:CAT:EXT?")[1:-1] if self.simulate: sAll = 'NO CATALOG' if sAll == 'NO CATALOG': return {} t = sAll.split(",") return {t[i]: t[i + 1] for i in range(0, len(t), 2)} @reader def reader(self): """calls VNA ReadString""" return self.VNA.System2.ReadString() @writer def writer(self, VNA_string): """calls VNA WriteString using string formatting by kwargs""" self.VNA.System2.WriteString(VNA_string) @asker def asker(self, VNA_string): """calls VNA WriteString followed by VNA ReadString""" self.writer(VNA_string) return self.reader() @tag_Callable(do=True) def VNA_abort(self): self.VNA.Channels.Abort() self.writer("CALC:PAR:DEL:ALL") self.VNA.Status.Clear() #self.ch1.TriggerMode=TriggerModeDict['Hold'] @booter def booter(self, address): self.VNA = CreateObject("AgilentNA.AgilentNA") init_list = [ 'Simulate={0}'.format(self.simulate), #'QueryInstrStatus=true' ] init_str = ','.join(init_list) print init_str log_debug(self.VNA.Initialize(self.address, False, False, init_str)) self.ch1 = self.VNA.Channels["Channel1"] if not self.simulate: print self.VNA.System2.IO.IO.LockRsrc() if get_tag(self, 'abort', 'do', False): self.VNA_abort() #self.VNA_write("CALC:PAR:DEL:ALL") self.error_query() 
#log_debug(self.VNA.System2.WaitForOperationComplete(self.timeout)) #self.error_query() self.measS11 = self.ch1.Measurements["Measurement1"] self.measS21 = self.ch1.Measurements["Measurement2"] self.measS12 = self.ch1.Measurements["Measurement3"] self.measS22 = self.ch1.Measurements["Measurement4"] if self.simulate: self.stop_freq = 4.0e9 #sleep(1) #self.measS11.Create(1, 1) #self.error_query() #sleep(1) #self.measS11.Delete() #self.synchronize() self.error_query() def synchronize(self): self.receive('points') @tag_Callable() def error_query(self): for n in range(11): err_query = self.VNA.Utility.ErrorQuery() log_debug(err_query, n=3) if err_query[0] == 0: break def clear_all_traces(self): self.VNA.System2.WriteString("CALC:PAR:DEL:ALL") # def close_measurement(self, key): # try: # self.meas_dict[key].Delete() # except COMError as e: # log_debug(e) # # def close_all_measurements(self): # for key in self.meas_dict: # self.close_measurement(key) @closer def closer(self): for key in self.S_names: if getattr(self, 'do' + key): getattr(self, 'meas' + key).Delete() #self.VNA_abort() if not self.simulate: print self.VNA.System2.IO.IO.UnlockRsrc() log_debug(self.VNA.Close()) for n in self.loop(10): if self.VNA.Release() == 0: break #VNA.Channels["Channel1"].StimulusRange.Span #VNA.Channels["Channel1"].StimulusRange.Center #VNA.System2.WriteString(":OUTP 0") @tag_Callable() def clear_average(self): self.ch1.ClearAverage() # def acq2(self): # log_debug('acq2 started') # self.ch1.TriggerMode=1 # self.ch1.ClearAverage() # for n in range(self.averages): # self.ch1.TriggerSweep(1000) # self.VNA.System2.WaitForOperationComplete(10000) # log_debug('acq2 stopped') # # def acq(self): # log_debug('acq started') # self.VNA_write("SENSE:SWE:GRO:COUN {}".format(self.averages)) # # self.ch1.ClearAverage() # self.VNA.System2.WriteString("SENSE:SWE:MODE GROUPS") # getattr(self, 'meas'+'S21').Trace.AutoScale() # try: # log_debug(self.VNA.System2.WaitForOperationComplete(30000)) # #print 
self.error_query() # except Exception as e: # raise Exception(str(e)) # log_debug('acq stopped') @tag_Callable() def acquire_data(self): self.send(trigger_mode='Hold') #if get_tag(self, "clear_average", "do"): self.clear_average() if self.averaging: numTriggers = self.averages else: numTriggers = 1 for n in self.loop(numTriggers): self.ch1.TriggerSweep(1000) self.VNA.System2.WaitForOperationComplete(self.timeout) if n == 9: for key in self.S_names: if getattr(self, "do" + key): getattr(self, 'meas' + key).Trace.AutoScale() for key in self.S_names: if getattr(self, "do" + key): data = array(getattr(self, 'meas' + key).FetchComplex()) setattr(self, key, data[0] + 1.0j * data[1]) #log_debug(getattr(self, key)) if self.do_freq: self.freq = getattr(self, 'meas' + key).FetchX() if not self.do_freq: self.freq = linspace(self.start_freq, self.stop_freq, self.points) self.update_trace_plot() #print list(frq)==list(self.freq) start_freq = Float(4.0e9).tag(high=50.0e9, low=10.0e6, label='VNA start frequency', unit2='GHz', aka="self.ch1.StimulusRange.Start", show_value=True) stop_freq = Float(5.0e9).tag(low=10.0e6, high=50.0e9, label='VNA stop frequency', unit2='GHz', aka="self.ch1.StimulusRange.Stop", show_value=True) points = Int(1601).tag(low=1, high=20001, aka="self.ch1.Points") averages = Int(1).tag(low=1, high=50000, aka="self.ch1.AveragingFactor") averaging = Bool(True).tag(aka="self.ch1.Averaging") power = Float(-27.0).tag(low=-27.0, high=0.0, display_unit='dBm', aka="self.ch1.SourcePower.Level[1]") #electrical_delay = Float(0).tag(label='VNA electrical delay', unit = 's', # GPIB_writes=":CALCulate1:CORRection:EDELay:TIME {electrical_delay}") #subtract_background = Bool(False) #measurement_type = Enum('S11', 'S12', 'S21', 'S22') #start = Button() #adjust_electrical_delay = Button() #acquire_background = Button() freq = Array().tag(label='Frequency', sub=True) S11 = Array().tag(sub=True) S12 = Array().tag(sub=True) S21 = Array().tag(sub=True) S22 = Array().tag(sub=True) 
trigger_mode = Enum('Continuous', 'Hold').tag(mapping=TriggerModeDict, aka="self.ch1.TriggerMode")
class Employer(Person):
    """A person who runs a company."""

    #: Name of the company this employer runs.
    company_name = Unicode()
class InkcutPlugin(Plugin):
    """Core Inkcut workbench plugin.

    Registers all dependent plugin manifests, builds the dock area layout
    from DockItem extension points, and aggregates SettingsPage
    contributions. Dock items and settings pages are refreshed whenever
    their extension points change.
    """

    #: Project site
    wiki_page = Unicode("https://www.codelv.com/projects/inkcut")

    #: For anything that needs to update every second
    clock = Instance(Clock, ())

    #: Dock items to add
    dock_items = List(DockItem)
    #: Computed dock area layout (rebuilt by _refresh_layout).
    dock_layout = Instance(AreaLayout)
    #: Dock style; persisted via config=True.
    dock_style = Enum(*reversed(ALL_STYLES)).tag(config=True)

    #: Settings pages to add
    settings_pages = List(extensions.SettingsPage)

    #: Current settings page
    settings_page = Instance(extensions.SettingsPage)

    #: Internal settings models: maps model type -> settings view factory.
    settings_typemap = Dict()
    settings_model = Instance(Atom)

    def start(self):
        """ Load all plugins, refresh the dock area and then restore
        state from the disk (if any).

        """
        self.set_app_name()
        self.set_window_icon()
        if sys.platform == 'darwin':
            # OSX native menubar does not play well with the workbench menus.
            self.fix_menubar()
        self.load_plugins()
        self._refresh_dock_items()
        self._refresh_settings_pages()
        super(InkcutPlugin, self).start()

    def load_plugins(self):
        """ Load all the plugins Inkcut is dependent on """
        w = self.workbench
        plugins = []
        with enaml.imports():
            #: TODO autodiscover these
            from inkcut.preview.manifest import PreviewManifest
            from inkcut.job.manifest import JobManifest
            from inkcut.device.manifest import DeviceManifest
            from inkcut.joystick.manifest import JoystickManifest
            from inkcut.console.manifest import ConsoleManifest
            from inkcut.monitor.manifest import MonitorManifest
            plugins.append(PreviewManifest)
            plugins.append(JobManifest)
            plugins.append(DeviceManifest)
            plugins.append(JoystickManifest)
            plugins.append(ConsoleManifest)
            plugins.append(MonitorManifest)

        #: Load any plugins defined as extension points
        for entry_point in pkg_resources.iter_entry_points(
                group='inkcut.plugin', name=None):
            plugins.append(entry_point.load())

        #: Install all of them
        for Manifest in plugins:
            w.register(Manifest())

    def _bind_observers(self):
        """ Setup the observers for the plugin.

        """
        super(InkcutPlugin, self)._bind_observers()
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)
        point.observe('extensions', self._refresh_dock_items)
        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)
        point.observe('extensions', self._refresh_settings_pages)

    def _unbind_observers(self):
        """ Remove the observers for the plugin.

        """
        super(InkcutPlugin, self)._unbind_observers()
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)
        point.unobserve('extensions', self._refresh_dock_items)
        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)
        point.unobserve('extensions', self._refresh_settings_pages)

    # -------------------------------------------------------------------------
    # Dock item extension API
    # -------------------------------------------------------------------------
    def create_new_area(self):
        """ Create the dock area

        Returns the DockView bound to this workbench and plugin.
        """
        with enaml.imports():
            from .dock import DockView
        area = DockView(workbench=self.workbench, plugin=self)
        return area

    def _refresh_dock_items(self, change=None):
        """ Reload all DockItems registered by any Plugins

        Any plugin can add to this list by providing a DockItem extension
        in their PluginManifest.

        """
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.DOCK_ITEM_POINT)

        #: Layout spec
        layout = {'main': [], 'left': [], 'right': [], 'bottom': [], 'top': []}

        dock_items = []
        for extension in sorted(point.extensions, key=lambda ext: ext.rank):
            for declaration in extension.get_children(extensions.DockItem):
                # Load the plugin
                plugin_id = declaration.plugin_id
                log.info("Loading plugin {}".format(plugin_id))
                plugin = workbench.get_plugin(plugin_id)

                # Check if it's known dependencies are met
                if not plugin.is_supported():
                    log.warning(
                        "Plugin {} reported unsupported".format(plugin_id))
                    continue

                # Create the item
                DockItem = declaration.factory()
                item = DockItem(plugin=plugin, closable=False)

                # Add to our layout
                layout[declaration.layout].append(item.name)

                # Save it
                dock_items.append(item)

        #: Update items
        log.debug("Updating dock items: {}".format(dock_items))
        self.dock_items = dock_items
        self._refresh_layout(layout)

    def _refresh_layout(self, layout):
        """ Create the layout for all the plugins

        """
        # NOTE(review): assumes at least one item landed in 'main' —
        # items[0] raises IndexError otherwise; confirm a 'main' DockItem
        # is always registered.
        items = layout.pop('main')
        main = HSplitLayout(*items) if len(items) > 1 else items[0]
        dockbars = [
            DockBarLayout(*items, position=side)
            for side, items in layout.items() if items
        ]

        #: Update layout
        self.dock_layout = AreaLayout(main, dock_bars=dockbars)

    # -------------------------------------------------------------------------
    # Settings page extension API
    # -------------------------------------------------------------------------
    def _default_settings_page(self):
        return self.settings_pages[0]

    def _observe_settings_page(self, change):
        log.debug("Settings page: {}".format(change))

    def _refresh_settings_pages(self, change=None):
        """ Reload all SettingsPages registered by any Plugins

        Any plugin can add to this list by providing a SettingsPage extension
        in their PluginManifest.

        """
        workbench = self.workbench
        point = workbench.get_extension_point(extensions.SETTINGS_PAGE_POINT)
        settings_pages = []
        typemap = {}
        for extension in sorted(point.extensions, key=lambda ext: ext.rank):
            for d in extension.get_children(extensions.SettingsPage):
                #: Save it
                settings_pages.append(d)

                #: Update the type map
                plugin = self.workbench.get_plugin(d.plugin_id)
                t = type(getattr(plugin, d.model) if d.model else plugin)
                typemap[t] = d.factory()

        #: Update items
        log.debug("Updating settings pages: {}".format(settings_pages))
        self.settings_typemap = typemap
        self.settings_pages = sorted(settings_pages, key=lambda p: p.name)

    # -------------------------------------------------------------------------
    # Utility methods
    # -------------------------------------------------------------------------
    def set_app_name(self):
        """ Set the application name """
        ui = self.workbench.get_plugin('enaml.workbench.ui')
        try:
            qt_app = ui._application._qapp
            qt_app.setApplicationName('inkcut')
            # setApplicationDisplayName only exists on newer Qt versions.
            if hasattr(qt_app, 'setApplicationDisplayName'):
                qt_app.setApplicationDisplayName('Inkcut')
        except Exception as e:
            log.error('Failed to set app name: {}'.format(e))

    def set_window_icon(self):
        """ Set the main application window icon """
        ui = self.workbench.get_plugin('enaml.workbench.ui')
        try:
            icon = get_cached_qicon(load_icon('logo'))
            ui.window.proxy.widget.setWindowIcon(icon)
        except Exception as e:
            log.error('Failed to set window icon: {}'.format(e))

    def fix_menubar(self):
        """ Disable native menu on OSX """
        ui = self.workbench.get_plugin('enaml.workbench.ui')
        for c in ui.window.children:
            if isinstance(c, MenuBar):
                c.proxy.widget.setNativeMenuBar(False)
                break
class SequenceConfig(Declarator):
    """Declarator used to contribute a Sequence configurator.

    The config class and its view are looked up from dotted paths and
    stored in the collector's contributions keyed by the sequence class
    they support.
    """

    #: path of the config object. Paths should be dot separed and the class
    #: name preceded by ':'.
    #: The path to any parent GroupDeclarator will be prepended to it.
    config = d_(Unicode())

    #: Path to the view object associated to the sequence.
    #: The path of any parent GroupDeclarator will be prepended.
    view = d_(Unicode())

    #: Id of the sequence computed from the top-level package and the sequence
    #: name
    id = Property(cached=True)

    @d_func
    def get_sequence_class(self):
        """Return the base sequence class this config is used for.

        """
        raise NotImplementedError()

    def register(self, collector, traceback):
        """Collect config and view and add infos to the DeclaratorCollector
        contributions member under the supported task name.

        Errors are reported through the ``traceback`` dict instead of
        raising.
        """
        # Determine the path to the config and view.
        path = self.get_path()
        try:
            c_path, config = (path + '.' + self.config
                              if path else self.config).split(':')
            v_path, view = (path + '.' + self.view
                            if path else self.view).split(':')
        except ValueError:
            msg = 'Incorrect %s (%s), path must be of the form a.b.c:Class'
            # If the config path is well formed the failure came from the
            # view path.
            if ':' in self.config:
                msg = msg % ('view', self.view)
            else:
                msg = msg % ('config', self.config)
            traceback[self.id] = msg
            return

        try:
            s_cls = self.get_sequence_class()
        except Exception:
            msg = 'Failed to get supported sequence : %s'
            traceback[self.id] = msg % format_exc()
            return

        # Check that the configurer does not already exist.
        if self.id in traceback:
            i = 1
            while True:
                err_id = '%s_duplicate%d' % (self.id, i)
                if err_id not in traceback:
                    break
                # FIX: previously i was never incremented, causing an
                # infinite loop when '<id>_duplicate1' already existed.
                i += 1
            msg = 'Duplicate definition of {}, found in {}'
            traceback[err_id] = msg.format(s_cls, c_path)
            return

        if s_cls in collector.contributions:
            msg = 'Duplicate definition for {}, found in {}'
            traceback[self.id] = msg.format(s_cls, c_path)
            return

        infos = ConfigInfos()

        # Get the config class.
        c_cls = import_and_get(c_path, config, traceback, self.id)
        if c_cls is None:
            return

        try:
            infos.cls = c_cls
        except TypeError:
            # FIX: message previously read 'should a subclass' (typo).
            msg = '{} should be a subclass of AbstractConfig.\n{}'
            traceback[self.id] = msg.format(c_cls, format_exc())
            return

        # Get the config view.
        view = import_and_get(v_path, view, traceback, self.id)
        if view is None:
            return

        try:
            infos.view = view
        except TypeError:
            # FIX: message previously read 'should a subclass' and was
            # formatted with c_cls instead of the offending view class.
            msg = '{} should be a subclass of AbstractConfigView.\n{}'
            traceback[self.id] = msg.format(view, format_exc())
            return

        collector.contributions[s_cls] = infos

        self.is_registered = True

    def unregister(self, collector):
        """Remove contributed infos from the collector.

        """
        if self.is_registered:
            try:
                del collector.contributions[self.get_sequence_class()]
            except KeyError:
                pass

            self.is_registered = False

    def __str__(self):
        """Nice string representation giving attributes values.

        """
        msg = cleandoc('''{} with:
                       config: {}, view : {}''')
        return msg.format(type(self).__name__, self.config, self.view)

    def _get_id(self):
        """Create the unique identifier of the config using the top level
        package and the class name.

        """
        if ':' in self.config:
            path = self.get_path()
            c_path, config = (path + '.' + self.config
                              if path else self.config).split(':')
            # Build the task id by assembling the package name and the class
            # name
            return c_path.split('.', 1)[0] + '.' + config

        else:
            return self.config
class Lyzer(TA45_Fund):
    """Analysis helper for a TA45 power-sweep measurement stored in a
    Labber-style HDF5 file (frequency x yoko x power data cube)."""

    #: HDF5 reader holding the path to the data file.
    rd_hdf = Typed(TA45_Read)
    comment = Unicode().tag(read_only=True, spec="multiline")
    #: Room-temperature attenuation (dB) on the input line.
    rt_atten = Float(40)
    #: Room-temperature gain (dB) on the output line (two 23 dB amps).
    rt_gain = Float(23 * 2)
    frequency = Array().tag(unit="GHz", plot=True, label="Frequency")
    yoko = Array().tag(unit="V", plot=True, label="Yoko")
    #: Complex S21 data cube, shape (frequency, power, yoko) after reshape.
    Magcom = Array().tag(private=True)
    pwr = Array()
    probe_frq = Float().tag(unit="GHz", label="Probe frequency", read_only=True)
    probe_pwr = Float().tag(label="Probe power", read_only=True, display_unit="dBm/mW")
    #: Frequency index used by the derived MagdB/Phase/MagAbs slices.
    frqind = Int(1)

    @tag_Property(display_unit="dB", plot=True)
    def MagdB(self):
        # Slice at the selected frequency index, scaled to dB units.
        return self.Magcom[self.frqind, :, :] / dB

    @tag_Property(plot=True)
    def Phase(self):
        # Phase at the selected frequency index.
        return angle(self.Magcom[self.frqind, :, :])  #-mean(self.Magcom[:, self.powind, 297:303], axis=1, keepdims=True))

    @tag_Property(plot=True)
    def MagAbs(self):
        #return absolute(self.Magcom[:, :])
        #bg=mean(self.Magcom[self.frqind, :, 0:1], axis=1, keepdims=True)
        # Background is taken from column 299 — presumably a flux point
        # where the feature is absent; TODO confirm.
        bg = mean(self.Magcom[self.frqind, :, 299:300], axis=1, keepdims=True)
        return absolute(self.Magcom[self.frqind, :, :] - bg)  #mean(self.Magcom[self.frqind, :, 299:300], axis=1, keepdims=True))

    def _default_rd_hdf(self):
        # Default data file for this analysis.
        return TA45_Read(main_file="Data_0227/S4A4_TA45_testpwrswp.hdf5")

    def read_data(self):
        """Load comment, probe settings, power axis and the complex S21
        cube from the HDF5 file and populate the corresponding members."""
        with File(self.rd_hdf.file_path, 'r') as f:
            print f["Traces"].keys()
            self.comment = f.attrs["comment"]
            print f["Instrument config"].keys()
            self.probe_frq = f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs[
                    "Start frequency"]
            self.probe_pwr = f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs[
                    "Output power"]
            print f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs
            # print f["Data"]["Channel names"][:]
            # Raw traces: real/imag pairs per sweep point.
            Magvec = f["Traces"]["RS VNA - S21"]  #[:]
            data = f["Data"]["Data"]
            print shape(data)
            # self.yoko = data[0, 1, :].astype(float64)
            self.pwr = data[:, 0, 0].astype(float64)
            print self.yoko
            # Frequency axis start/step stored in the companion t0dt trace.
            fstart = f["Traces"]['RS VNA - S21_t0dt'][0][0]
            fstep = f["Traces"]['RS VNA - S21_t0dt'][0][1]
            print shape(Magvec)
            sm = shape(Magvec)[0]
            sy = shape(data)
            s = (sm, sy[0], sy[2])
            print s
            # Combine real/imag columns into complex values, then reshape
            # (Fortran order matches the sweep nesting in the file).
            Magcom = Magvec[:, 0, :] + 1j * Magvec[:, 1, :]
            Magcom = reshape(Magcom, s, order="F")
            self.frequency = linspace(fstart, fstart + fstep * (sm - 1), sm)
            print shape(Magcom)
            self.Magcom = squeeze(Magcom)
class Shape(Declarator):
    """Declarator used to contribute a Shape

    Resolves the shape class and its view from dotted paths and stores
    the resulting ShapeInfos in the collector, keyed by the shape id.
    """

    #: path of the shape object. Paths should be dot separed and the class
    #: name preceded by ':'.
    #: The path to any parent GroupDeclarator will be prepended to it.
    shape = d_(Unicode())

    #: Path to the view object associated to the shape.
    #: The path of any parent GroupDeclarator will be prepended.
    view = d_(Unicode())

    #: Metadata associated to the shape.
    metadata = d_(Dict())

    #: Id of the shape computed from the top-level package and the shape
    #: name
    id = Property(cached=True)

    def register(self, collector, traceback):
        """Collect shape and view and add infos to the DeclaratorCollector
        contributions member.

        The group declared by a parent if any is taken into account. All
        Interface children are also registered.

        """
        # Build the shape id by assembling the package name and the class
        # name.
        shape_id = self.id

        # If the shape only specifies a name update the matching infos.
        if ':' not in self.shape:
            if self.shape not in collector.contributions:
                # Target not registered yet: retry later.
                collector._delayed.append(self)
                return

            infos = collector.contributions[shape_id]
            infos.metadata.update(self.metadata)
            self.is_registered = True
            return

        # Determine the path of shape and view
        path = self.get_path()
        try:
            s_path, shape = (path + '.' + self.shape
                             if path else self.shape).split(':')
            v_path, view = (path + '.' + self.view
                            if path else self.view).split(':')
        except ValueError:
            # NOTE(review): if self.shape itself contains more than one
            # ':' the first split raises and s_path/shape are unbound,
            # turning this handler into a NameError — verify upstream
            # validation guarantees a single ':'.
            msg = 'Incorrect %s (%s), path must be of the form a.b.c:Class'
            err_id = s_path.split('.', 1)[0] + '.' + shape
            msg = msg % ('view', self.view)
            traceback[err_id] = msg
            return

        # Check that the shape does not already exist.
        if shape_id in collector.contributions or shape_id in traceback:
            i = 1
            while True:
                err_id = '%s_duplicate%d' % (shape_id, i)
                # NOTE(review): i is never incremented — infinite loop if
                # '<id>_duplicate1' is already present in traceback.
                if err_id not in traceback:
                    break
            msg = 'Duplicate definition of {}, found in {}'
            traceback[err_id] = msg.format(shape, s_path)
            return

        infos = ShapeInfos(metadata=self.metadata)

        # Get the sequence class.
        s_cls = import_and_get(s_path, shape, traceback, shape_id)
        if s_cls is None:
            return

        try:
            infos.cls = s_cls
        except TypeError:
            msg = '{} should be a subclass of AbstractShape. \n{}'
            traceback[shape_id] = msg.format(s_cls, format_exc())
            return

        # Get the shape view.
        s_view = import_and_get(v_path, view, traceback, shape_id)
        if s_view is None:
            return

        try:
            infos.view = s_view
        except TypeError:
            # NOTE(review): stray ',' before '.' in this message.
            msg = '{} should be a subclass of AbstractShapeView,.\n{}'
            traceback[shape_id] = msg.format(s_view, format_exc())
            return

        # Add group and add to collector
        infos.metadata['group'] = self.get_group()
        collector.contributions[shape_id] = infos

        self.is_registered = True

    def unregister(self, collector):
        """Remove contributed infos from the collector.

        """
        if self.is_registered:
            # If we were just extending the shape, clean the metadata.
            if ':' not in self.shape:
                if self.id in collector.contributions:
                    infos = collector.contributions[self.id]
                    for k in self.metadata:
                        del infos.metadata[k]
                return

            # Remove infos.
            try:
                del collector.contributions[self.id]
            except KeyError:
                pass

            self.is_registered = False

    def __str__(self):
        """Nice string representation giving attributes values.

        """
        msg = cleandoc('''{} with:
                       shape: {}, view : {} and metadata: {} declaring :
                       {}''')
        return msg.format(type(self).__name__, self.shape, self.view,
                          self.metadata,
                          '\n'.join(' - {}'.format(c) for c in self.children))

    def _get_id(self):
        """Create the unique identifier of the sequence using the top level
        package and the class name.

        """
        if ':' in self.shape:
            path = self.get_path()
            s_path, shape = (path + '.' + self.shape
                             if path else self.shape).split(':')
            # Build the shape id by assembling the package name and the
            # class name
            return s_path.split('.', 1)[0] + '.' + shape

        else:
            return self.shape
class RootTask(ComplexTask):
    """Special task which is always the root of a measurement.

    On this class and this class only perform can and should be called
    directly.

    """
    #: Path to which log infos, preferences, etc should be written by default.
    default_path = Unicode('').tag(pref=True)

    #: Should the execution be profiled.
    should_profile = Bool().tag(pref=True)

    #: Dict storing data needed at execution time (ex: drivers classes)
    run_time = Dict()

    #: Inter-process event signaling the task it should stop execution.
    should_stop = Typed(Event)

    #: Inter-process event signaling the task it should pause execution.
    should_pause = Typed(Event)

    #: Inter-process event signaling the task is paused.
    paused = Typed(Event)

    #: Inter-process event signaling the main thread is done, handling the
    #: measurement resuming, and hence notifying the task execution has
    #: resumed.
    resumed = Typed(Event)

    #: Dictionary used to store errors occuring during performing.
    errors = Dict()

    #: Dictionary used to store references to resources that may need to be
    #: shared between task and which must be released when all tasks have been
    #: performed.
    #: Each key is associated to a different kind of resource. Resources must
    #: be stored in SharedDict subclass.
    #: By default three kind of resources exists:
    #:
    #: - threads : used threads grouped by pool.
    #: - active_threads : currently active threads.
    #: - instrs : used instruments referenced by profiles.
    #: - files : currently opened files by path.
    #:
    resources = Dict()

    #: Counter keeping track of the active threads.
    active_threads_counter = Typed(SharedCounter, kwargs={'count': 1})

    #: Counter keeping track of the paused threads.
    paused_threads_counter = Typed(SharedCounter, ())

    #: Thread from which the perform method has been called.
    thread_id = Int()

    # Those must not be modified so freeze them
    name = Constant('Root')
    depth = Constant(0)
    path = Constant('root')
    database_entries = set_default({'default_path': ''})

    # HINT: RootTask instance tracking code
    # __slots__ = ('__weakref__',)

    def __init__(self, *args, **kwargs):
        """Set up preferences/database, register self and make this task
        its own root and parent before wiring the pause bookkeeping."""
        # NOTE(review): indent_type shown as a single space — may have
        # been multiple spaces before whitespace was collapsed; verify.
        self.preferences = ConfigObj(indent_type=' ', encoding='utf-8')
        self.database = TaskDatabase()
        super(RootTask, self).__init__(*args, **kwargs)
        self.register_in_database()
        self.root = self
        self.parent = self
        # Track thread counts to detect the fully-paused state (_state).
        self.active_threads_counter.observe('count', self._state)
        self.paused_threads_counter.observe('count', self._state)

        # HINT: RootTask instance tracking code
        # ROOTS.add(self)
        # print(len(ROOTS))

    def check(self, *args, **kwargs):
        """Check that the default path is a valid directory.

        """
        traceback = {}
        test = True
        if not os.path.isdir(self.default_path):
            test = False
            traceback[self.path + '/' + self.name] =\
                'The provided default path is not a valid directory'
        self.write_in_database('default_path', self.default_path)
        check = super(RootTask, self).check(*args, **kwargs)
        test = test and check[0]
        traceback.update(check[1])

        return test, traceback

    @smooth_crash
    def perform(self):
        """Run sequentially all child tasks, and close ressources.

        Returns True on success, False if an unhandled exception occurred
        or a stop was requested. Optionally profiles the run and dumps
        stats next to the measurement data.
        """
        result = True
        self.thread_id = threading.current_thread().ident

        self.prepare()

        pr = Profile() if self.should_profile else None

        try:
            if pr:
                pr.enable()
            for child in self.children:
                child.perform_()
        except Exception:
            log = logging.getLogger(__name__)
            msg = 'The following unhandled exception occured :\n'
            log.exception(msg)
            self.should_stop.set()
            result = False
            self.errors['unhandled'] = msg + format_exc()
        finally:
            if pr:
                pr.disable()
                meas_name = self.get_from_database('meas_name')
                meas_id = self.get_from_database('meas_id')
                path = os.path.join(self.default_path,
                                    meas_name + '_' + meas_id + '.prof')
                pr.dump_stats(path)

            # Always release resources, even after a crash.
            self.release_resources()

        if self.should_stop.is_set():
            result = False

        return result

    def prepare(self):
        """Optimise the database for running state and prepare children.

        """
        # We cannot assume that the checks were run (in the case of a
        # forced-enqueueing) so we need to make sure we set the default path.
        self.write_in_database('default_path', self.default_path)
        self.database.prepare_to_run()
        super().prepare()

    def release_resources(self):
        """Release all the resources used by tasks.

        """
        # Release by priority to be sure that their is no-conflict
        # (Threads vs instruments for example)
        for resource in sorted(self.resources.values(),
                               key=attrgetter('priority')):
            resource.release()

    def register_in_database(self):
        """Don't create a node for the root task.

        """
        BaseTask.register_in_database(self)

        # ComplexTask defines children so we always get something
        for child in self.gather_children():
            child.register_in_database()

    @classmethod
    def build_from_config(cls, config, dependencies):
        """Create a new instance using the provided infos for initialisation.

        Parameters
        ----------
        config : dict(str)
            Dictionary holding the new values to give to the members in string
            format, or dictionnary like for instance with prefs.

        dependencies : dict
            Dictionary holding the necessary classes needed when rebuilding.
            This is assembled by the TaskManager.

        Returns
        -------
        task : RootTask
            Newly created and initiliazed task.

        Notes
        -----
        This method is fairly powerful and can handle a lot of cases so
        don't override it without checking that it works.

        """
        task = super(RootTask, cls).build_from_config(config, dependencies)
        task._post_setattr_root(None, task)
        task.register_in_database()
        task.register_preferences()
        return task

    def get_used_names(self):
        """Return the list of all names used in the tree

        Returns
        -------
        names : List(str)
            List of all the names used in the tree.

        """
        names = []
        for i in self.traverse():
            # Effectively ignores TaskInterface objects
            if hasattr(i, 'name'):
                names.append(i.name)
        return names

    # =========================================================================
    # --- Private API ---------------------------------------------------------
    # =========================================================================

    def _default_task_id(self):
        # NOTE(review): intentionally uses ComplexTask.__name__ (not
        # RootTask) — presumably so config rebuilds resolve against the
        # ComplexTask id; confirm against the TaskManager.
        pack, _ = self.__module__.split('.', 1)
        return pack + '.' + ComplexTask.__name__

    def _child_path(self):
        """Overriden here to not add the task name.

        """
        return self.path

    def _task_entry(self, entry):
        """Do not prepend the name of the root task.

        """
        return entry

    def _state(self, change):
        """Determine whether the task is paused or not.

        This is done by checking the number of active and paused thread and
        setting accordingly the paused event.

        """
        p_count = self.paused_threads_counter.count
        a_count = self.active_threads_counter.count
        if a_count == p_count:
            self.paused.set()

        if p_count == 0:
            self.paused.clear()

    def _default_resources(self):
        """Default resources.

        """
        return {'threads': ThreadPoolResource(),
                # Reduce priority to stop through the thread resource.
                # This is far less likely to cause a deadlock.
                'active_threads': ThreadPoolResource(priority=0),
                'instrs': InstrsResource(),
                'files': FilesResource()}
class Lyzer(TA88_Fund):
    """Analysis helper for a TA88 coupling-search measurement stored in a
    Labber-style HDF5 file."""

    #: HDF5 reader holding the path to the data file.
    rd_hdf = Typed(TA88_Read)
    comment = Unicode().tag(read_only=True, spec="multiline")
    #: Room-temperature attenuation (dB) on the input line.
    rt_atten = Float(40)
    #: Room-temperature gain (dB) on the output line (two 23 dB amps).
    rt_gain = Float(23 * 2)
    frequency = Array().tag(unit="GHz", plot=True, label="Frequency")
    yoko = Array().tag(unit="V", plot=True, label="Yoko")
    #: Complex S21 data, squeezed after reshape.
    Magcom = Array().tag(private=True)
    probe_frq = Float().tag(unit="GHz", label="Probe frequency", read_only=True)
    probe_pwr = Float().tag(label="Probe power", read_only=True, display_unit="dBm/mW")
    pind = Int()

    @tag_Property(display_unit="dB", plot=True)
    def MagdB(self):
        # dB magnitude minus a background taken around column 170 —
        # presumably an off-resonance reference; TODO confirm.
        return self.Magcom[:, :] / dB - mean(
            self.Magcom[:, 169:171], axis=1, keepdims=True) / dB

    @tag_Property(plot=True)
    def Phase(self):
        # Phase relative to the same column-170 background.
        return angle(self.Magcom[:, :] -
                     mean(self.Magcom[:, 169:170], axis=1, keepdims=True))

    @tag_Property(plot=True)
    def MagAbs(self):
        #return absolute(self.Magcom[:, :])
        # Power (|S21|^2), no background subtraction.
        return absolute(self.Magcom[:, :])**2  #-mean(self.Magcom[:, 0:1], axis=1, keepdims=True))

    def _default_rd_hdf(self):
        # Default data file for this analysis.
        return TA88_Read(
            main_file="Data_0311/S1A1_TA88_coupling_search_midpeak.hdf5"
        )  #"Data_0312/S4A1_TA88_coupling_search.hdf5")

    def read_data(self):
        """Load comment, probe settings and the complex S21 data from the
        HDF5 file and populate the corresponding members.

        NOTE(review): the two trailing ``with File(...)`` blocks look like
        leftover scratch code — the first ends in a ``return`` so the
        second is unreachable; consider removing them.
        """
        with File(self.rd_hdf.file_path, 'r') as f:
            print f["Traces"].keys()
            self.comment = f.attrs["comment"]
            print f["Instrument config"].keys()
            self.probe_frq = f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs[
                    "Start frequency"]
            self.probe_pwr = f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs[
                    "Output power"]
            print f["Instrument config"][
                'Rohde&Schwarz Network Analyzer - IP: 169.254.107.192, RS VNA at localhost'].attrs
            # print f["Data"]["Channel names"][:]
            # Raw traces: real/imag pairs per sweep point.
            Magvec = f["Traces"]["RS VNA - S21"]  #[:]
            data = f["Data"]["Data"]
            print shape(data)
            # NOTE(review): self.yoko assignment appears commented out here,
            # so self.yoko is never populated by this method — verify.
            # self.yoko = data[:, 0, 0].astype(float64)
            # Frequency axis start/step stored in the companion t0dt trace.
            fstart = f["Traces"]['RS VNA - S21_t0dt'][0][0]
            fstep = f["Traces"]['RS VNA - S21_t0dt'][0][1]
            print shape(Magvec)
            sm = shape(Magvec)[0]
            sy = shape(data)
            s = (sm, sy[0], 1)  #sy[2])
            print s
            # Combine real/imag columns into complex values, then reshape
            # (Fortran order matches the sweep nesting in the file).
            Magcom = Magvec[:, 0, :] + 1j * Magvec[:, 1, :]
            Magcom = reshape(Magcom, s, order="F")
            self.frequency = linspace(fstart, fstart + fstep * (sm - 1), sm)
            print shape(Magcom)
            self.Magcom = squeeze(Magcom)

        # Scratch block: loads a second data set and returns it directly.
        with File(
                "/Users/thomasaref/Dropbox/Current stuff/Logbook/TA210715A88_cooldown210216/Data_0308/S1A4_TA88_coupling_search_midpeak.hdf5",
                "r") as f:
            Magvec = f["Traces"]["RS VNA - S21"]  #[:]
            data = f["Data"]["Data"]
            yoko = data[:, 0, 0].astype(float64)
            fstart = f["Traces"]['RS VNA - S21_t0dt'][0][0]
            fstep = f["Traces"]['RS VNA - S21_t0dt'][0][1]
            sm = shape(Magvec)[0]
            sy = shape(data)
            s = (sm, sy[0], 1)  #sy[2])
            Magcom = Magvec[:, 0, :] + 1j * Magvec[:, 1, :]
            Magcom = reshape(Magcom, s, order="F")
            frequency = linspace(fstart, fstart + fstep * (sm - 1), sm)
            Magcom = squeeze(Magcom)
            return frequency, yoko, Magcom

        # NOTE(review): unreachable — the block above always returns.
        with File(
                "/Users/thomasaref/Dropbox/Current stuff/Logbook/TA210715A45_cooldown270216/Data_0227/S4A4_TA88_wideSC1116unswitched.hdf5",
                "r") as f:
            Magvec = f["Traces"]["RS VNA - S21"]
            fstart = f["Traces"]['RS VNA - S21_t0dt'][0][0]
            fstep = f["Traces"]['RS VNA - S21_t0dt'][0][1]
            sm = shape(Magvec)[0]
            s = (sm, 1, 1)
            Magcom = Magvec[:, 0, :] + 1j * Magvec[:, 1, :]
            Magcom = reshape(Magcom, s, order="F")
            frequency = linspace(fstart, fstart + fstep * (sm - 1), sm)
            Magcom = squeeze(Magcom)
            return frequency, Magcom