def read_RecordData(self, attr):
    """Read handler for the RecordData attribute.

    Fetches the door's macro data and encodes it with the
    ``bz2_pickle`` codec; macro-server errors are re-thrown as sardana
    exceptions.

    :param attr: tango attribute to fill
    """
    try:
        macro_data = self.door.get_macro_data()
        codec = CodecFactory().getCodec('bz2_pickle')
        data = codec.encode(('', macro_data))
    except MacroServerException as mse:  # py3-compatible except syntax
        throw_sardana_exception(mse)
    # NOTE(review): unlike the sibling implementation, this version never
    # calls attr.set_value(*data) - confirm whether that is intended
def push_event(self, *args, **kwargs):
    '''callback method receiving the event'''
    event_data = args[0]
    if event_data.err:
        self._state_buffer = event_data.errors
        self._tango_macro_executor._done_event.set()
    # make sure we get it as string since PyTango 7.1.4 returns a buffer
    # object and json.loads doesn't support buffer objects (only str).
    # list() is required: on Python 3 map() returns an iterator which
    # supports neither indexing nor item assignment below
    attr_value = event_data.attr_value
    v = list(map(str, attr_value.value))
    if not len(v[1]):
        return
    fmt = v[0]
    codec = CodecFactory().getCodec(fmt)
    # make sure we get it as string since PyTango 7.1.4 returns a buffer
    # object and json.loads doesn't support buffer objects (only str)
    v[1] = str(v[1])
    fmt, data = codec.decode(v)
    for macro_status in data:
        state = macro_status['state']
        self._tango_macro_executor._exception = macro_status.get(
            'exc_type')
        if state in self.START_STATES:
            self._tango_macro_executor._started_event.set()
        elif state in self.DONE_STATES:
            self._tango_macro_executor._done_event.set()
        self._tango_macro_executor._state_buffer.append(state)
def filterData(self, data):
    '''reimplementation to decode data using the DevEncoded codecs'''
    if type(data) == tuple:
        from taurus.core.util.codecs import CodecFactory
        codec = CodecFactory().getCodec(data[0])
        try:
            fmt, decoded_data = codec.decode(data)
        except Exception as e:  # py3-compatible syntax; e.message is gone
            self.info('Decoder error: %s', e)
            raise
    else:
        # fix: on non-tuple input the original left decoded_data
        # undefined, causing a NameError below
        decoded_data = data
    try:
        dtype = decoded_data.dtype
        v = decoded_data
    except AttributeError:
        # note that this is potentially expensive
        v = numpy.array(decoded_data)
        dtype = v.dtype
    if dtype not in (float, numpy.double, numpy.int32, numpy.uint16,
                     numpy.int16, numpy.uint8, numpy.int8, bool):
        # note: numpy.uint32 was not included because of
        # https://sourceforge.net/p/tauruslib/tickets/33/
        try:
            self.debug('casting to numpy.int32')
            v = numpy.int32(v)
        except OverflowError:
            raise OverflowError(
                "type %s not supported by guiqwt and cannot be casted to int32" % repr(v.dtype))
    return v
def push_event(self, *args, **kwargs):
    '''callback method receiving the event'''
    event_data = args[0]
    executor = self._tango_macro_executor
    if event_data.err:
        self._state_buffer = event_data.errors
        executor._done_event.set()
    attr_value = event_data.attr_value
    if attr_value is None:
        return
    v = attr_value.value
    if len(v[1]) == 0:
        return
    codec = CodecFactory().getCodec(v[0])
    _, data = codec.decode(v)
    for macro_status in data:
        state = macro_status['state']
        exc_stack = macro_status.get('exc_stack')
        if state == 'exception':
            # collapse the stack lines into a single string
            executor._exception = ''.join(exc_stack)
        if state in self.START_STATES:
            executor._started_event.set()
        elif state in self.DONE_STATES:
            executor._done_event.set()
        executor._state_buffer.append(state)
def _on_measurement_group_changed(self, event_source, event_type,
                                  event_value):
    """Translate a core measurement-group event into a tango attribute
    update pushed via set_attribute."""
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    # core event names use underscores; the tango attribute names do not
    name = event_type.name
    name = name.replace('_', '')
    multi_attr = self.get_device_attr()
    attr = multi_attr.get_attr_by_name(name)
    quality = AttrQuality.ATTR_VALID
    priority = event_type.priority
    error = None
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "acquisitionmode":
        event_value = AcqMode.whatis(event_value)
    elif name == "configuration":
        # the configuration event carries no payload usable directly:
        # re-read the user configuration and JSON-encode it
        cfg = self.measurement_group.get_user_configuration()
        codec = CodecFactory().getCodec('json')
        _, event_value = codec.encode(('', cfg))
    else:
        # generic attributes: unwrap SardanaAttribute (value, timestamp
        # and possible error information)
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
    self.set_attribute(attr, value=event_value, timestamp=timestamp,
                       quality=quality, priority=priority, error=error,
                       synch=False)
def macroStatusReceived(self, s, t, v):
    """Handle a MacroStatus attribute event: decode the payload and
    update the corresponding running macro object.

    :param s: event source
    :param t: event type (only CHANGE_EVT_TYPES are processed)
    :param v: event value (DevEncoded-like pair)
    :return: the decoded list of macro status dictionaries
    """
    if v is None or self._running_macros is None:
        return
    if t not in CHANGE_EVT_TYPES:
        return
    # make sure we get it as string since PyTango 7.1.4 returns a buffer
    # object and json.loads doesn't support buffer objects (only str).
    # list() is required: on Python 3 map() returns an iterator which
    # supports neither indexing nor item assignment below
    v = list(map(str, v.value))
    if not len(v[1]):
        return
    fmt = v[0]  # renamed from 'format' to avoid shadowing the builtin
    codec = CodecFactory().getCodec(fmt)
    # make sure we get it as string since PyTango 7.1.4 returns a buffer
    # object and json.loads doesn't support buffer objects (only str)
    v[1] = str(v[1])
    fmt, data = codec.decode(v)
    for macro_status in data:
        # 'macro_id' instead of 'id' to avoid shadowing the builtin
        macro_id = macro_status.get('id')
        macro = self._running_macros.get(macro_id)
        self._last_running_macro = self._running_macro = macro
        # if we don't have the ID it's because the macro is running a
        # submacro or another client is connected to the same door
        # (shame on him!) and executing a macro - we discard this event
        if macro is not None:
            macro.__dict__.update(macro_status)
    return data
def on_door_changed(self, event_source, event_type, event_value):
    """Translate a core door event into a tango attribute update."""
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    name = event_type.name.lower()
    multi_attr = self.get_device_attr()
    try:
        attr = multi_attr.get_attr_by_name(name)
    except DevFailed:
        # no tango attribute corresponds to this core event - ignore it
        return
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "recorddata":
        fmt, value = event_value  # renamed from 'format' (builtin shadow)
        codec = CodecFactory().getCodec(fmt)
        event_value = codec.encode(('', value))
    else:
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                # NOTE(review): 'error' is computed but never forwarded
                # to set_attribute below (sibling handlers pass
                # error=error) - confirm whether that is intended
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
        if attr.get_data_type() == ArgType.DevEncoded:
            codec = CodecFactory().getCodec('json')
            event_value = codec.encode(('', event_value))
    self.set_attribute(attr, value=event_value, timestamp=timestamp)
def __init__(self, dclass, name):
    """Constructor"""
    PoolElementDevice.__init__(self, dclass, name)
    # codecs used to serialize value and value-reference buffer chunks;
    # codec names come from the sardana custom settings
    self._value_buffer_codec = CodecFactory().getCodec(
        getattr(sardanacustomsettings, "VALUE_BUFFER_CODEC"))
    self._value_ref_buffer_codec = CodecFactory().getCodec(
        getattr(sardanacustomsettings, "VALUE_REF_BUFFER_CODEC"))
def on_macro_server_changed(self, evt_src, evt_type, evt_value):
    """Dispatch core MacroServer events to the Elements/Environment
    tango attributes, invalidating the corresponding caches."""
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    evt_name = evt_type.name.lower()
    multi_attr = self.get_device_attr()
    elems_attr = multi_attr.get_attr_by_name("Elements")
    if evt_name == "poolelementschanged":
        # force the element list cache to be rebuild next time someone
        # reads the element list
        self.ElementsCache = None
        self.set_attribute(elems_attr, value=evt_value.value)
        #self.push_change_event('Elements', *evt_value.value)
    elif evt_name in ("elementcreated", "elementdeleted"):
        # force the element list cache to be rebuild next time someone
        # reads the element list
        self.ElementsCache = None
        elem = evt_value
        value = {}
        if "created" in evt_name:
            key = 'new'
        else:
            key = 'del'
        json_elem = elem.serialize(pool=self.pool.full_name)
        # note: the trailing comma wraps the element in a 1-tuple, so
        # the payload shape matches the multi-element case below
        value[key] = json_elem,
        value = CodecFactory().getCodec('json').encode(('', value))
        self.set_attribute(elems_attr, value=value)
        #self.push_change_event('Elements', *value)
    elif evt_name == "elementschanged":
        # force the element list cache to be rebuild next time someone
        # reads the element list
        self.ElementsCache = None
        ms_name = self.macro_server.full_name
        new_values, changed_values, deleted_values = [], [], []
        for elem in evt_value['new']:
            json_elem = elem.serialize(macro_server=ms_name)
            new_values.append(json_elem)
        for elem in evt_value['change']:
            json_elem = elem.serialize(macro_server=ms_name)
            changed_values.append(json_elem)
        for elem in evt_value['del']:
            json_elem = elem.serialize(macro_server=ms_name)
            deleted_values.append(json_elem)
        value = {"new": new_values, "change": changed_values,
                 "del": deleted_values}
        value = CodecFactory().getCodec('json').encode(('', value))
        self.set_attribute(elems_attr, value=value)
        #self.push_change_event('Elements', *value)
    elif evt_name == "environmentchanged":
        # invalidate the environment cache and push the pickled delta
        self.EnvironmentCache = None
        env_attr = multi_attr.get_attr_by_name("Environment")
        value = CodecFactory().getCodec('pickle').encode(('', evt_value))
        self.set_attribute(env_attr, value=value)
def handle(self, func_name, *args, **kwargs):
    """Encode a function-call request and push it through the door's
    attribute used as communication channel."""
    payload = dict(type='function', func_name=func_name, args=args,
                   kwargs=kwargs)
    encoded = CodecFactory().getCodec(self.format).encode(('', payload))
    self.door.set_attribute(self.attr, value=encoded)
def __init__(self, pseudo_obj, macro):
    """Keep references to the pseudo element and the macro, resolve the
    first physical motor and load the current configuration."""
    self.pseudo = pseudo_obj
    self.macro = macro
    # use the first physical element backing the pseudo motor
    self.motor = macro.getMoveable(self.pseudo.physical_elements[0])
    self.json = CodecFactory().getCodec('json')
    self.update(self.get_configuration())
def set(self, conf, mnt_grps=None):
    """Sets the ExperimentConfiguration dictionary.

    :param conf: experiment configuration dictionary (must contain the
        'MntGrpConfigs' key)
    :param mnt_grps: optional list of measurement group names to apply;
        defaults to all groups present in conf['MntGrpConfigs']
    :raises RuntimeError: if any measurement group could not be
        created/deleted/configured (errors are accumulated and raised
        at the end so one failure does not abort the rest)
    """
    if mnt_grps is None:
        mnt_grps = list(conf['MntGrpConfigs'].keys())
    codec = CodecFactory().getCodec('json')
    msg_error = ''
    for mnt_grp in mnt_grps:
        try:
            mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
            if mnt_grp_cfg is None:  # a mntGrp to be deleted
                pool = self._getPoolOfElement(mnt_grp)
                pool.DeleteElement(mnt_grp)
            else:
                try:
                    # TODO: Fix incorrect implementation. It must check if
                    # the measurement group is part of the Pools
                    # controlled by the MacroServer. Otherwise,
                    # it must raise an exception.
                    mnt_grp_dev = Device(mnt_grp)
                except Exception:
                    # if the mnt_grp did not already exist, create it now
                    chconfigs = getChannelConfigs(mnt_grp_cfg)
                    chnames, chinfos = list(zip(*chconfigs))  # unzipping
                    # We assume that all the channels belong to the same
                    # pool!
                    pool = self._getPoolOfElement(chnames[0])
                    pool.createMeasurementGroup([mnt_grp] + list(chnames))
                    mnt_grp_dev = Device(mnt_grp)
                # TODO when we start using measurement group extension
                # change the code below with the following:
                # mnt_grp.setConfiguration(mnt_grp_cfg)
                data = codec.encode(('', mnt_grp_cfg))[1]
                mnt_grp_dev.write_attribute('configuration', data)
        except PyTango.DevFailed as df:
            # Take the description of the first exception.
            desc = df.args[0].desc
            desc = desc.replace('\r', '')
            desc = desc.replace('\n', '')
            msg_error += 'Measurement Group {0}:\n'\
                         '{1}\n\n'.format(mnt_grp, desc)
    if len(msg_error) > 0:
        raise RuntimeError(msg_error)
    # Send the environment changes
    env = dict(ScanDir=conf.get('ScanDir'),
               ScanFile=conf.get('ScanFile'),
               DataCompressionRank=conf.get('DataCompressionRank', -1),
               ActiveMntGrp=conf.get('ActiveMntGrp'),
               PreScanSnapshot=conf.get('PreScanSnapshot'))
    self._door.putEnvironments(env)
def _processRecordData(self, data):
    """Decode a RecordData value.

    :param data: attribute value whose rvalue is a (format, payload)
        pair; None or empty payloads yield None
    :return: the decoded record data, or None
    """
    if data is None or data.rvalue is None:
        return
    data = data.rvalue
    if len(data[1]) == 0:
        return
    fmt = data[0]  # renamed from 'format' to avoid shadowing the builtin
    codec = CodecFactory().getCodec(fmt)
    return codec.decode(data)
def _processRecordData(self, data):
    """Decode a RecordData value (PyTango attribute variant).

    :param data: attribute value whose .value is a (format, payload)
        pair; None or empty payloads yield None
    :return: the decoded record data, or None
    """
    if data is None or data.value is None:
        return
    # make sure we get it as string since PyTango 7.1.4 returns a buffer
    # object and json.loads doesn't support buffer objects (only str).
    # list() is required: on Python 3 map() returns an iterator which
    # cannot be indexed below
    data = list(map(str, data.value))
    if len(data[1]) == 0:
        return
    fmt = data[0]  # renamed from 'format' to avoid shadowing the builtin
    codec = CodecFactory().getCodec(fmt)
    return codec.decode(data)
def macrodata(self, parameter_s=''):
    """macrodata

    Returns the data produced by the last macro"""
    from taurus.core.util.codecs import CodecFactory
    record_data = get_door().read_attribute("RecordData")
    return CodecFactory().decode(record_data.value)
def read_RecordData(self, attr):
    """Fill the RecordData attribute with the bz2_pickle-encoded macro
    data of the door."""
    try:
        encoded = CodecFactory().getCodec('bz2_pickle').encode(
            ('', self.door.get_macro_data()))
    except MacroServerException as mse:
        throw_sardana_exception(mse)
    attr.set_value(*encoded)
    # workaround for a bug in PyTango (tango-controls/pytango#147),
    # i.e. temporary solution for issue #447
    # (storing reference to data so it can not be destroyed by GC)
    self.__buf_data = encoded
def dec(self, cname=None, data=None, expected=None):
    '''Check that data can be encoded-decoded properly'''
    codec = CodecFactory().getCodec(cname)
    fmt, dec = codec.decode((cname, data))
    if expected is not None:
        msg = ('Wrong data after decoding with %s:\n'
               ' -expected:%s\n -obtained:%s') % (cname, expected, dec)
        if numpy.isscalar(expected):
            matches = dec == expected
        else:
            matches = numpy.all(dec == expected)
        self.assertTrue(matches, msg)
    return fmt, dec
def getData(self):
    '''Returns the data object for the last executed macro

    :return: (obj)
    '''
    return CodecFactory().decode(self._door.RecordData)
def inputReceived(self, s, t, v):
    """Handle an Input attribute event: decode the JSON payload and
    forward it to processInput."""
    if t not in CHANGE_EVT_TYPES:
        return
    if v is None or self._running_macros is None:
        return
    self.processInput(CodecFactory().decode(('json', v.value)))
def write_Synchronization(self, attr):
    """Write handler: decode the JSON synchronization description and
    apply it to the measurement group."""
    raw = attr.get_write_value()
    decoded = CodecFactory().decode(('json', raw), ensure_ascii=True)
    # translate dictionary keys from strings to enumeration members
    self.measurement_group.synchronization = \
        self._synchronization_str2enum(decoded)
def write_SynchDescription(self, attr):
    """Write handler: decode the JSON synch description and apply it to
    the measurement group."""
    raw = attr.get_write_value()
    description = CodecFactory().decode(('json', raw))
    # translate dictionary keys from strings to enumeration members
    description = self._synch_description_str2enum(description)
    self.measurement_group.synch_description = description
def start(user_ns=None):
    """Prepare and return the spock TerminalIPythonApp instance.

    Checks spock requirements, merges command-line arguments into the
    user namespace and may exit the process on fatal conditions.

    :param user_ns: optional user namespace dict to seed IPython with
    :return: the initialized TerminalIPythonApp instance
    """
    # Make sure the log level is changed to warning
    # NOTE(review): instantiating CodecFactory here presumably forces
    # codec registration before the log level drops - confirm
    CodecFactory()
    taurus.setLogLevel(taurus.Warning)
    try:
        check_requirements()
    except exception.SpockMissingRequirement as requirement:
        # missing hard requirement: abort
        print(str(requirement))
        sys.exit(-1)
    except exception.SpockMissingRecommended as recommended:
        # missing soft recommendation: warn and continue
        print(str(recommended))
    user_ns = user_ns or {}
    try:
        user_ns.update(get_args(sys.argv))
    except exception.SpockException as e:
        print(e)
        print('Starting normal IPython console')
    except KeyboardInterrupt:
        print("\nUser pressed Ctrl+C. Exiting...")
        sys.exit()
    except Exception as e:
        print('spock exited with an unmanaged exception: %s' % str(e))
        sys.exit(-2)
    app = TerminalIPythonApp.instance()
    app.initialize()
    #config = get_config()
    return app
def _on_environment_changed(self, evt_src, evt_type, evt_value):
    """Apply an environment change event to the local cache and return
    the (added, removed, changed) key sets."""
    new_keys, deleted_keys, changed_keys = set(), set(), set()
    result = new_keys, deleted_keys, changed_keys
    if evt_type not in CHANGE_EVT_TYPES:
        return result
    env = CodecFactory().decode(evt_value.value)
    for name, val in env.get('new', {}).items():
        self._addEnvironment(name, val)
        new_keys.add(name)
    for name in env.get('del', []):
        self._removeEnvironment(name)
        deleted_keys.add(name)
    for name, val in env.get('change', {}).items():
        # a change is modelled as a remove followed by an add
        self._removeEnvironment(name)
        self._addEnvironment(name, val)
        changed_keys.add(name)
    return result
def write_Configuration(self, attr):
    """Write handler: decode the JSON configuration and hand it to the
    measurement group."""
    cfg = CodecFactory().decode(('json', attr.get_write_value()))
    # relax value-ref checks only while the server is still starting up
    starting = Util.instance().is_svr_starting()
    self.measurement_group._config._value_ref_compat = starting
    self.measurement_group.set_configuration_from_user(cfg)
def getEnvironment(self, cache=True):
    """Return the pickle-encoded macro-server environment, reusing the
    cached value when available and cache is True."""
    if cache and self.EnvironmentCache is not None:
        return self.EnvironmentCache
    env = self.macro_server.get_env()
    encoded = CodecFactory().getCodec('pickle').encode(
        ('', dict(new=env)))
    self.EnvironmentCache = encoded
    return encoded
def getElements(self, cache=True):
    """Return the JSON-encoded macro-server element info, reusing the
    cached value when available and cache is True."""
    if cache and self.ElementsCache is not None:
        return self.ElementsCache
    elements = self.macro_server.get_elements_info()
    encoded = CodecFactory().getCodec('json').encode(
        ('', dict(new=elements)))
    self.ElementsCache = encoded
    return encoded
def get(self, cache=False):
    """Return the ExperimentConfiguration dictionary built from the
    door environment and the measurement group configurations.

    :param cache: unused here; kept for interface compatibility
    :return: dict with ScanDir, ScanFile, DataCompressionRank,
        PreScanSnapshot, ActiveMntGrp and MntGrpConfigs keys
    """
    door = self._door
    macro_server = door.macro_server
    env = door.getEnvironment()
    ret = dict(ScanDir=env.get('ScanDir'),
               DataCompressionRank=env.get('DataCompressionRank', 1),
               PreScanSnapshot=env.get('PreScanSnapshot', []))
    # normalize ScanFile to a list of file names
    scan_file = env.get('ScanFile')
    if scan_file is None:
        scan_file = []
    elif isinstance(scan_file, str):  # 'unicode' does not exist on py3
        scan_file = [scan_file]
    ret['ScanFile'] = scan_file
    mnt_grps = macro_server.getElementsOfType("MeasurementGroup")
    mnt_grps_names = [mnt_grp.name for mnt_grp in mnt_grps.values()]
    # materialize the keys view (py3) before handing it to PyTango
    mnt_grps_full_names = list(mnt_grps.keys())
    active_mnt_grp = env.get('ActiveMntGrp')
    if active_mnt_grp is None and len(mnt_grps):
        active_mnt_grp = mnt_grps_names[0]
        door.putEnvironment('ActiveMntGrp', active_mnt_grp)
    ret['ActiveMntGrp'] = active_mnt_grp
    ret['MntGrpConfigs'] = mnt_grp_configs = CaselessDict()
    if len(mnt_grps) == 0:
        return ret
    mnt_grp_grps = PyTango.Group("grp")
    # use full names cause we may be using a different Tango database
    mnt_grp_grps.add(mnt_grps_full_names)
    codec = CodecFactory().getCodec('json')
    replies = mnt_grp_grps.read_attribute("configuration")
    for mnt_grp, reply in zip(mnt_grps_names, replies):
        try:
            mnt_grp_configs[mnt_grp] = \
                codec.decode(('json', reply.get_data().value),
                             ensure_ascii=True)[1]
        except Exception as e:  # py3-compatible except syntax
            from taurus.core.util.log import warning
            warning('Cannot load Measurement group "%s": %s',
                    repr(mnt_grp), repr(e))
    # fix: the original fell through returning None when measurement
    # groups were present
    return ret
def _processInput(self, input_data):
    """Handle an input request coming from the door: either forward it
    to the input handler and write the JSON-encoded answer back, or
    signal an input timeout.

    :param input_data: dict with at least a 'type' key
    """
    input_type = input_data['type']
    if input_type == 'input':
        result = self._input_handler.input(input_data)
        # fix: '==' instead of 'is' - identity comparison against a
        # string literal is unreliable (and a SyntaxWarning on modern
        # Python)
        if result['input'] == '' and 'default_value' in input_data:
            result['input'] = input_data['default_value']
        result = CodecFactory().encode('json', ('', result))[1]
        self.write_attribute('Input', result)
    elif input_type == 'timeout':
        self._input_handler.input_timeout(input_data)
def start(user_ns=None):
    """Check spock requirements before starting.

    Exits the process with -1 when a hard requirement is missing.
    """
    # Make sure the log level is changed to warning
    CodecFactory()
    setLogLevel(Warning)
    try:
        check_requirements()
    except exception.SpockMissingRequirement as requirement:
        # py3-compatible except syntax and print() function
        print(str(requirement))
        sys.exit(-1)
def GetMacroInfo(self, macro_names):
    """GetMacroInfo(list<string> macro_names):

       Returns a list of string containing macro information.
       Each string is a JSON encoded.

       Params:
           - macro_name: a list of strings with the macro(s) name(s)

       Returns:
           - a list of string containing macro information.
    """
    codec = CodecFactory().getCodec("json")
    return [codec.encode(("", macro.serialize()))[1]
            for macro in self.macro_server.get_macros().values()
            if macro.name in macro_names]
def GetMacroInfo(self, macro_names):
    """GetMacroInfo(list<string> macro_names):

       Returns a list of string containing macro information.
       Each string is a JSON encoded.

       Params:
           - macro_name: a list of strings with the macro(s) name(s)

       Returns:
           - a list of string containing macro information.
    """
    macro_server = self.macro_server
    json_codec = CodecFactory().getCodec('json')
    encoded = []
    for macro in macro_server.get_macros().values():
        if macro.name not in macro_names:
            continue
        encoded.append(json_codec.encode(('', macro.serialize()))[1])
    return encoded
def set(self, conf, mnt_grps=None):
    """Sets the ExperimentConfiguration dictionary.

    :param conf: experiment configuration dictionary (must contain the
        'MntGrpConfigs' key)
    :param mnt_grps: optional list of measurement group names to apply;
        defaults to all groups present in conf['MntGrpConfigs']
    """
    env = dict(ScanDir=conf.get('ScanDir'),
               ScanFile=conf.get('ScanFile'),
               DataCompressionRank=conf.get('DataCompressionRank', -1),
               ActiveMntGrp=conf.get('ActiveMntGrp'),
               PreScanSnapshot=conf.get('PreScanSnapshot'))
    if mnt_grps is None:
        mnt_grps = list(conf['MntGrpConfigs'].keys())
    self._door.putEnvironments(env)
    codec = CodecFactory().getCodec('json')
    for mnt_grp in mnt_grps:
        try:
            mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
            if mnt_grp_cfg is None:  # a mntGrp to be deleted
                pool = self._getPoolOfElement(mnt_grp)
                pool.DeleteElement(mnt_grp)
            else:
                try:
                    mnt_grp_dev = Device(mnt_grp)
                except Exception:  # narrowed from a bare 'except:'
                    # if the mnt_grp did not already exist, create it now
                    chconfigs = getChannelConfigs(mnt_grp_cfg)
                    # list() required on py3 where zip() is lazy
                    chnames, chinfos = list(zip(*chconfigs))  # unzipping
                    # We assume that all the channels belong to the same
                    # pool!
                    pool = self._getPoolOfElement(chnames[0])
                    pool.createMeasurementGroup([mnt_grp] + list(chnames))
                    mnt_grp_dev = Device(mnt_grp)
                # TODO when we start using measurement group extension
                # change the code below with the following:
                # mnt_grp.setConfiguration(mnt_grp_cfg)
                data = codec.encode(('', mnt_grp_cfg))[1]
                mnt_grp_dev.write_attribute('configuration', data)
        except Exception as e:  # py3-compatible except syntax
            from taurus.core.util.log import error
            error(
                'Could not create/delete/modify Measurement group "%s": %s',
                mnt_grp, repr(e))
def _on_elements_changed(self, evt_src, evt_type, evt_value):
    """Apply an elements-changed event to the local element registry and
    return the (added, removed, changed) element sets."""
    ret = added, removed, changed = set(), set(), set()
    if evt_type not in CHANGE_EVT_TYPES:
        return ret
    try:
        elems = CodecFactory().decode(evt_value.value, ensure_ascii=True)
    except Exception:
        # narrowed from a bare 'except:' (kept broad on purpose: the
        # codec may raise implementation-specific errors)
        self.error("Could not decode element info format=%s len=%s",
                   evt_value.value[0], len(evt_value.value[1]))
        return ret
    for element_data in elems.get('new', ()):
        element_data['manager'] = self
        added.add(self._addElement(element_data))
    for element_data in elems.get('del', ()):
        removed.add(self._removeElement(element_data))
    for element_data in elems.get('change', ()):
        # a change is modelled as a remove followed by an add
        self._removeElement(element_data)
        element_data['manager'] = self
        changed.add(self._addElement(element_data))
    return ret
class PoolExpChannelDevice(PoolElementDevice):
    """Base tango device for experimental channel elements: provides the
    JSON value-chunk encoding used by the Data communication channel and
    the IntegrationTime attribute handlers."""

    def __init__(self, dclass, name):
        """Constructor"""
        PoolElementDevice.__init__(self, dclass, name)
        # codec used to serialize value chunks pushed through the Data
        # attribute
        self._codec = CodecFactory().getCodec('json')

    def _encode_value_chunk(self, value_chunk):
        """Prepare value chunk to be passed via communication channel.

        :param value_chunk: value chunk
        :type value_chunk: seq<SardanaValue>

        :return: json string representing value chunk
        :rtype: str"""
        data = []
        index = []
        # NOTE(review): iteritems() is a Python 2 dict API - value_chunk
        # is assumed to provide it (a plain py3 dict would not); confirm
        for idx, sdn_value in value_chunk.iteritems():
            index.append(idx)
            value = sdn_value.value
            # TODO: Improve it in the future
            # In case of big arrays e.g. 10k points and higher there are more
            # optimal solutions but they require complex changes on encoding
            # and decoding side.
            if isinstance(value, np.ndarray):
                value = value.tolist()
            data.append(value)
        data = dict(data=data, index=index)
        _, encoded_data = self._codec.encode(('', data))
        return encoded_data

    def initialize_dynamic_attributes(self):
        # these attributes get manually-fired change events instead of
        # polling-based detection
        attrs = PoolElementDevice.initialize_dynamic_attributes(self)
        non_detect_evts = "integrationtime",
        for attr_name in non_detect_evts:
            if attr_name in attrs:
                self.set_change_event(attr_name, True, False)
        return attrs

    def read_Data(self, attr):
        # Data is a push-only communication channel: reading it is an
        # error by design
        desc = "Data attribute is not foreseen for reading. It is used only "\
            "as the communication channel for the continuous acquisitions."
        Except.throw_exception("UnsupportedFeature", desc,
                               "PoolExpChannelDevice.read_Data",
                               ErrSeverity.WARN)

    def read_IntegrationTime(self, attr):
        """Reads the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        attr.set_value(self.element.integration_time)

    def write_IntegrationTime(self, attr):
        """Sets the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        self.element.integration_time = attr.get_write_value()
def removeEnvironments(self, keys):
    """Remove the given environment keys from the door environment."""
    codec = CodecFactory().getCodec('pickle')
    payload = codec.encode(('', {'del': keys}))
    self.write_attribute('Environment', payload)
def putEnvironments(self, obj):
    """Set the given environment key/value pairs on the door."""
    codec = CodecFactory().getCodec('pickle')
    self.write_attribute('Environment',
                         codec.encode(('', dict(new=obj))))
def _fromJSON(self, json_str):
    """Decode a JSON string using the taurus json codec.

    :param json_str: the JSON document to decode
    :return: the decoded python object
    """
    json_codec = CodecFactory().getCodec('json')
    # 'fmt' instead of 'format' to avoid shadowing the builtin
    fmt, data = json_codec.decode(('json', json_str))
    return data
def __init__(self, dclass, name):
    """Constructor"""
    PoolElementDevice.__init__(self, dclass, name)
    # json codec reused by this device's encode/decode helpers
    self._codec = CodecFactory().getCodec('json')
def read_Configuration(self, attr):
    """Fill the Configuration attribute with the JSON-encoded user
    configuration of the measurement group."""
    user_cfg = self.measurement_group.get_user_configuration()
    _, encoded = CodecFactory().getCodec('json').encode(('', user_cfg))
    attr.set_value(encoded)
def read_Synchronization(self, attr):
    """Fill the Synchronization attribute with the JSON-encoded
    synchronization description of the measurement group."""
    synch = self.measurement_group.synchronization
    _, encoded = CodecFactory().getCodec('json').encode(('', synch))
    attr.set_value(encoded)