def on_door_changed(self, event_source, event_type, event_value):
    """Forward a sardana door event to the matching Tango attribute.

    Translates the event name/value into a Tango-compatible value and
    pushes it with :meth:`set_attribute`.
    """
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    name = event_type.name.lower()
    multi_attr = self.get_device_attr()
    try:
        attr = multi_attr.get_attr_by_name(name)
    except DevFailed:
        # event does not correspond to any Tango attribute - ignore it
        return
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "recorddata":
        # record data arrives as a (format, value) pair; encode the value
        # with the codec matching the announced format
        format, value = event_value
        codec = CodecFactory().getCodec(format)
        event_value = codec.encode(('', value))
    else:
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                # NOTE(review): 'error' is computed but never used below;
                # other variants of this handler pass it to set_attribute -
                # confirm whether it should be forwarded here as well
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
        if attr.get_data_type() == ArgType.DevEncoded:
            # DevEncoded attributes carry a JSON-encoded payload
            codec = CodecFactory().getCodec('json')
            event_value = codec.encode(('', event_value))
    self.set_attribute(attr, value=event_value, timestamp=timestamp)
def on_door_changed(self, event_source, event_type, event_value):
    """Forward a sardana door event to the matching Tango attribute.

    Translates the event name/value into a Tango-compatible value and
    pushes it with :meth:`set_attribute`.
    """
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    name = event_type.name.lower()
    multi_attr = self.get_device_attr()
    try:
        attr = multi_attr.get_attr_by_name(name)
    except DevFailed:
        # event does not correspond to any Tango attribute - ignore it
        return
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "recorddata":
        # record data arrives as a (format, value) pair; encode the value
        # with the codec matching the announced format
        format, value = event_value
        codec = CodecFactory().getCodec(format)
        event_value = codec.encode(('', value))
    else:
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                # NOTE(review): 'error' is computed but never used below;
                # other variants of this handler pass it to set_attribute -
                # confirm whether it should be forwarded here as well
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
        if attr.get_data_type() == ArgType.DevEncoded:
            # DevEncoded attributes carry a JSON-encoded payload
            codec = CodecFactory().getCodec('json')
            event_value = codec.encode(('', event_value))
    self.set_attribute(attr, value=event_value, timestamp=timestamp)
def read_RecordData(self, attr):
    """Read the RecordData attribute.

    Encodes the door's last macro data with the 'bz2_pickle' codec and
    sets it as the attribute value.

    :param attr: tango attribute to fill
    """
    try:
        macro_data = self.door.get_macro_data()
        codec = CodecFactory().getCodec('bz2_pickle')
        data = codec.encode(('', macro_data))
    except MacroServerException as mse:  # fixed py2-only "except E, e" syntax
        throw_sardana_exception(mse)
    # bug fix: the encoded data was computed but never assigned to the
    # attribute, so reads always returned an empty value
    attr.set_value(*data)
def _on_measurement_group_changed(self, event_source, event_type, event_value):
    """Forward a measurement group event to the matching Tango attribute."""
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    # sardana event names use '_' separators; the Tango attributes do not
    name = event_type.name
    name = name.replace('_', '')
    multi_attr = self.get_device_attr()
    attr = multi_attr.get_attr_by_name(name)
    quality = AttrQuality.ATTR_VALID
    priority = event_type.priority
    error = None
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "acquisitionmode":
        # push the acquisition mode as its string representation
        event_value = AcqMode.whatis(event_value)
    elif name == "configuration":
        # push the user configuration as a JSON string
        cfg = self.measurement_group.get_user_configuration()
        codec = CodecFactory().getCodec('json')
        _, event_value = codec.encode(('', cfg))
    else:
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
    self.set_attribute(attr, value=event_value, timestamp=timestamp,
                       quality=quality, priority=priority, error=error,
                       synch=False)
def read_RecordData(self, attr):
    """Read the RecordData attribute.

    Encodes the door's last macro data with the 'bz2_pickle' codec and
    sets it as the attribute value.

    :param attr: tango attribute to fill
    """
    try:
        macro_data = self.door.get_macro_data()
        codec = CodecFactory().getCodec('bz2_pickle')
        data = codec.encode(('', macro_data))
    except MacroServerException as mse:  # fixed py2-only "except E, e" syntax
        throw_sardana_exception(mse)
    # bug fix: the encoded data was computed but never assigned to the
    # attribute, so reads always returned an empty value
    attr.set_value(*data)
def handle(self, func_name, *args, **kwargs):
    """Encode a function-call description with the configured codec and
    push it to the door attribute."""
    payload = {
        'type': 'function',
        'func_name': func_name,
        'args': args,
        'kwargs': kwargs,
    }
    encoded = CodecFactory().getCodec(self.format).encode(('', payload))
    self.door.set_attribute(self.attr, value=encoded)
def _on_measurement_group_changed(self, event_source, event_type, event_value):
    """Forward a measurement group event to the matching Tango attribute."""
    # during server startup and shutdown avoid processing element
    # creation events
    if SardanaServer.server_state != State.Running:
        return
    timestamp = time.time()
    # sardana event names use '_' separators; the Tango attributes do not
    name = event_type.name
    name = name.replace('_', '')
    multi_attr = self.get_device_attr()
    attr = multi_attr.get_attr_by_name(name)
    quality = AttrQuality.ATTR_VALID
    priority = event_type.priority
    error = None
    if name == "state":
        event_value = self.calculate_tango_state(event_value)
    elif name == "status":
        event_value = self.calculate_tango_status(event_value)
    elif name == "acquisitionmode":
        # push the acquisition mode as its string representation
        event_value = AcqMode.whatis(event_value)
    elif name == "configuration":
        # push the user configuration as a JSON string
        cfg = self.measurement_group.get_user_configuration()
        codec = CodecFactory().getCodec('json')
        _, event_value = codec.encode(('', cfg))
    elif name == "synchdescription":
        # the synchronization description is pushed as a JSON string
        codec = CodecFactory().getCodec('json')
        _, event_value = codec.encode(('', event_value))
    elif name == "moveable" and event_value is None:
        # Tango string attributes cannot hold None - use the literal 'None'
        event_value = 'None'
    else:
        if isinstance(event_value, SardanaAttribute):
            if event_value.error:
                error = Except.to_dev_failed(*event_value.exc_info)
            timestamp = event_value.timestamp
            event_value = event_value.value
    self.set_attribute(attr, value=event_value, timestamp=timestamp,
                       quality=quality, priority=priority, error=error,
                       synch=False)
def set(self, conf, mnt_grps=None):
    """Sets the ExperimentConfiguration dictionary.

    Creates/deletes/reconfigures the measurement groups listed in
    ``conf['MntGrpConfigs']`` (or only the ones in *mnt_grps*) and then
    pushes the experiment environment to the door. Per-group failures are
    collected and reported together as a single :class:`RuntimeError`.
    """
    if mnt_grps is None:
        mnt_grps = list(conf['MntGrpConfigs'].keys())
    codec = CodecFactory().getCodec('json')
    msg_error = ''
    for mnt_grp in mnt_grps:
        try:
            mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
            if mnt_grp_cfg is None:  # a mntGrp to be deleted
                pool = self._getPoolOfElement(mnt_grp)
                pool.DeleteElement(mnt_grp)
            else:
                try:
                    # TODO: Fix incorrect implementation. It must check if
                    # the measurement group is part of the Pools
                    # controlled by the MacroServer. Otherwise,
                    # it must raise an exception.
                    mnt_grp_dev = Device(mnt_grp)
                except Exception:
                    # if the mnt_grp did not already exist, create it now
                    chconfigs = getChannelConfigs(mnt_grp_cfg)
                    chnames, chinfos = list(zip(*chconfigs))  # unzipping
                    # We assume that all the channels belong to the same
                    # pool!
                    pool = self._getPoolOfElement(chnames[0])
                    pool.createMeasurementGroup([mnt_grp] + list(chnames))
                    mnt_grp_dev = Device(mnt_grp)
                # TODO when we start using measurement group extension
                # change the code below with the following:
                # mnt_grp.setConfiguration(mnt_grp_cfg)
                data = codec.encode(('', mnt_grp_cfg))[1]
                mnt_grp_dev.write_attribute('configuration', data)
        except PyTango.DevFailed as df:
            # Take the description of the first exception.
            desc = df.args[0].desc
            desc = desc.replace('\r', '')
            desc = desc.replace('\n', '')
            msg_error += 'Measurement Group {0}:\n'\
                         '{1}\n\n'.format(mnt_grp, desc)
    if len(msg_error) > 0:
        raise RuntimeError(msg_error)
    # Send the environment changes
    env = dict(ScanDir=conf.get('ScanDir'),
               ScanFile=conf.get('ScanFile'),
               DataCompressionRank=conf.get('DataCompressionRank', -1),
               ActiveMntGrp=conf.get('ActiveMntGrp'),
               PreScanSnapshot=conf.get('PreScanSnapshot'))
    self._door.putEnvironments(env)
def read_RecordData(self, attr):
    """Read RecordData: bz2_pickle-encode the door's last macro data."""
    try:
        macro_data = self.door.get_macro_data()
        encoded = CodecFactory().getCodec('bz2_pickle').encode(
            ('', macro_data))
    except MacroServerException as mse:
        throw_sardana_exception(mse)
    attr.set_value(*encoded)
    # workaround for a bug in PyTango (tango-controls/pytango#147),
    # i.e. temporary solution for issue #447
    # (storing reference to data so it can not be destroyed by GC)
    self.__buf_data = encoded
def GetMacroInfo(self, macro_names):
    """GetMacroInfo(list<string> macro_names): Returns a list of string
       containing macro information.
       Each string is a JSON encoded.

    Params:
        - macro_name: a list of strings with the macro(s) name(s)

    Returns:
        - a list of string containing macro information.
    """
    macro_server = self.macro_server
    codec = CodecFactory().getCodec("json")
    # iterate values directly - the dict keys were fetched and discarded
    return [codec.encode(("", macro.serialize()))[1]
            for macro in macro_server.get_macros().values()
            if macro.name in macro_names]
def GetMacroInfo(self, macro_names):
    """GetMacroInfo(list<string> macro_names): Returns a list of string
       containing macro information.
       Each string is a JSON encoded.

    Params:
        - macro_name: a list of strings with the macro(s) name(s)

    Returns:
        - a list of string containing macro information.
    """
    macro_server = self.macro_server
    codec = CodecFactory().getCodec('json')
    # iterate values directly - the dict keys were fetched and discarded
    return [codec.encode(('', macro.serialize()))[1]
            for macro in macro_server.get_macros().values()
            if macro.name in macro_names]
def set(self, conf, mnt_grps=None):
    """Sets the ExperimentConfiguration dictionary.

    Pushes the experiment environment to the door, then
    creates/deletes/reconfigures the measurement groups listed in
    ``conf['MntGrpConfigs']`` (or only the ones in *mnt_grps*).
    Per-group failures are logged, not raised.
    """
    env = dict(ScanDir=conf.get('ScanDir'),
               ScanFile=conf.get('ScanFile'),
               DataCompressionRank=conf.get('DataCompressionRank', -1),
               ActiveMntGrp=conf.get('ActiveMntGrp'),
               PreScanSnapshot=conf.get('PreScanSnapshot'))
    if mnt_grps is None:
        mnt_grps = conf['MntGrpConfigs'].keys()
    self._door.putEnvironments(env)
    codec = CodecFactory().getCodec('json')
    for mnt_grp in mnt_grps:
        try:
            mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
            if mnt_grp_cfg is None:  # a mntGrp to be deleted
                pool = self._getPoolOfElement(mnt_grp)
                pool.DeleteElement(mnt_grp)
            else:
                try:
                    mnt_grp_dev = Device(mnt_grp)
                except Exception:
                    # was a bare "except:"; narrowed so KeyboardInterrupt
                    # and SystemExit are not swallowed.
                    # if the mnt_grp did not already exist, create it now
                    chconfigs = getChannelConfigs(mnt_grp_cfg)
                    chnames, chinfos = zip(*chconfigs)  # unzipping
                    # We assume that all the channels belong to the same
                    # pool!
                    pool = self._getPoolOfElement(chnames[0])
                    pool.createMeasurementGroup([mnt_grp] + list(chnames))
                    mnt_grp_dev = Device(mnt_grp)
                # TODO when we start using measurement group extension
                # change the code below with the following:
                # mnt_grp.setConfiguration(mnt_grp_cfg)
                data = codec.encode(('', mnt_grp_cfg))[1]
                mnt_grp_dev.write_attribute('configuration', data)
        except Exception as e:  # fixed py2-only "except Exception, e"
            from taurus.core.util.log import error
            error('Could not create/delete/modify Measurement group '
                  '"%s": %s', mnt_grp, repr(e))
def set(self, conf, mnt_grps=None):
    """Sets the ExperimentConfiguration dictionary.

    Pushes the experiment environment to the door, then
    creates/deletes/reconfigures the measurement groups listed in
    ``conf['MntGrpConfigs']`` (or only the ones in *mnt_grps*).
    Per-group failures are logged, not raised.
    """
    env = dict(ScanDir=conf.get('ScanDir'),
               ScanFile=conf.get('ScanFile'),
               DataCompressionRank=conf.get('DataCompressionRank', -1),
               ActiveMntGrp=conf.get('ActiveMntGrp'),
               PreScanSnapshot=conf.get('PreScanSnapshot'))
    if mnt_grps is None:
        mnt_grps = conf['MntGrpConfigs'].keys()
    self._door.putEnvironments(env)
    codec = CodecFactory().getCodec('json')
    for mnt_grp in mnt_grps:
        try:
            mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
            if mnt_grp_cfg is None:  # a mntGrp to be deleted
                pool = self._getPoolOfElement(mnt_grp)
                pool.DeleteElement(mnt_grp)
            else:
                try:
                    mnt_grp_dev = Device(mnt_grp)
                except Exception:
                    # was a bare "except:"; narrowed so KeyboardInterrupt
                    # and SystemExit are not swallowed.
                    # if the mnt_grp did not already exist, create it now
                    chconfigs = getChannelConfigs(mnt_grp_cfg)
                    chnames, chinfos = zip(*chconfigs)  # unzipping
                    # We assume that all the channels belong to the same
                    # pool!
                    pool = self._getPoolOfElement(chnames[0])
                    pool.createMeasurementGroup([mnt_grp] + list(chnames))
                    mnt_grp_dev = Device(mnt_grp)
                # TODO when we start using measurement group extension
                # change the code below with the following:
                # mnt_grp.setConfiguration(mnt_grp_cfg)
                data = codec.encode(('', mnt_grp_cfg))[1]
                mnt_grp_dev.write_attribute('configuration', data)
        except Exception as e:  # fixed py2-only "except Exception, e"
            from taurus.core.util.log import error
            error('Could not create/delete/modify Measurement group '
                  '"%s": %s', mnt_grp, repr(e))
def GetMacroInfo(self, macro_names):
    """Get macro information

    Returns a list of strings containing macro information.
    Each string is a JSON encoded.

    Args:
        macro_names (list(str)): macro(s) name(s)

    Returns:
        list(str): macro(s) information
    """
    macro_server = self.macro_server
    codec = CodecFactory().getCodec('json')
    # iterate values directly - the dict keys were fetched and discarded
    return [codec.encode(('', macro.serialize()))[1]
            for macro in macro_server.get_macros().values()
            if macro.name in macro_names]
class PoolExpChannelDevice(PoolElementDevice):
    """Tango device exposing a sardana experimental channel."""

    def __init__(self, dclass, name):
        """Constructor"""
        PoolElementDevice.__init__(self, dclass, name)
        # shared JSON codec used to serialize value chunks for clients
        self._codec = CodecFactory().getCodec('json')

    def _encode_value_chunk(self, value_chunk):
        """Prepare value chunk to be passed via communication channel.

        :param value_chunk: value chunk
        :type value_chunk: seq<SardanaValue>

        :return: json string representing value chunk
        :rtype: str"""
        data = []
        index = []
        # fixed py2-only iteritems(); items() iterates identically here
        for idx, sdn_value in value_chunk.items():
            index.append(idx)
            value = sdn_value.value
            # TODO: Improve it in the future
            # In case of big arrays e.g. 10k points and higher there are more
            # optimal solutions but they require complex changes on encoding
            # and decoding side.
            if isinstance(value, np.ndarray):
                value = value.tolist()
            data.append(value)
        data = dict(data=data, index=index)
        _, encoded_data = self._codec.encode(('', data))
        return encoded_data

    def read_Data(self, attr):
        """Always raises: Data is an event-only communication channel."""
        desc = "Data attribute is not foreseen for reading. It is used only "\
               "as the communication channel for the continuous acquisitions."
        Except.throw_exception("UnsupportedFeature", desc,
                               "PoolExpChannelDevice.read_Data",
                               ErrSeverity.WARN)
def removeEnvironments(self, keys):
    """Remove the given environment keys from the door environment."""
    pickle_codec = CodecFactory().getCodec('pickle')
    payload = pickle_codec.encode(('', {'del': keys}))
    self.write_attribute('Environment', payload)
def putEnvironments(self, obj):
    """Store the given environment dictionary on the door."""
    payload = {'new': obj}
    pickle_codec = CodecFactory().getCodec('pickle')
    self.write_attribute('Environment', pickle_codec.encode(('', payload)))
def read_Synchronization(self, attr):
    """Read the synchronization description as a JSON string."""
    json_codec = CodecFactory().getCodec('json')
    _, serialized = json_codec.encode(
        ('', self.measurement_group.synchronization))
    attr.set_value(serialized)
def read_Configuration(self, attr):
    """Read the user configuration as a JSON string."""
    user_cfg = self.measurement_group.get_user_configuration()
    json_codec = CodecFactory().getCodec('json')
    _, serialized = json_codec.encode(('', user_cfg))
    attr.set_value(serialized)
class PoolExpChannelDevice(PoolElementDevice):
    """Tango device exposing a sardana experimental channel.

    Serializes value / value-reference buffer chunks and pushes them to
    clients through the ValueBuffer / ValueRefBuffer attributes.
    """

    def __init__(self, dclass, name):
        """Constructor"""
        PoolElementDevice.__init__(self, dclass, name)
        # codecs used to serialize buffer chunks; NOTE(review): getattr is
        # called without a default, so a missing custom setting raises
        # AttributeError - confirm both settings are always defined
        codec_name = getattr(sardanacustomsettings, "VALUE_BUFFER_CODEC")
        self._value_buffer_codec = CodecFactory().getCodec(codec_name)
        codec_name = getattr(sardanacustomsettings, "VALUE_REF_BUFFER_CODEC")
        self._value_ref_buffer_codec = CodecFactory().getCodec(codec_name)

    def _encode_value_chunk(self, value_chunk):
        """Prepare value chunk to be passed via communication channel.

        :param value_chunk: value chunk
        :type value_chunk: seq<SardanaValue>

        :return: json string representing value chunk
        :rtype: str"""
        index = []
        value = []
        for idx, sdn_value in value_chunk.items():
            index.append(idx)
            value.append(sdn_value.value)
        data = dict(index=index, value=value)
        encoded_data = self._value_buffer_codec.encode(('', data))
        return encoded_data

    def _encode_value_ref_chunk(self, value_ref_chunk):
        """Prepare value ref chunk to be passed via communication channel.

        :param value_ref_chunk: value ref chunk
        :type value_ref_ chunk: seq<SardanaValue>

        :return: json string representing value chunk
        :rtype: str
        """
        index = []
        value_ref = []
        for idx, sdn_value in value_ref_chunk.items():
            index.append(idx)
            value_ref.append(sdn_value.value)
        data = dict(index=index, value_ref=value_ref)
        encoded_data = self._value_ref_buffer_codec.encode(('', data))
        return encoded_data

    def initialize_dynamic_attributes(self):
        """Initialize dynamic attributes; integrationtime change events are
        pushed manually (change events enabled without polling detection)."""
        attrs = PoolElementDevice.initialize_dynamic_attributes(self)
        non_detect_evts = "integrationtime",
        for attr_name in non_detect_evts:
            if attr_name in attrs:
                self.set_change_event(attr_name, True, False)
        return attrs

    def read_ValueBuffer(self, _):
        """Always raises: ValueBuffer is an event-only channel."""
        desc = "ValueBuffer attribute is not foreseen for reading. It is " \
               "used only as the communication channel for the continuous " \
               "acquisitions."
        Except.throw_exception("UnsupportedFeature", desc,
                               "PoolExpChannelDevice.read_ValueBuffer",
                               ErrSeverity.WARN)

    def read_ValueRefBuffer(self, _):
        """Always raises: ValueRefBuffer is an event-only channel."""
        desc = ("ValueRefBuffer attribute is not foreseen for reading. "
                "It is used only as the communication channel for the "
                "continuous acquisitions.")
        Except.throw_exception("UnsupportedFeature", desc,
                               "PoolExpChannelDevice.read_ValueRefBuffer",
                               ErrSeverity.WARN)

    def read_IntegrationTime(self, attr):
        """Reads the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        attr.set_value(self.element.integration_time)

    def write_IntegrationTime(self, attr):
        """Sets the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        self.element.integration_time = attr.get_write_value()
def handle(self, func_name, *args, **kwargs):
    """Encode a function-call description with the configured codec and
    push it to the door attribute."""
    payload = {
        'type': 'function',
        'func_name': func_name,
        'args': args,
        'kwargs': kwargs,
    }
    encoded = CodecFactory().getCodec(self.format).encode(('', payload))
    self.door.set_attribute(self.attr, value=encoded)
def putEnvironments(self, obj):
    """Store the given environment dictionary on the door."""
    payload = {'new': obj}
    pickle_codec = CodecFactory().getCodec('pickle')
    self.write_attribute('Environment', pickle_codec.encode(('', payload)))
def removeEnvironments(self, keys):
    """Remove the given environment keys from the door environment."""
    pickle_codec = CodecFactory().getCodec('pickle')
    payload = pickle_codec.encode(('', {'del': keys}))
    self.write_attribute('Environment', payload)
class PoolExpChannelDevice(PoolElementDevice):
    """Tango device exposing a sardana experimental channel."""

    def __init__(self, dclass, name):
        """Constructor"""
        PoolElementDevice.__init__(self, dclass, name)
        # shared JSON codec used to serialize value chunks for clients
        self._codec = CodecFactory().getCodec('json')

    def _encode_value_chunk(self, value_chunk):
        """Prepare value chunk to be passed via communication channel.

        :param value_chunk: value chunk
        :type value_chunk: seq<SardanaValue>

        :return: json string representing value chunk
        :rtype: str"""
        data = []
        index = []
        # fixed py2-only iteritems(); items() iterates identically here
        for idx, sdn_value in value_chunk.items():
            index.append(idx)
            value = sdn_value.value
            # TODO: Improve it in the future
            # In case of big arrays e.g. 10k points and higher there are more
            # optimal solutions but they require complex changes on encoding
            # and decoding side.
            if isinstance(value, np.ndarray):
                value = value.tolist()
            data.append(value)
        data = dict(data=data, index=index)
        _, encoded_data = self._codec.encode(('', data))
        return encoded_data

    def initialize_dynamic_attributes(self):
        """Initialize dynamic attributes; integrationtime change events are
        pushed manually (change events enabled without polling detection)."""
        attrs = PoolElementDevice.initialize_dynamic_attributes(self)
        non_detect_evts = "integrationtime",
        for attr_name in non_detect_evts:
            if attr_name in attrs:
                self.set_change_event(attr_name, True, False)
        return attrs

    def read_Data(self, attr):
        """Always raises: Data is an event-only communication channel."""
        desc = "Data attribute is not foreseen for reading. It is used only "\
               "as the communication channel for the continuous acquisitions."
        Except.throw_exception("UnsupportedFeature", desc,
                               "PoolExpChannelDevice.read_Data",
                               ErrSeverity.WARN)

    def read_IntegrationTime(self, attr):
        """Reads the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        attr.set_value(self.element.integration_time)

    def write_IntegrationTime(self, attr):
        """Sets the integration time.

        :param attr: tango attribute
        :type attr: :class:`~PyTango.Attribute`"""
        self.element.integration_time = attr.get_write_value()