class Scan:
    """RPC client wrapper for driving a scan block over pvAccess.

    Holds pre-built RPC request payloads as class attributes and issues
    them against the named block via the shared context.
    """

    # Pre-built RPC "method selector" payloads, shared by all instances.
    configure_value = Value(Type([("method", "s")]), dict(method="configure"))
    abort_value = Value(Type([("method", "s")]), dict(method="abort"))
    reset_value = Value(Type([("method", "s")]), dict(method="reset"))

    def __init__(self, block_name: str) -> None:
        # Name of the block all RPCs are addressed to
        self.block_name = block_name
        # NOTE(review): `context` is a module-level global not visible in
        # this chunk — confirm it is initialised before Scan is constructed.
        self.context = context

    def reset(self) -> None:
        """Send the pre-built "reset" RPC to the block."""
        self.context.rpc(self.block_name, None, Scan.reset_value)

    def abort(self) -> None:
        """Send the pre-built "abort" RPC to the block."""
        self.context.rpc(self.block_name, None, Scan.abort_value)

    def run_scan(
        self,
        axes_params: Union[AxisParams, List[AxisParams]],
        duration: float,
        file_dir: str,
    ):
        # Only the initial report is visible here; the rest of the scan
        # sequence (if any) is outside this view.
        self.report("Configuring scan...")

    def report(self, message):
        """Print a progress message prefixed with the block name."""
        print(f"{self.block_name}: {message}")
def testRpcRoot(self):
    """Exercise zero/increment RPCs addressed to the root counter block."""
    # Zero the counter via a keyword-style rpc call
    zero_req = Value(Type([("method", "s")]), dict(method="zero"))
    self.ctxt.rpc("TESTCOUNTER", value=None, request=zero_req)
    self.assertCounter(0)

    # Increment once and check the (empty) return structure
    inc_req = Value(Type([("method", "s")]), dict(method="increment"))
    ret = self.ctxt.rpc("TESTCOUNTER", EMPTY, inc_req)
    self.assertEqual(ret.tolist(), [])
    self.assertCounter(1)

    # Zero again, this time with positional EMPTY arguments
    zero_req = Value(Type([("method", "s")]), dict(method="zero"))
    self.ctxt.rpc("TESTCOUNTER", EMPTY, zero_req)
    self.assertCounter(0)
def hist(self, **kws):
    """Return canned history data for one or more comma-separated PV names."""
    # Single canned sample shared by every PV in the reply
    sample = [{"secondsPastEpoch": 1, "values": 123.45,
               "nanoseconds": 0, "severity": 0, "status": 0}]
    names = kws['pv'].split(",")
    if len(names) == 1:
        # Single PV: wrap the sample directly
        return single_pv_struct.wrap(sample)
    # Multiple PVs: one wrapped entry per name, bundled in a response struct
    wrapped = [
        Value(multi_pv_struct,
              {"pvName": name, "value": single_pv_struct.wrap(sample)})
        for name in names
    ]
    return Value(multi_response_struct, {"value": wrapped})
def convert_value_to_dict(v: Value) -> Dict:
    """Convert a p4p Value into an OrderedDict, preserving field order.

    A non-default typeid is recorded under the "typeid" key; NTTable's
    "labels" field is deliberately dropped.
    """
    result = OrderedDict()
    typeid = v.getID()
    # Only record a typeid when it differs from the generic default
    if typeid != "structure":
        result["typeid"] = typeid
    for name, spec in v.type().items():
        # NTTable might give us labels, ignore them
        if name == "labels" and typeid == NTTable.typeid:
            continue
        result[name] = convert_from_type_spec(spec, v[name])
    return result
def update(self):
    """Rebuild the PAYLOAD PV structure from the current mask/name/type PVs.

    The new struct ID is derived from the mask; an 'a' suffix is appended
    if it would collide with the current ID (p4p requires a change).
    """
    mask = self.fieldMask.current().get('value')
    names = self.fieldNames.current().get('value')
    types = self.fieldTypes.current().get('value')
    oid = self.payload.current().getID()
    nid = str(mask)
    if nid == oid:
        nid += 'a'
    # Always lead with a 'valid' int field, then one field per set mask bit
    ntypes = [('valid', 'i')]
    nvalues = {'valid': 0}
    for i in range(31):
        if mask & 1:
            ntypes.append((names[i], chr(types[i])))
            nvalues[names[i]] = 0
        mask >>= 1
    # Replace the hosted PAYLOAD PV with one of the new type
    pvname = self.prefix + 'PAYLOAD'
    self.provider.remove(pvname)
    self.payload = SharedPV(initial=Value(Type(ntypes, id=nid), nvalues),
                            handler=DefaultPVHandler(self))
    print('Payload struct ID %s' % self.payload.current().getID())
    self.provider.add(pvname, self.payload)
def _regenerate_block(self, block: BlockModel, value: Value, update_fields: Set[str]) -> None:
    """Rebuild *block* from an initial full-structure update Value.

    Removes every existing endpoint except "health" and "meta", then
    repopulates the block from the fields of *value* and refreshes the
    set of settable fields.
    """
    # This is an initial update, generate the list of all fields
    # TODO: very similar to websocketclientcomms
    for field in list(block):
        if field not in ("health", "meta"):
            block.remove_endpoint(field)
    for k, v in value.items():
        if k == "health":
            # Update health attribute
            block.health.set_value(
                value=v["value"],
                alarm=convert_value_to_dict(v["alarm"]),
                ts=convert_value_to_dict(v["timeStamp"]),
            )
        elif k == "meta":
            # Update BlockMeta
            meta: BlockMeta = block.meta
            for n in meta.call_types:
                meta.apply_change([n], v[n])
        else:
            # Add new Attribute/Method
            v = convert_value_to_dict(v)
            block.set_endpoint_data(k, v)
        # Update the list of fields
        # NOTE(review): reconstructed at loop level (runs for health/meta
        # too) — confirm against the original file's indentation.
        self._update_settable_fields(update_fields, k, block[k])
def send_put(self, mri, attribute_name, value):
    """Dispatch a Put to the server.

    Args:
        mri (str): The mri of the Block
        attribute_name (str): The name of the Attribute within the Block
        value: The value to put
    """
    path = attribute_name + ".value"
    typ, value = convert_to_type_tuple_value(value)
    if isinstance(typ, tuple):
        # Structured type: build a p4p Value from its fields
        _, typeid, fields = typ
        value = Value(Type(fields, typeid), value)
    try:
        self._ctxt.put(mri, {path: value}, path)
    except RemoteError:
        if attribute_name != "exports":
            # Not expected, raise
            raise
        # TODO: use a tag instead of a name
        # Putting to exports changes the structure of the block;
        # wait for reconnect
        self._queues[mri].get(timeout=DEFAULT_TIMEOUT)
def __init__(self, provider_name, prefix):
    """Create the provider and host the field-definition and PAYLOAD PVs."""
    self.provider = StaticProvider(provider_name)
    self.prefix = prefix
    self.pvs = []
    # 31 default field names: pid00 .. pid1e
    initial_names = {'value': ['pid%02x' % i for i in range(31)]}
    self.fieldNames = SharedPV(initial=NTScalar('as').wrap(initial_names),
                               handler=DefaultPVHandler(self))
    # Field type codes: 'i' (integer) or 'f' (float)
    initial_types = {'value': [ord('i')] * 31}
    self.fieldTypes = SharedPV(initial=NTScalar('aB').wrap(initial_types),
                               handler=DefaultPVHandler(self))
    self.fieldMask = SharedPV(initial=NTScalar('I').wrap({'value': 0x1}),
                              handler=DefaultPVHandler(self))
    # Empty placeholder struct; update() replaces it with the real one
    self.payload = SharedPV(initial=Value(Type([]), {}),
                            handler=DefaultPVHandler(self))
    for suffix, pv in (('HPS:FIELDNAMES', self.fieldNames),
                       ('HPS:FIELDTYPES', self.fieldTypes),
                       ('HPS:FIELDMASK', self.fieldMask),
                       ('PAYLOAD', self.payload)):
        self.provider.add(prefix + suffix, pv)
    self.update()
def update(self):
    """Rebuild the PAYLOAD PV from the mask/name/type PVs.

    Disables the app while the structure is swapped and re-enables it
    (with the new channel mask) only if the mask is non-zero.
    """
    self.app.Enable.set(0)
    mask = self.fieldMask.current().get('value')
    names = self.fieldNames.current().get('value')
    types = self.fieldTypes.current().get('value')
    oid = self.payload.current().getID()
    nid = str(mask)
    print('PVCtrls.update mask[{:x}] oid[{:}]'.format(mask, oid))
    # p4p needs the struct ID to change; add a suffix on collision
    if nid == oid:
        nid += 'a'
    # Always lead with a 'valid' int field, then one field per set mask bit
    ntypes = [('valid', 'i')]
    nvalues = {'valid': 0}
    bits = mask
    for i in range(31):
        if bits & 1:
            ntypes.append((names[i], chr(types[i])))
            nvalues[names[i]] = 0
        bits >>= 1
    # Replace the hosted PAYLOAD PV with one of the new type
    pvname = self.prefix + 'PAYLOAD'
    self.provider.remove(pvname)
    self.payload = SharedPV(initial=Value(Type(ntypes, id=nid), nvalues),
                            handler=DefaultPVHandler(self))
    print('Payload struct ID %s' % self.payload.current().getID())
    self.provider.add(pvname, self.payload)
    if mask:
        self.app.channelMask.set(mask)
        self.app.Enable.set(1)
def run(self):
    """Host the HPS field PVs and the PAYLOAD PV, then serve forever.

    Creates the provider and its four PVs, builds the initial payload
    structure via update(), and blocks in Server.forever until stopped.
    """
    self.provider = StaticProvider(__name__)
    self.fieldNames = SharedPV(initial=NTScalar('as').wrap(
        {'value': ['pid%02x' % i for i in range(31)]}),
        handler=DefaultPVHandler(self))
    # 'i' (integer) or 'f' (float)
    self.fieldTypes = SharedPV(initial=NTScalar('aB').wrap(
        {'value': [ord('i')] * 31}),
        handler=DefaultPVHandler(self))
    self.fieldMask = SharedPV(initial=NTScalar('I').wrap({'value': 0x8000}),
                              handler=DefaultPVHandler(self))
    # Empty placeholder struct; update() replaces it with the real one
    self.payload = SharedPV(initial=Value(Type([]), {}),
                            handler=DefaultPVHandler(self))
    print('Hosting {:}HPS:FIELDMASK'.format(self.prefix))
    self.provider.add(self.prefix + 'HPS:FIELDNAMES', self.fieldNames)
    self.provider.add(self.prefix + 'HPS:FIELDTYPES', self.fieldTypes)
    self.provider.add(self.prefix + 'HPS:FIELDMASK', self.fieldMask)
    self.provider.add(self.prefix + 'PAYLOAD', self.payload)
    self.update()
    try:
        Server.forever(providers=[self.provider])
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the server
        print('Server exited')
    except Exception as e:
        # Was a bare `except:` that silently swallowed every failure with
        # the same message; report what actually went wrong instead.
        print('Server exited: %s' % e)
def _update_block(self, block: BlockModel, value: Value, update_fields: Set[str]) -> None:
    """Apply a delta (subsequent) update Value to an existing BlockModel."""
    # Only fields that both changed and are known-settable are applied
    changed = value.changedSet(parents=True, expand=False)
    for key in changed.intersection(update_fields):
        data = value[key]
        if isinstance(data, Value):
            data = convert_value_to_dict(data)
        block.apply_change(key.split("."), data)
def convert_dict_to_value(d: Dict) -> Value:
    """Convert a dict into a p4p Value.

    Args:
        d: The dict to convert; None yields the shared EMPTY Value.

    Returns:
        A p4p Value built from the inferred type and values of d.

    Raises:
        RuntimeError: If p4p rejects the inferred Type, re-raised with the
            offending fields/typeid included and the original chained.
    """
    if d is None:
        return EMPTY
    (_, typeid, fields), value_for_set = convert_to_type_tuple_value(d)
    try:
        typ = Type(fields, typeid)
    except RuntimeError as e:
        # Chain the original p4p error so the root cause is preserved
        raise RuntimeError(f"{e} when doing Type({fields}, {typeid})") from e
    return Value(typ, value_for_set)
def wrap(self, value):
    """Wrap dictionary as Value, dill-pickled into a ubyte array with a timestamp."""
    seconds, frac = divmod(time.time(), 1.0)
    payload = np.frombuffer(dill.dumps(value), dtype=np.ubyte)
    return Value(self.type, {
        'value': payload,
        'timeStamp': {
            'secondsPastEpoch': seconds,
            'nanoseconds': frac * 1e9,
        },
    })
def convert_dict_to_value(d):
    # type: (Dict) -> Value
    """Convert a dict into a p4p Value; None yields the shared EMPTY Value."""
    if d is None:
        return EMPTY
    (_, typeid, fields), value_for_set = convert_to_type_tuple_value(d)
    try:
        typ = Type(fields, typeid)
    except RuntimeError as e:
        # Include the offending fields/typeid in the re-raised message
        raise RuntimeError("%s when doing Type(%s, %s)" % (e, fields, typeid))
    return Value(typ, value_for_set)
def wrap(self, value):
    """Wrap dictionary as Value, converting byte/object fields to ubyte arrays.

    Args:
        value: Dict of field name to field value; entries named in
            self.byte_fields are reinterpreted as ubyte arrays, entries in
            self.object_fields are dill-pickled first.

    Returns:
        A timestamped Value of self.type wrapping the converted dict.
    """
    S, NS = divmod(time.time(), 1.0)
    # Work on a shallow copy: the original mutated the caller's dict
    # in place, surprising callers that reused it.
    value = dict(value)
    for field in self.byte_fields:
        value[field] = np.frombuffer(value[field], np.ubyte)
    for field in self.object_fields:
        value[field] = np.frombuffer(dill.dumps(value[field]), np.ubyte)
    return Value(
        self.type, {
            'value': value,
            'timeStamp': {
                'secondsPastEpoch': S,
                'nanoseconds': NS * 1e9,
            },
        })
def createPV(self, pvname, node):
    """
    Takes a string prefix and a dict of pv definitions similar to pcaspy
    and creates PVAccess pv's for them.
    Example PV definitions:
    {'type' : 'int', 'count' : 2, 'value' : [0,0x0fffffff] }
    {'type' : 'float', 'value' : 156.25 }
    """
    # BUG FIX: pvdef was initialised to {} and the `node` argument was
    # never read, so every pvdef[...] lookup below failed. Use the
    # supplied definition.
    pvdef = node
    logger.debug("Creating PV for %s", pvname)
    # Keys in the definition that we do not understand
    # (the original misnamed these "missing_specs")
    unsupported_specs = pvdef.keys() - set(['type', 'count', 'value', 'extra'])
    if unsupported_specs:
        raise Exception(
            "Do not have support for specifier {0} as of yet".format(
                ",".join(unsupported_specs)))
    try:
        tp = __pcastypes2p4ptype__[pvdef['type']]
        starting_val = pvdef.get('value',
                                 __pcastypes2startingval__[pvdef['type']])
        if pvdef.get('count', 1) > 1:
            # Array PV: prefix the type code and default to a filled list
            tp = 'a' + tp
            starting_val = pvdef.get(
                'value',
                [__pcastypes2startingval__[pvdef['type']]] * pvdef['count'])
        init_val = {"value": starting_val}
        extra_defs = []
        if 'extra' in pvdef:
            # 'extra' entries are (field_name, field_type, field_value)
            init_val.update({fn: fv for (fn, _, fv) in pvdef['extra']})
            extra_defs = [(fn, __pcastypes2p4ptype__[ft])
                          for (fn, ft, _) in pvdef['extra']]
        logger.debug("NTScalar(%s, extra=%s).wrap(%s)", tp, extra_defs,
                     init_val)
        pv = SharedPV(initial=NTScalar(tp, extra=extra_defs).wrap(init_val),
                      handler=DefaultPVHandler())
    except Exception:
        # Fall back to a raw p4p Type when NTScalar construction fails.
        # (Was a bare `except:` which also caught KeyboardInterrupt.)
        pv = SharedPV(initial=Value(Type(pvdef['type']), pvdef['value']),
                      handler=DefaultPVHandler())
    # we must keep a reference in order to keep the Handler from being
    # collected
    self.pvs.append(pv)
    logger.debug("Created PV for %s", pvname)
    return pv
def testRpcArgumentsDotted(self):
    """RPC to a dotted "block.method" target passes arguments through."""
    greeting_args = Value(Type([("name", "s")]), dict(name="me"))
    reply = self.ctxt.rpc("TESTHELLO.greet", greeting_args)
    self.assertEqual(dict(reply.items()), {"return": "Hello me"})
def testRpcArguments(self):
    """RPC with a separate method-selector Value passes arguments through."""
    greeting_args = Value(Type([("name", "s")]), dict(name="world"))
    greet_req = Value(Type([("method", "s")]), dict(method="greet"))
    reply = self.ctxt.rpc("TESTHELLO", greeting_args, greet_req)
    self.assertEqual(dict(reply.items()), {"return": "Hello world"})
from enum import Enum
from typing import Any, Dict, List, Tuple, Union

import numpy as np
from annotypes import Array
from p4p import Type, Value

from malcolm.compat import OrderedDict
from malcolm.core import AlarmSeverity, AlarmStatus
from malcolm.core.models import NTTable

# An empty p4p Value, used as the "no data" placeholder in conversions
EMPTY = Value(Type([]))

# Mapping of numpy/python scalar types to p4p type specifier characters
# https://mdavidsaver.github.io/p4p/values.html
type_specifiers = {
    np.bool_: "?",
    np.int8: "b",
    np.uint8: "B",
    np.int16: "h",
    np.uint16: "H",
    np.int32: "i",
    np.uint32: "I",
    np.int64: "l",
    np.uint64: "L",
    np.float32: "f",
    np.float64: "d",
    str: "s",
}

# Make the reverse lookup (specifier character -> scalar type)
specifier_types = {v: k for k, v in type_specifiers.items()}
}, "increment": { "took": empty_method_log_dict, "returned": empty_method_log_dict, "meta": { "takes": empty_map_meta_dict, "description": "Add delta to the counter attribute", "tags": [], "writeable": True, "label": "Increment", "returns": empty_map_meta_dict, }, }, } counter_expected = Value(counter_block_t, counter_dict) string_meta = "malcolm:core/StringMeta:1.0" number_meta = "malcolm:core/NumberMeta:1.0" hello_block_t = Type( [ ("meta", block_meta_tuple), ("health", health_attribute_tuple), ( "greet", ( "S", "malcolm:core/Method:1.1", [ (
def MySharedPV(nt, cb=None):
    """Build a SharedPV whose structure and initial values are derived from *nt*."""
    initial = Value(Type(pvTypes(nt)), pvValues(nt))
    return SharedPV(initial=initial, handler=DefaultPVHandler(cb))