def dict_to_pv_object_structure(self, dict_in):
    """Build a pvaccess PvObject whose structure mirrors dict_in.

    Keys map to pvaccess scalar types (or lists of them for list values);
    nested OrderedDicts recurse.  A "typeid" key is not part of the
    structure but is used as the PvObject type id.

    Returns the constructed pvaccess.PvObject, or None when dict_in
    produces an empty structure.
    """
    structure = OrderedDict()
    typeid = None
    for item in dict_in:
        if item == "typeid":
            typeid = dict_in[item]
        else:
            value = dict_in[item]
            # NOTE: bool must be tested before int (bool is an int subclass)
            if isinstance(value, str):
                structure[item] = pvaccess.STRING
            elif isinstance(value, bool):
                structure[item] = pvaccess.BOOLEAN
            elif isinstance(value, float):
                structure[item] = pvaccess.FLOAT
            elif isinstance(value, int):
                structure[item] = pvaccess.INT
            elif isinstance(value, long_):
                structure[item] = pvaccess.LONG
            elif isinstance(value, list):
                # self.log_debug("List found: %s", item)
                if not value:
                    # Empty lists default to a string array
                    structure[item] = [pvaccess.STRING]
                else:
                    first = value[0]
                    if isinstance(first, str):
                        structure[item] = [pvaccess.STRING]
                    elif isinstance(first, bool):
                        structure[item] = [pvaccess.BOOLEAN]
                    elif isinstance(first, float):
                        structure[item] = [pvaccess.FLOAT]
                    elif isinstance(first, int):
                        structure[item] = [pvaccess.INT]
                    elif isinstance(first, long_):
                        structure[item] = [pvaccess.LONG]
                    elif isinstance(first, OrderedDict):
                        # List of structures -> variant union array
                        structure[item] = [({}, )]
            elif isinstance(value, OrderedDict):
                dict_structure = self.dict_to_pv_object_structure(value)
                if dict_structure:
                    structure[item] = dict_structure
    try:
        if not structure:
            return None
        if not typeid:
            pv_object = pvaccess.PvObject(structure, "")
        else:
            pv_object = pvaccess.PvObject(structure, typeid)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt into the error log before re-raising.
        self.log_error(
            "Unable to create PvObject structure from OrderedDict")
        raise
    return pv_object
def pva_structure_from_value(value, empty_allowed=False):
    """Return the pvaccess type description for value.

    Scalars map to pvaccess scalar types, numpy arrays and lists to typed
    arrays (or variant-union arrays when elements are mixed/structured),
    and dicts to nested PvObjects.  Returns None for a dict that yields an
    empty structure unless empty_allowed is set.
    """
    if value is None:
        # No value: an empty variant structure
        return pvaccess.PvObject({})
    if isinstance(value, str_):
        return pvaccess.STRING
    if isinstance(value, bool):
        # Must come before the int check: bool subclasses int
        return pvaccess.BOOLEAN
    if isinstance(value, (int, long_)):
        return pvaccess.LONG
    if isinstance(value, float):
        return pvaccess.DOUBLE
    if isinstance(value, np.number):
        return pva_dtypes[value.dtype.type]
    if isinstance(value, np.ndarray):
        assert len(value.shape) == 1, \
            "Expected 1d array, got {}".format(value.shape)
        return [pva_dtypes[value.dtype.type]]
    if isinstance(value, StringArray):
        return [pvaccess.STRING]
    if isinstance(value, list):
        # Collect the distinct element types; a single common type gives a
        # typed array, anything else a variant union array
        element_types = set()
        for element in value:
            element_structure = pva_structure_from_value(element)
            if isinstance(element_structure, pvaccess.PvObject):
                # variant union
                element_types.add(())
            else:
                element_types.add(element_structure)
        if len(element_types) == 1:
            return list(element_types)
        # variant union
        return [()]
    if isinstance(value, dict):
        # structure
        fields = OrderedDict()
        typeid = ""
        for key, sub_value in value.items():
            if key == "typeid":
                typeid = sub_value
            else:
                sub_structure = pva_structure_from_value(sub_value)
                if sub_structure is not None:
                    fields[key] = sub_structure
        if fields or empty_allowed:
            return pvaccess.PvObject(fields, typeid)
        return None
    raise ValueError("Cannot get pva type from %s %r" % (type(value), value))
def __init__(self, pv_files, macros): super().__init__(pv_files, macros) # On the A3200 we can read the number of encoder counts per rotation from the controller # Unfortunately the Ensemble does not support this pso_model = self.epics_pvs['PSOControllerModel'].get(as_string=True) if (pso_model == 'A3200'): pso_axis = self.epics_pvs['PSOAxisName'].get(as_string=True) self.epics_pvs['PSOCommand.BOUT'].put("UNITSTOCOUNTS(%s, 360.0)" % pso_axis, wait=True, timeout=10.0) reply = self.epics_pvs['PSOCommand.BINP'].get(as_string=True) counts_per_rotation = float(reply[1:]) self.epics_pvs['PSOCountsPerRotation'].put(counts_per_rotation) # Setting the pva servers to broadcast dark and flat fields if 'PvaStream' in self.pv_prefixes: prefix = self.pv_prefixes['PvaStream'] self.pva_stream_dark = pvaccess.PvObject({ 'value': [pvaccess.pvaccess.ScalarType.FLOAT], 'sizex': pvaccess.pvaccess.ScalarType.INT, 'sizey': pvaccess.pvaccess.ScalarType.INT }) self.pva_server_dark = pvaccess.PvaServer( prefix + 'StreamDarkFields', self.pva_stream_dark) self.pva_stream_flat = pvaccess.PvObject({ 'value': [pvaccess.pvaccess.ScalarType.FLOAT], 'sizex': pvaccess.pvaccess.ScalarType.INT, 'sizey': pvaccess.pvaccess.ScalarType.INT }) self.pva_server_flat = pvaccess.PvaServer( prefix + 'StreamFlatFields', self.pva_stream_flat) self.pva_stream_theta = pvaccess.PvObject({ 'value': [pvaccess.pvaccess.ScalarType.DOUBLE], 'sizex': pvaccess.pvaccess.ScalarType.INT }) self.pva_server_theta = pvaccess.PvaServer(prefix + 'StreamTheta', self.pva_stream_theta) self.stream_init()
def execute(self, args):
    """Execute the mapped method through the Malcolm Post/Return machinery.

    Posts a Post request for (block, method) onto the server process
    queue, blocks until the reply arrives, and converts the reply into a
    pvaccess PvObject for the RPC caller.
    """
    self.log_debug("Execute %s method called on [%s] with: %s",
                   self._method, self._block, args)
    self.log_debug("Structure: %s", args.getStructureDict())
    # Acquire the lock
    with self._lock:
        # We now need to create the Post message and execute it
        endpoint = [self._block, self._method]
        request = Post(None, self._server.q, endpoint,
                       self.parse_variants(args.toDict(True)))
        request.set_id(self._id)
        self._server.process.q.put(request)
        # Now wait for the Post reply
        self.log_debug("Waiting for reply")
        self.wait_for_reply()
        self.log_debug("Reply received")
        response_dict = OrderedDict()
        if isinstance(self._response, Return):
            response_dict = self._response["value"]
            self.log_debug("Response value : %s", self._response["value"])
        elif isinstance(self._response, Error):
            response_dict = self._response.to_dict()
            # The internal message id is of no use to the RPC caller
            response_dict.pop("id")
        if not response_dict:
            # Empty reply: return an empty Malcolm Map
            pv_object = pvaccess.PvObject(OrderedDict({}),
                                          'malcolm:core/Map:1.0')
        else:
            #pv_object = self._server.dict_to_structure(response_dict)
            #self.log_debug("Pv Object structure created")
            #self.log_debug("%s", self._server.strip_type_id(response_dict))
            #pv_object.set(self._server.strip_type_id(response_dict))
            pv_object = self._server.dict_to_pv_object(response_dict)
        self.log_debug("Pv Object value set: %s", pv_object)
        # Add this RPC to the purge list
        #self._server.register_dead_rpc(self._id)
        return pv_object
def annotation(x):
    """Build a Grafana annotation NTTable (time/title/tags/text) from x.

    The annotation timestamp is the midpoint of the requested window in
    milliseconds.  Returns PvString("error") on a malformed request.
    """
    try:
        entity = x.getString("entity")
        starttime = x.getString("starttime")
        endtime = x.getString("endtime")
    except (pva.FieldNotFound, pva.InvalidRequest):
        return pva.PvString("error")
    str_sec = is_to_unixtime_seconds(starttime)
    end_sec = is_to_unixtime_seconds(endtime)
    # Window midpoint, in milliseconds
    midpoint_ms = (int(end_sec) + int(str_sec)) // 2 * 1000
    columns = {
        "column0": [midpoint_ms],
        "column1": [entity],
        "column2": ["test1,test2"],
        "column3": ["test text"],
    }
    table = pva.PvObject(
        {"labels": [pva.STRING],
         "value": {"column0": [pva.ULONG],
                   "column1": [pva.STRING],
                   "column2": [pva.STRING],
                   "column3": [pva.STRING]}},
        "epics:nt/NTTable:1.0")
    table.setScalarArray("labels", ["time", "title", "tags", "text"])
    table.setStructure("value", columns)
    return table
def retrieveServiceConfigs(self, params):
    """Retrieve all configurations existing in the current masar database.

    All parameter values have to be strings since the current NTNameValue
    accepts string only.  A wildcard search is supported for service name,
    configuration name and system, with "*" for multiple characters and
    "?" for a single character.

    Parameters: a dictionary with predefined keys as below:
        'servicename': [optional] the service name, which should be 'masar'
        'configname':  [optional] the configuration name
        'system':      [optional] retrieve configurations belonging to the
                       given system only; all configurations if empty

    Result: list of lists with the following format:
        id []           index value of each configuration
        name []         name of each configuration
        description []  description of each configuration
        created date [] date when each configuration was created
        version []      version number
        status []       status, active/inactive
    otherwise, False if nothing is found.
    """
    if not isinstance(params, dict):
        raise RuntimeError("Parameters have to be a Python dictionary")
    pvobj = {'function': pvaccess.STRING}
    for k in params.keys():
        pvobj[k] = pvaccess.STRING
    # BUG FIX: dict.has_key() was removed in Python 3; "in" works in both
    if 'servicename' not in params:
        pvobj['servicename'] = pvaccess.STRING
        params['servicename'] = 'masar'
    if 'system' not in params:
        pvobj['system'] = pvaccess.STRING
        params['system'] = 'all'
    elif params['system'] == "*":
        params['system'] = 'all'
    params['function'] = 'retrieveServiceConfigs'
    request = pvaccess.PvObject(pvobj)
    request.set(params)
    result = self.rpc.invoke(request)
    label = result.getScalarArray('labels')
    if self.__isFault(label, result):
        return False
    expectedlabel = ['config_idx', 'config_name', 'config_desc',
                     'config_create_date', 'config_version', 'status']
    if label != expectedlabel:
        raise RuntimeError("Data structure not as expected for retrieveServiceConfigs().")
    value = result.getStructure("value")
    return (value[label[0]], value[label[1]], value[label[2]],
            value[label[3]], value[label[4]], value[label[5]])
def main():
    """Serve N_IMAGES generated NTNDArray images on CHANNEL at IMAGE_RATE Hz."""
    server = pva.PvaServer()
    server.start()
    N_IMAGES = 100
    NX = 1024
    NY = 1024
    COLOR_MODE = 0
    IMAGE_RATE = 1.0  # Hz
    EXTRA_FIELDS_OBJECT = pva.PvObject(
        {'customField1': pva.INT, 'customField2': pva.STRING},
        {'customField1': 10, 'customField2': 'GeneratedBy: PvaPy'})
    CHANNEL = 'pvapy:image'
    for image_id in range(N_IMAGES):
        print('Image id: {}'.format(image_id))
        image = createImage(image_id, NX, NY, COLOR_MODE, EXTRA_FIELDS_OBJECT)
        time.sleep(1 / IMAGE_RATE)
        if image_id:
            # Record exists: publish an update
            server.update(CHANNEL, image)
        else:
            # First image: create the record on the server
            server.addRecord(CHANNEL, image)
def updateSnapshotEvent(self, params):
    """Approve a particular snapshot.

    The user can name and comment the snapshot.

    Parameters: a dictionary which can have any combination of the
    following predefined keys:
        'eventid': id of the event to be approved
        'user':    user name identifying who approves/takes this snapshot
        'desc':    any comment describing this snapshot

    Result: True; otherwise False if the operation failed.
    """
    if not isinstance(params, dict):
        raise RuntimeError("Parameters have to be a Python dictionary.")
    # BUG FIX: dict.has_key() was removed in Python 3; "in" works in both
    if 'eventid' not in params:
        raise RuntimeError("snapshot event id is not available.")
    pvobj = {'function': pvaccess.STRING}
    for k in params.keys():
        pvobj[k] = pvaccess.STRING
    params['function'] = 'updateSnapshotEvent'
    request = pvaccess.PvObject(pvobj)
    request.set(params)
    nttable = self.rpc.invoke(request)
    # NOTE(review): sibling methods read 'labels' (plural); confirm this
    # RPC's reply really uses the singular 'label' field name
    label = nttable.getScalarArray('label')
    expectedlabel = ['status']
    if label != expectedlabel:
        raise RuntimeError("Data structure not as expected for updateSnapshotEvent().")
    return nttable.getScalarArray('status')[0]
def get_timesrie(self, entity, str_sec, end_sec, param1):
    """Build a 3-row NTTable time series over [str_sec, end_sec].

    Row values are param1, param1+1, param1+2 (stringified when entity is
    "string" or "str"); timestamps are spaced half a window apart.
    NOTE: the method name ("timesrie") is kept as-is for callers.
    """
    step = (end_sec - str_sec) // 2
    as_string = entity in ("string", "str")
    value = []
    seconds = []
    nano = []
    for offset in range(3):
        sample = param1 + offset
        value.append(str(sample) if as_string else sample)
        seconds.append(str_sec + offset * step)
        nano.append(0)
    # Fall back to LONG for unknown entities
    val_type = TYPES.get(entity, pva.LONG)
    table = pva.PvObject(
        {"labels": [pva.STRING],
         "value": {"column0": [val_type],
                   "column1": [pva.ULONG],
                   "column2": [pva.ULONG]}},
        "epics:nt/NTTable:1.0")
    table.setScalarArray("labels",
                         ["value", "secondsPastEpoch", "nanoseconds"])
    table.setStructure("value", {
        "column0": value,
        "column1": seconds,
        "column2": nano,
    })
    return table
def get_history_ann(self, arg):
    """Return alarm history as a Grafana annotation NTTable.

    Reads optional 'entity' (group), 'message' and 'severity' fields from
    arg, queries the alarm RDB over the requested time window, and
    returns a time/title/tags/text table.  Error replies are produced via
    self._make_error_res().
    """
    group = arg.getString("entity") if arg.hasField("entity") else "all"
    msg = arg.getString("message") if arg.hasField("message") else ""
    svr = arg.getString("severity") if arg.hasField("severity") else ""
    try:
        start, end = self._get_time_from_arg(arg)
    except (pva.FieldNotFound, pva.InvalidRequest, ValueError):
        # BUG FIX: typo "argumets" -> "arguments"
        print("Error: Invalid arguments")
        msg = "Arguments Error: starttime or endtime are invalid"
        msg += ". args = " + str(arg)
        ret = self._make_error_res(msg)
        return ret
    try:
        if group == "all":
            df = self._rdb.history_alarm_all(msg, start, end)
        else:
            df = self._rdb.history_alarm_group(group, msg, start, end)
    except psycopg2.Error:
        # BUG FIX: previously formatted undefined names (entity, starttime,
        # endtime), raising NameError instead of reporting the RDB error
        temp = ("RDB Error: entity = {}, msg = {},"
                "starttime = {}, endtime={}")
        msg = temp.format(group, msg, start, end)
        ret = self._make_error_res(msg)
        return ret
    # Drop lines if it has NaN value
    df = df.dropna()
    # Keep only rows whose severity matches the requested pattern
    df = df[df["severity"].str.match(svr)]
    vals = {
        "column0": [pva.ULONG],
        "column1": [pva.STRING],
        "column2": [pva.STRING],
        "column3": [pva.STRING]
    }
    table = pva.PvObject({
        "labels": [pva.STRING],
        "value": vals
    }, 'epics:nt/NTTable:1.0')
    table.setScalarArray("labels", ["time", "title", "tags", "text"])
    df["eventtime"] = pd.to_datetime(df["eventtime"])
    if df["eventtime"].empty:
        # Empty series cannot be strftime'd; pass it through
        time = df["eventtime"]
    else:
        # Epoch milliseconds: "%s%f" gives seconds+microseconds, trim 3
        time = df["eventtime"].dt.strftime("%s%f").str[:-3]
    value = {
        "column0": time.astype(int).tolist(),
        "column1": df["message"].tolist(),
        "column2": df["group"].tolist(),
        "column3": df["severity"].tolist()
    }
    table.setStructure("value", value)
    return table
def test_dict_to_stucture(self):
    """Check pva_structure_from_value maps scalars, arrays and unions."""
    self.PVA = PvaServerComms(self.p)
    val_dict = OrderedDict()
    val_dict["typeid"] = "type1"
    val_dict["val1"] = "1"
    val_dict["val2"] = np.int32(2)
    val_dict["val3"] = True
    val_dict["val4"] = np.int64(0)
    val_dict["val5"] = np.float64(0.5)
    val_dict["val6"] = StringArray('', '')
    val_dict["val7"] = np.array([5, 1], dtype=np.int32)
    val_dict["val8"] = [True, False]
    val_dict["val9"] = np.array([0, 1], dtype=np.int64)
    val_dict["val10"] = np.array([0.2, 0.6], dtype=np.float64)
    val = self.PVA.pva_structure_from_value(val_dict)
    test_dict = OrderedDict()
    test_dict["val1"] = pvaccess.STRING
    test_dict["val2"] = pvaccess.INT
    test_dict["val3"] = pvaccess.BOOLEAN
    test_dict["val4"] = pvaccess.LONG
    test_dict["val5"] = pvaccess.DOUBLE
    test_dict["val6"] = [pvaccess.STRING]
    test_dict["val7"] = [pvaccess.INT]
    test_dict["val8"] = [pvaccess.BOOLEAN]
    test_dict["val9"] = [pvaccess.LONG]
    test_dict["val10"] = [pvaccess.DOUBLE]
    test_val = pvaccess.PvObject(test_dict, "type1")
    # FIX: assertEquals is a deprecated alias (removed in newer unittest);
    # use assertEqual
    self.assertEqual(val, test_val)
    # Test the variant union array type
    val = self.PVA.pva_structure_from_value(
        {"union_array": [{"val1": 1}, {"val2": "2"}]})
    test_dict = OrderedDict()
    test_dict["union_array"] = [()]
    test_val = pvaccess.PvObject(test_dict, "")
    self.assertEqual(val, test_val)
    # An empty list is also a variant union array
    val = self.PVA.pva_structure_from_value({"union_array": []})
    test_dict = OrderedDict()
    test_dict["union_array"] = [()]
    test_val = pvaccess.PvObject(test_dict, "")
    self.assertEqual(val, test_val)
def hash(x):
    """Return a copy of x's structure with an md5 'hash' field of str(x).

    NOTE: this function shadows the builtin hash(); the name is kept for
    compatibility with existing callers.
    """
    md5 = hashlib.md5()
    # NOTE(review): hashlib requires bytes under Python 3 -- this code
    # appears to target Python 2; confirm before porting
    md5.update(str(x))
    h = md5.hexdigest()
    # FIX: renamed local from "dict" to avoid shadowing the builtin
    structure = x.getStructureDict()
    structure['hash'] = pvaccess.STRING
    response = pvaccess.PvObject(structure)
    response.setString('hash', h)
    return response
def retrieveServiceEvents(self, params):
    """Retrieve the event list belonging to a snapshot configuration.

    It retrieves event id, user name, comment, and date, but without real
    data from the IOC.  All parameter values have to be strings since the
    current NTNameValue accepts string only.  A wildcard search is
    supported for comment and user name, with "*" for multiple characters
    and "?" for a single character.  Internally, the date is saved in UTC.

    Parameters: a dictionary which can have any combination of the
    following predefined keys:
        'configid': id identifying which configuration it belongs to
        'start':    the time window start string
        'end':      the time window end string
        'comment':  events containing the given comment
        'user':     who did that event

    Result: list of lists with the following format:
        id []        list of each event id
        comment []   comment for each event
        user name [] user name list
        date []      when each event happened, in UTC
    otherwise, False if nothing is found.
    """
    if not isinstance(params, dict):
        raise RuntimeError("Parameters have to be a Python dictionary.")
    # BUG FIX: dict.has_key() was removed in Python 3; "in" works in both
    if 'configid' not in params:
        raise RuntimeError("service configuration ID is not available.")
    pvobj = {'function': pvaccess.STRING}
    for k in params.keys():
        pvobj[k] = pvaccess.STRING
    params['function'] = 'retrieveServiceEvents'
    request = pvaccess.PvObject(pvobj)
    request.set(params)
    nttable = self.rpc.invoke(request)
    label = nttable.getScalarArray('labels')
    # FIX: check for a fault reply BEFORE validating the labels, consistent
    # with retrieveServiceConfigs(); a fault reply carries different labels
    # and previously raised RuntimeError instead of returning False
    if self.__isFault(label, nttable):
        return False
    expectedlabel = ["event_id", "config_id", "comments", "event_time",
                     "user_name"]
    if label != expectedlabel:
        raise RuntimeError("Data structure not as expected for retrieveServiceEvents().")
    # 0: service_event_id,
    # 1: service_config_id,
    # 2: service_event_user_tag,
    # 3: service_event_UTC_time,
    # 4: service_event_user_name
    value = nttable.getStructure("value")
    return (value[label[0]], value[label[2]], value[label[3]],
            value[label[4]])
def _create_request_pvdate(self, query_type, query_val, path="", nturi=False):
    """Create RPC request pvData.

    Parameters
    ----------
    query_type : dict
        dict of pvaccess types for the query
    query_val : dict
        dict of query values
    path : str
        path for the NTURI-style request
    nturi : bool
        whether to wrap the request in the NTURI normative type

    Returns
    -------
    pvaccess.PvObject
        pvAccess RPC request pvData
    """
    if not nturi:
        # Plain request: the query structure itself
        request = pva.PvObject(query_type)
        request.set(query_val)
        return request
    nturi_fields = {
        "scheme": pva.STRING,
        "authority": pva.STRING,
        "path": pva.STRING,
        "query": query_type,
    }
    request = pva.PvObject(nturi_fields, "epics:nt/NTURI:1.0")
    request["scheme"] = "pva"
    request["authority"] = ""
    request["path"] = str(path)
    request.setStructure("query", query_val)
    return request
def _execute_rpc(self, request):
    """Invoke the pvAccess RPC described by request and wrap its reply."""
    # The method name travels alongside the call as a pvData structure
    method_pv = pvaccess.PvObject({'method': pvaccess.STRING})
    method_pv.set({'method': request.path[1]})
    # Connect to the channel and create the RPC client
    rpc = pvaccess.RpcClient(request.path[0], method_pv)
    # Marshal the parameters, invoke, then unmarshal the reply
    params = dict_to_pv_object(request.parameters)
    value = rpc.invoke(params)
    reply = strip_tuples(value.toDict(True))
    return self._response_from_dict(request, reply)
def search(x):
    """Return entity names starting with the query as an NTScalarArray."""
    try:
        query = x.getString("entity")
        name = x.getString("name")
    except (pva.FieldNotFound, pva.InvalidRequest):
        return pva.PvString("error")
    # Only the "entity" field is searchable; anything else matches nothing
    candidates = ENTITIES if str(name) == "entity" else []
    matches = [candidate for candidate in candidates
               if candidate.startswith(query)]
    pv = pva.PvObject({"value": [pva.STRING]}, "epics:nt/NTScalarArray:1.0")
    pv["value"] = matches
    return pv
def get_current(self, arg):
    """Return current alarms matching entity/message patterns as an NTTable."""
    entity = arg.getString("entity") if arg.hasField("entity") else ".*"
    msg = arg.getString("message") if arg.hasField("message") else ""
    try:
        df = self._get_current_alarm(entity, msg)
    except (ValueError, DatabaseError):
        msg = "RDB Error: entity = {}, msg = {}".format(entity, msg)
        return self._make_error_res(msg)
    except re.error as e:
        msg = "regex error ({}) entity = {}, msg = {}"
        msg = msg.format(e, entity, msg)
        return self._make_error_res(msg)
    # Seven-column alarm table
    column_types = {
        "column0": [pva.STRING],
        "column1": [pva.STRING],
        "column2": [pva.LONG],
        "column3": [pva.STRING],
        "column4": [pva.STRING],
        "column5": [pva.STRING],
        "column6": [pva.STRING],
    }
    table = pva.PvObject(
        {"labels": [pva.STRING], "value": column_types},
        'epics:nt/NTTable:1.0')
    table.setScalarArray("labels", [
        "time", "group", "severity_id", "severity", "status", "message",
        "record"
    ])
    table.setStructure("value", {
        "column0": df["alarm_time"].astype(str).tolist(),
        "column1": df["groups"].tolist(),
        "column2": df["severity_id"].tolist(),
        "column3": df["severity"].tolist(),
        "column4": df["status"].tolist(),
        "column5": df["descr"].tolist(),
        "column6": df["pv_name"].tolist(),
    })
    return table
def search_nturi_style(self, x):
    """Unwrap an NTURI-style request and delegate to search()."""
    try:
        query = x.getStructure("query")
    except (pva.FieldNotFound, pva.InvalidRequest):
        return pva.PvString("error")
    # Rebuild the flat request structure search() expects
    flat_request = pva.PvObject({
        "entity": pva.STRING,
        "name": pva.STRING,
    })
    flat_request.setString("entity", query["entity"])
    flat_request.setString("name", query["name"])
    return self.search(flat_request)
def __init__(self, stype):
    """Set up pvAccess channels for streaming projections and create the
    reconstruction output PV.

    stype: 'server' makes this instance host the reconstruction PV itself;
    any other value assumes the PV is served elsewhere.
    """
    # Fixed problem size; rate is computed but not used below
    [ntheta, nz, n] = [1024, 1024, 1024]
    rate = 5 * 1024**3  # GB/s
    buffer_size = 100000
    # queue
    self.data_queue = queue.Queue(maxsize=buffer_size)
    self.epics_pvs = {}
    # pva type channel that contains projection and metadata
    image_pv_name = "2bmbSP2:Pva1:"
    self.epics_pvs['PvaPImage'] = pva.Channel(image_pv_name + 'Image')
    self.epics_pvs['PvaPDataType_RBV'] = pva.Channel(image_pv_name +
                                                     'DataType_RBV')
    self.pva_plugin_image = self.epics_pvs['PvaPImage']
    # create pva type pv for reconstruction by copying metadata from the
    # data pv, but replacing the sizes.  This way the ADViewer (NDViewer)
    # plugin can also be used for visualizing reconstructions.
    pva_image_data = self.pva_plugin_image.get('')
    pva_image_dict = pva_image_data.getStructureDict()
    self.pv_rec = pva.PvObject(pva_image_dict)
    # run server for reconstruction pv
    recon_pva_name = "2bmb:Rec"
    if (stype == 'server'):
        self.server_rec = pva.PvaServer(recon_pva_name, self.pv_rec)
    pva_image_data = self.pva_plugin_image.get('')
    width = pva_image_data['dimension'][0]['size']
    height = pva_image_data['dimension'][1]['size']
    self.pv_rec['dimension'] = [{'size': width, 'fullSize': width,
                                 'binning': 1},
                                {'size': height, 'fullSize': height,
                                 'binning': 1}]
    self.epics_pvs['PvaPImage'] = pva.Channel(recon_pva_name)
    self.pva_rec_image = self.epics_pvs['PvaPImage']
    #self.pv_rec['value'] = ({'floatValue': rec.flatten()},)
    # self.theta = self.epics_pvs['ThetaArray'].get()[:self.epics_pvs['NumAngles'].get()]
    # start monitoring projection data
    datatype_list = self.epics_pvs['PvaPDataType_RBV'].get()['value']
    self.datatype = datatype_list['choices'][datatype_list['index']].lower()
    # NOTE(review): the datatype read from the PV is immediately overridden
    # here -- confirm whether the hard-coded 'uint16' is intentional
    self.datatype = 'uint16'
    self.buffer_size = buffer_size
    self.height = height
    self.width = width
    self.cur_id = 0
    # Scratch buffer for one flattened frame
    self.tmp = np.zeros([height * width], dtype='uint16')
def get_illegal_field(self, x):
    """Return a fixed NTTable whose value columns are named
    'value'/'seconds'/'nanoseconds' instead of the conventional
    column0..N form (a deliberately non-conforming reply, per the name).
    """
    column_types = {
        "value": [pva.ULONG],
        "seconds": [pva.ULONG],
        "nanoseconds": [pva.ULONG],
    }
    table = pva.PvObject(
        {"labels": [pva.STRING], "value": column_types},
        "epics:nt/NTTable:1.0")
    table.setScalarArray("labels", ["value", "seconds", "nanoseconds"])
    zero_row = {"value": [0], "seconds": [0], "nanoseconds": [0]}
    table.setStructure("value", zero_row)
    return table
def get(x):
    """Dispatch on the requested entity and return its data as an NTTable."""
    try:
        entity = x.getString("entity")
        starttime = x.getString("starttime")
        endtime = x.getString("endtime")
    except (pva.FieldNotFound, pva.InvalidRequest):
        return pva.PvString("error")
    param1 = x.getString("param1") if x.hasField("param1") else None
    str_sec = is_to_unixtime_seconds(starttime)
    end_sec = is_to_unixtime_seconds(endtime)
    # Pick the generator for the requested entity; ramp is the default
    if entity in ("sine", "cos"):
        data = get_sine_cos(entity, str_sec, end_sec, param1)
    elif entity == "string":
        data = get_string(str_sec, end_sec)
    elif entity == "point3":
        data = get_point3(str_sec, end_sec)
    else:
        data = get_ramp(str_sec, end_sec, param1)
    table = pva.PvObject(
        {"labels": [pva.STRING],
         "value": {"column0": [data["type"]],
                   "column1": [pva.DOUBLE],
                   "column2": [pva.DOUBLE]}},
        "epics:nt/NTTable:1.0")
    table.setScalarArray("labels",
                         ["value", "secondsPastEpoch", "nanoseconds"])
    table.setStructure("value", {
        "column0": data["value"],
        "column1": data["secondsPastEpoch"],
        "column2": data["nano"],
    })
    return table
def get_inconsistent_field(self, x):
    """Return a fixed NTTable whose labels do not match its value column
    names ('error1'..'error3') -- an inconsistent reply, per the name.
    """
    error_columns = {
        "error1": [pva.ULONG],
        "error2": [pva.ULONG],
        "error3": [pva.ULONG],
    }
    table = pva.PvObject(
        {"labels": [pva.STRING], "value": error_columns},
        "epics:nt/NTTable:1.0")
    table.setScalarArray("labels",
                         ["value", "secondsPastEpoch", "nanoseconds"])
    table.setStructure("value",
                       {"error1": [0], "error2": [0], "error3": [0]})
    return table
def get_full_machine_model(use_design=False):
    """Fetch the full-machine RMAT model over pvAccess as a numpy record array.

    use_design: query the DESIGN model instead of the EXTANT one.

    Returns a structured array with ordinal, element_name,
    epics_channel_access_name, position_index, z_position and the 6x6
    r_mat for each element.

    Raises NoPVAccessException when pvaccess is unavailable.
    """
    if not PVACCESS_AVAILABLE:
        raise NoPVAccessException
    request = pvaccess.PvObject(
        OrderedDict([('scheme', pvaccess.STRING), ('path', pvaccess.STRING)]),
        'epics:nt/NTURI:1.0')
    model_type = "DESIGN" if use_design else "EXTANT"
    path = "MODEL:RMATS:{}:FULLMACHINE".format(model_type)
    rpc = pvaccess.RpcClient(path)
    request.set(OrderedDict([('scheme', 'pva'), ('path', path)]))
    response = rpc.invoke(request).getStructure()
    # BUG FIX: 'i16' is not a valid numpy dtype code (it would denote a
    # 16-byte integer, which numpy rejects, so np.zeros always raised);
    # 'i8' (64-bit int) holds any ordinal value.
    m = np.zeros(len(response['ELEMENT_NAME']),
                 dtype=[('ordinal', 'i8'),
                        ('element_name', 'a60'),
                        ('epics_channel_access_name', 'a60'),
                        ('position_index', 'a6'),
                        ('z_position', 'float32'),
                        ('r_mat', 'float32', (6, 6))])
    m['ordinal'] = response['ORDINAL']
    m['element_name'] = response['ELEMENT_NAME']
    m['epics_channel_access_name'] = response['EPICS_CHANNEL_ACCESS_NAME']
    m['position_index'] = response['POSITION_INDEX']
    m['z_position'] = response['Z_POSITION']
    # Stack the 36 per-element transfer-matrix columns (row-major R11..R66)
    # into an (N, 6, 6) array
    rmat_keys = ['R{}{}'.format(i, j)
                 for i in range(1, 7) for j in range(1, 7)]
    m['r_mat'] = np.reshape(
        np.array([response[k] for k in rmat_keys]).T, (-1, 6, 6))
    return m
def get_current_ann(self, arg):
    """Return current alarms as a Grafana annotation NTTable
    (time/title/tags/text)."""
    entity = arg.getString("entity") if arg.hasField("entity") else ".*"
    msg = arg.getString("message") if arg.hasField("message") else ""
    try:
        df = self._get_current_alarm(entity, msg)
    except (ValueError, DatabaseError):
        msg = "RDB Error: entity = {}, msg = {}".format(entity, msg)
        return self._make_error_res(msg)
    except re.error as e:
        msg = "regex error ({}) entity = {}, msg = {}"
        msg = msg.format(e, entity, msg)
        return self._make_error_res(msg)
    table = pva.PvObject(
        {"labels": [pva.STRING],
         "value": {"column0": [pva.ULONG],
                   "column1": [pva.STRING],
                   "column2": [pva.STRING],
                   "column3": [pva.STRING]}},
        'epics:nt/NTTable:1.0')
    table.setScalarArray("labels", ["time", "title", "tags", "text"])
    if df["alarm_time"].empty:
        # Empty series cannot be strftime'd; pass it through unchanged
        time = df["alarm_time"]
    else:
        # Epoch milliseconds: "%s%f" gives seconds+microseconds, trim 3
        time = df["alarm_time"].dt.strftime("%s%f").str[:-3]
    table.setStructure("value", {
        "column0": time.astype(int).tolist(),
        "column1": df["descr"].tolist(),
        "column2": df["groups"].tolist(),
        "column3": df["pv_name"].tolist(),
    })
    return table
def retrieveSystemList(self):
    """Retrieve all systems defined for the masar service.

    Parameters: None
    Result: sorted list of unique system names; False if nothing found.
    """
    request = pvaccess.PvObject({'system': pvaccess.STRING,
                                 'function': pvaccess.STRING})
    request.set({'system': '*', 'function': 'retrieveServiceConfigProps'})
    result = self.rpc.invoke(request)
    label = result.getScalarArray('labels')
    if self.__isFault(label, result):
        return False
    expectedlabel = ['config_prop_id', 'config_idx', 'system_key',
                     'system_val']
    if label != expectedlabel:
        raise RuntimeError("Data structure not as expected for retrieveSystemList().")
    # The last column holds the system names; deduplicate and sort
    systems = result.getStructure("value")[label[-1]]
    return sorted(set(systems))
def get_nturi_style(self, x):
    """Unwrap an NTURI-style request and delegate to get()."""
    try:
        query = x.getStructure("query")
    except (pva.FieldNotFound, pva.InvalidRequest):
        return pva.PvString("error")
    # Rebuild the flat request structure get() expects
    flat_request = pva.PvObject({
        "entity": pva.STRING,
        "starttime": pva.STRING,
        "endtime": pva.STRING,
        "param1": pva.STRING,
    })
    for field in ("entity", "starttime", "endtime", "param1"):
        flat_request.setString(field, query[field])
    return self.get(flat_request)
def execute_rpc(self, request):
    """Invoke the pvAccess RPC described by request and wrap the reply
    as a Return or Error object."""
    # The method name travels alongside the call as a pvData structure
    method_pv = pvaccess.PvObject({'method': pvaccess.STRING})
    method_pv.set({'method': request["endpoint"][1]})
    # Connect to the channel and create the RPC client
    rpc = pvaccess.RpcClient(request["endpoint"][0], method_pv)
    # Construct the pv object from the parameters
    params = self.dict_to_pv_object(request["parameters"])
    self.log_debug("PvObject parameters: %s", params)
    # Call the method on the RPC object
    response = rpc.invoke(params)
    self.log_debug("Response: %s", response)
    # Now create the Return object and populate it with the response
    value = response.toDict(True)
    if 'typeid' not in value:
        return Error(id_=request["id"], message="No valid return typeid")
    if value['typeid'] == 'malcolm:core/Error:1.0':
        return Error(id_=request["id"], message=value['message'])
    return Return(id_=request["id"], value=value)
def get_table(self, entity, str_sec, end_sec, param1):
    """Return a fixed 3-row NTTable with value/time/status/severity columns.

    entity, str_sec, end_sec and param1 are accepted for interface
    compatibility with the sibling get_* handlers, but the returned data
    is constant.  (FIX: removed an unused local, `interval`, that was
    computed from the window but never read.)
    """
    value = [1.1, 1.2, 2.0]
    seconds = [1460589140, 1460589141, 1460589142]
    nano = [16235768, 164235245, 164235256]
    status = [0, 0, 1]
    severity = [0, 0, 3]
    time = [
        "2016-04-04T08:10:14", "2016-04-04T08:10:15", "2016-04-04T08:10:16"
    ]
    vals = {
        "column0": [pva.DOUBLE],
        "column1": [pva.ULONG],
        "column2": [pva.ULONG],
        "column3": [pva.ULONG],
        "column4": [pva.ULONG],
        "column5": [pva.STRING]
    }
    table = pva.PvObject({
        "labels": [pva.STRING],
        "value": vals
    }, "epics:nt/NTTable:1.0")
    table.setScalarArray("labels", [
        "value", "secondsPastEpoch", "nanoseconds", "status", "severity",
        "time"
    ])
    table.setStructure(
        "value", {
            "column0": value,
            "column1": seconds,
            "column2": nano,
            "column3": status,
            "column4": severity,
            "column5": time
        })
    return table
def streaming(theta, nthetap):
    """
    Main computational function: take data from pvdata
    ('2bmbSP1:Pva1:Image'), reconstruct orthogonal slices and write the
    result to pvrec ('AdImage').
    """
    # init streaming pv for the detector
    c = pva.Channel('2bmbSP1:Pva1:Image')
    pvdata = c.get('')
    # take dimensions
    n = pvdata['dimension'][0]['size']
    nz = pvdata['dimension'][1]['size']
    # init streaming pv for reconstruction by copying the structure
    # dictionary from pvdata
    pvdatad = pvdata.getStructureDict()
    pvrec = pva.PvObject(pvdatad)
    # set dimensions for data: three n-wide slices side by side
    pvrec['dimension'] = [{
        'size': 3 * n,
        'fullSize': 3 * n,
        'binning': 1
    }, {
        'size': n,
        'fullSize': n,
        'binning': 1
    }]
    s = pva.PvaServer('AdImage', pvrec)
    # init with slices through the middle
    ix = n // 2
    iy = n // 2
    iz = nz // 2
    # I suggest using buffers that hold only a certain number of angles,
    # e.g. nthetap=50; this buffer is continuously updated by monitoring
    # the detector pv (function addProjection), called inside pv monitor
    databuffer = np.zeros([nthetap, nz, n], dtype='float32')
    thetabuffer = np.zeros(nthetap, dtype='float32')

    def addProjection(pv):
        # Ring-buffer insert keyed by the frame's uniqueId
        curid = pv['uniqueId']
        databuffer[np.mod(curid, nthetap)] = pv['value'][0]['ubyteValue'].reshape(
            nz, n).astype('float32')
        # NOTE(review): ntheta is not defined in this function or its
        # parameters -- presumably a module-level global; confirm
        thetabuffer[np.mod(curid, nthetap)] = theta[np.mod(
            curid, ntheta)]  # take some theta with respect to id

    c.monitor(addProjection, '')
    # solver class on gpu
    with OrthoRec(nthetap, n, nz) as slv:
        # memory for result slices
        recall = np.zeros([n, 3 * n], dtype='float32')
        while (True):  # infinite loop over angular partitions
            # new = take ix,iy,iz from gui
            new = None
            flgx, flgy, flgz = 0, 0, 0  # recompute slice or not
            if (new != None):
                [newx, newy, newz] = new
                if (newx != ix):
                    ix = newx  # change slice id
                    flgx = 1  # recompute flg
                if (newy != iy):
                    iy = newy
                    flgy = 1
                if (newz != iz):
                    iz = newz
                    flgz = 1
            # take interlaced projections and corresponding angles
            # I guess there should be read/write locks here.
            datap = databuffer.copy()
            thetap = thetabuffer.copy()
            print('data partition norm:', np.linalg.norm(datap))
            print('partition angles:', thetap)
            # recover 3 ortho slices
            recx, recy, recz = slv.rec_ortho(datap, thetap, n // 2, ix, iy,
                                             iz, flgx, flgy, flgz)
            # concatenate (supposing nz<n)
            recall[:nz, :n] = recx
            recall[:nz, n:2 * n] = recy
            recall[:, 2 * n:] = recz
            # 1s reconstruction rate
            time.sleep(1)
            # write to pv
            pvrec['value'] = ({'floatValue': recall.flatten()}, )
def getLiveMachine(self, params):
    """Get live data for the given pv list, using pv name as both key and value.

    Same as the retrieveSnapshot function: for a scalar pv, its value is
    carried in string, double and long; for an array pv, its value is
    carried in array_value.  Clients need to check whether the pv is an
    array via is_array and its type via dbr_type.

    Parameters: a dictionary which can have any combination of the
    following predefined keys:
        'servicename': [optional] exact service name if given
        'configname':  exact configuration name
        'comment':     [optional] exact comment

    Result: list of lists with the following format:
        pv name []          string value []     double value []
        long value []       dbr_type []         isConnected []
        secondsPastEpoch [] nanoSeconds []      alarmSeverity []
        alarmStatus []      is_array []         array_value [[]]
    otherwise, False if the operation failed.
    """
    if not isinstance(params, dict):
        raise RuntimeError("Parameters have to be a Python dictionary.")
    # BUG FIX: dict.has_key() was removed in Python 3; "in" works in both
    if 'configname' not in params:
        raise RuntimeError("service config name is not available.")
    pvobj = {'function': pvaccess.STRING}
    for k in params.keys():
        pvobj[k] = pvaccess.STRING
    params['function'] = 'getLiveMachine'
    request = pvaccess.PvObject(pvobj)
    request.set(params)
    nttable = self.rpc.invoke(request)
    label = nttable.getScalarArray('labels')
    expectedlabel = ['pv name', 'string value', 'double value', 'long value',
                     'dbr type', 'isConnected', 'secondsPastEpoch',
                     'nanoSeconds', 'timeStampTag', 'alarmSeverity',
                     'alarmStatus', 'alarmMessage', 'is_array', 'array_value']
    label4error = ['status']
    if label == expectedlabel:
        # [pv name, string value, double value, long value,
        #  dbr type, isConnected, secondsPastEpoch, nanoSeconds,
        #  alarmSeverity, alarmStatus, is_array, array_value]
        # Note: columns 8 (timeStampTag) and 11 (alarmMessage) are skipped.
        return (nttable.getValue(0), nttable.getValue(1),
                nttable.getValue(2), nttable.getValue(3),
                nttable.getValue(4), nttable.getValue(5),
                nttable.getValue(6), nttable.getValue(7),
                nttable.getValue(9), nttable.getValue(10),
                nttable.getValue(12), nttable.getValue(13))
    elif label == label4error:
        # Fault reply: a single 'status' column carrying the message
        return nttable.getScalarArray('status')[0]
    else:
        raise RuntimeError("Data structure not as expected for getLiveMachine().")