async def _process_and_pyonize(self, target, obj):
    try:
        return pyon.encode({
            "status": "ok",
            "ret": await self._process_action(target, obj)
        })
    except (asyncio.CancelledError, SystemExit):
        raise
    except:
        return pyon.encode({
            "status": "failed",
            "exception": current_exc_packed()
        })
def _show_devices(devices):
    clear_screen()
    table = PrettyTable(["Name", "Description"])
    table.align["Description"] = "l"
    for k, v in sorted(devices.items(), key=itemgetter(0)):
        table.add_row([k, pyon.encode(v, True)])
    print(table)
def test_encdec(self):
    for enc in pyon.encode, lambda x: pyon.encode(x, True):
        with self.subTest(enc=enc):
            self.assertEqual(pyon.decode(enc(_pyon_test_object)),
                             _pyon_test_object)
            # NaNs don't compare equal, so test separately.
            assert np.isnan(pyon.decode(enc(np.nan)))
async def _handle_connection_cr(self, reader, writer):
    try:
        line = await reader.readline()
        if line != _protocol_banner:
            return
        line = await reader.readline()
        if not line:
            return
        notifier_name = line.decode()[:-1]

        try:
            notifier = self.notifiers[notifier_name]
        except KeyError:
            return

        obj = {"action": ModAction.init.value, "struct": notifier.raw_view}
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())

        queue = asyncio.Queue()
        self._recipients[notifier_name].add(queue)
        try:
            while True:
                line = await queue.get()
                writer.write(line)
                # raise exception on connection error
                await writer.drain()
        finally:
            self._recipients[notifier_name].remove(queue)
    except (ConnectionError, TimeoutError):
        # subscribers disconnecting are a normal occurrence
        pass
    finally:
        writer.close()
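# Not part of the original code: a minimal subscriber-side sketch for the
# publisher coroutine above. Hedged assumptions: the banner bytes sent first
# must equal the server's `_protocol_banner` (the value used here is only a
# placeholder), and every line received afterwards is one PYON-encoded mod,
# exactly as written by `publish()` further down in this collection.
import asyncio

from sipyco import pyon  # or the project's local pyon module


async def subscribe(host, port, notifier_name, banner=b"ARTIQ sync_struct\n"):
    reader, writer = await asyncio.open_connection(host, port)
    try:
        writer.write(banner)                          # must match _protocol_banner
        writer.write((notifier_name + "\n").encode())
        await writer.drain()
        while True:
            line = await reader.readline()
            if not line:
                break                                 # publisher closed the connection
            mod = pyon.decode(line.decode())          # first mod carries the init struct
            print(mod)
    finally:
        writer.close()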
def test_rpc_encode_function(self):
    """Test that `pc_rpc` can encode a function properly.

    Used in `get_rpc_method_list` part of
    :meth:`sipyco.pc_rpc.Server._process_action`
    """

    def _annotated_function(
            arg1: str, arg2: np.ndarray = np.array([1, ])) -> np.ndarray:
        """Sample docstring."""
        return arg1

    argspec_documented, docstring = pc_rpc.Server._document_function(
        _annotated_function)
    self.assertEqual(docstring, "Sample docstring.")

    # purposefully ignore how argspec["annotations"] is treated.
    # allows option to change PYON later to encode annotations.
    argspec_master = dict(
        inspect.getfullargspec(_annotated_function)._asdict())
    argspec_without_annotation = argspec_master.copy()
    del argspec_without_annotation["annotations"]
    # check if all items (excluding annotations) are same in both dictionaries
    self.assertLessEqual(argspec_without_annotation.items(),
                         argspec_documented.items())
    self.assertDictEqual(argspec_documented,
                         pyon.decode(pyon.encode(argspec_documented)))
def write_results():
    filename = "{:09}-{}.h5".format(rid, exp.__name__)
    with h5py.File(filename, "w") as f:
        dataset_mgr.write_hdf5(f)
        f["artiq_version"] = artiq_version
        f["rid"] = rid
        f["start_time"] = start_time
        f["run_time"] = run_time
        f["expid"] = pyon.encode(expid)
def test_encdec_array(self):
    orig = {k: (np.array(v), np.array([v]))
            for k, v in _pyon_test_object.items()
            if np.isscalar(v)}
    for enc in pyon.encode, lambda x: pyon.encode(x, True):
        result = pyon.decode(enc(orig))
        for k in orig:
            with self.subTest(enc=enc, k=k, v=orig[k]):
                np.testing.assert_equal(result[k], orig[k])
def format_influxdb(v):
    if np.issubdtype(type(v), np.bool_):
        return "bool={}".format(v)
    if np.issubdtype(type(v), np.integer):
        return "int={}i".format(v)
    if np.issubdtype(type(v), np.floating):
        return "float={}".format(v)
    if np.issubdtype(type(v), np.str_):
        return "str=\"{}\"".format(
            v.replace('\\', '\\\\').replace('"', '\\"'))
    return "pyon=\"{}\"".format(pyon.encode(v).replace('"', '\\"'))
async def _handle_connection_cr(self, reader, writer):
    try:
        line = await reader.readline()
        if line != _init_string:
            return

        obj = {
            "targets": sorted(self.targets.keys()),
            "description": self.description
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        line = await reader.readline()
        if not line:
            return
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return

        if callable(target):
            target = target()

        valid_methods = inspect.getmembers(target, inspect.ismethod)
        valid_methods = {m[0] for m in valid_methods}
        if self.builtin_terminate:
            valid_methods.add("terminate")
        writer.write((pyon.encode(valid_methods) + "\n").encode())

        while True:
            line = await reader.readline()
            if not line:
                break
            reply = await self._process_action(target,
                                               pyon.decode(line.decode()))
            writer.write((pyon.encode(reply) + "\n").encode())
    except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError):
        # May happen on Windows when the client disconnects
        pass
    finally:
        writer.close()
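# Not part of the original code: a minimal synchronous client sketch for the
# line-based RPC protocol handled above. Hedged assumptions: the banner must
# equal the server's `_init_string` (the value here is only a placeholder), and
# the request/reply dict layout ("action"/"name"/"args"/"kwargs" going in,
# "status"/"ret" coming back) mirrors what `_process_action` and
# `_process_and_pyonize` use; both are illustrative, not verified.
import socket

from sipyco import pyon  # or the project's local pyon module

_ASSUMED_BANNER = b"ARTIQ pc_rpc\n"   # assumption: must equal _init_string


def call_remote(host, port, target_name, method, *args, **kwargs):
    with socket.create_connection((host, port)) as sock:
        f = sock.makefile("rwb")
        f.write(_ASSUMED_BANNER)
        f.flush()
        pyon.decode(f.readline().decode())            # {"targets": ..., "description": ...}
        f.write((target_name + "\n").encode())
        f.flush()
        pyon.decode(f.readline().decode())            # set of valid method names
        request = {"action": "call", "name": method,
                   "args": args, "kwargs": kwargs}
        f.write((pyon.encode(request) + "\n").encode())
        f.flush()
        reply = pyon.decode(f.readline().decode())
        if reply.get("status") == "ok":
            return reply["ret"]
        raise RuntimeError(reply)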
def broadcast(self, name, obj):
    if name in self._recipients:
        line = pyon.encode(obj) + "\n"
        line = line.encode()
        for recipient in self._recipients[name]:
            try:
                recipient.put_nowait(line)
            except asyncio.QueueFull:
                # do not log: log messages may be sent back to us
                # as broadcasts, and cause infinite recursion.
                pass
def test_wrong_fqn_override(self):
    exp = self.create(ScanAddOneExp, env_args={
        PARAMS_ARG_KEY: pyon.encode({
            "overrides": {
                "non_existent": [{
                    "path": "*",
                    "value": 3
                }]
            }
        })
    })
    with self.assertRaises(KeyError):
        exp.prepare()
def format_influxdb(v, tag=True):
    if np.issubdtype(type(v), np.bool_):
        return "{}".format(v)
    if np.issubdtype(type(v), np.integer):
        return "{}i".format(v)
    if np.issubdtype(type(v), np.floating):
        return "{}".format(v)
    if not np.issubdtype(type(v), np.str_):
        v = pyon.encode(v)
    if tag:
        for i in ",= ":
            v = v.replace(i, "\\" + i)
        return v
    else:
        return "\"{}\"".format(v.replace('"', '\\"'))
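# Not part of the original code: expected outputs of the helper above, derived
# by tracing its branches (the comments are expectations, not captured output).
format_influxdb(True)                  # -> "True"
format_influxdb(42)                    # -> "42i"  (integer values get the "i" suffix)
format_influxdb(6.28)                  # -> "6.28"
format_influxdb("lab 1,rack=A")        # tag=True  -> lab\ 1\,rack\=A  (space, comma, '=' escaped)
format_influxdb("rack A", tag=False)   # -> "rack A"  (double quotes included in the result)
# Non-scalar, non-string values are first converted with pyon.encode() and then
# formatted as a tag or a quoted field value like any other string.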
def __init__(self, name, specs, scheduler, parent):
    QtWidgets.QFrame.__init__(self)
    self.setFrameStyle(QtWidgets.QFrame.Panel | QtWidgets.QFrame.Raised)

    self.parent = parent
    self.name = name
    self.freq = parent.all_dds_specs[name]["frequency"] * 1e-6
    self.att = parent.all_dds_specs[name]["att"]
    self.state = parent.all_dds_specs[name]["state"]
    self.cpld = specs.urukul
    self.amplitude = parent.all_dds_specs[name]["amplitude"]
    unum = str(int(specs.urukul))
    min_att, max_att = specs.min_att, specs.max_att
    min_freq, max_freq = specs.min_freq, specs.max_freq
    self.scheduler = scheduler
    self.expid = {"arguments": {"specs": pyon.encode(self.parent.all_dds_specs),
                                "urukul_number": unum,
                                "dds_name": name},
                  "class_name": "change_cw",
                  "file": "misc/manual_dds_control.py",
                  "log_level": 30,
                  "repo_rev": None,
                  "priority": 1}
    self.parameters_changed()

    layout = QtWidgets.QGridLayout()
    layout.addWidget(boldLabel(name), 0, 0, 1, 3)
    layout.addWidget(QtWidgets.QLabel("Frequency"), 1, 0)
    layout.addWidget(QtWidgets.QLabel("Amplitude"), 3, 0)
    layout.addWidget(QtWidgets.QLabel("Attenuation"), 3, 1)

    self.freq_spin = customSpinBox(self.freq, (min_freq, max_freq), " MHz")
    self.freq_spin.editingFinished.connect(self.freq_spin_changed)
    self.amp_spin = customSpinBox(1, (0, 1), None)
    self.amp_spin.editingFinished.connect(self.amp_spin_changed)
    self.att_spin = customSpinBox(self.att, (min_att, max_att), " dB")
    self.att_spin.editingFinished.connect(self.att_spin_changed)
    self.state_button = QtWidgets.QPushButton("O")
    self.state_button.setCheckable(True)
    self.state_button.toggled.connect(self.button_clicked)
    self.state_button.setChecked(self.state)

    layout.addWidget(self.freq_spin, 2, 0)
    layout.addWidget(self.amp_spin, 4, 0)
    layout.addWidget(self.att_spin, 4, 1)
    layout.addWidget(self.state_button, 2, 1)
    self.setLayout(layout)
def test_no_path_match_override(self):
    exp = self.create(ScanAddOneExp, env_args={
        PARAMS_ARG_KEY: pyon.encode({
            "overrides": {
                "fixtures.AddOneFragment.value": [{
                    "path": "non_existent",
                    "value": 3
                }]
            }
        })
    })
    with self.assertRaises(ValueError):
        exp.prepare()
def parameters_changed(self):
    new_values = {"frequency": float(self.freq) * 1e6,
                  "att": float(self.att),
                  "state": self.state,
                  "cpld": int(self.cpld),
                  "amplitude": float(self.amplitude)}
    self.parent.all_dds_specs.update({self.name: new_values})
    self.parent.expid_dds["arguments"].update(
        {"specs": pyon.encode(self.parent.all_dds_specs)})
    if os.name == "nt":
        # Skip automatic DDS param updates when running from Windows
        return
    self.scheduler.submit("main", self.parent.expid_dds, priority=1)
    cxn = labrad.connect()
    p = cxn.parametervault
    p.set_parameter(["dds_cw_parameters", self.name,
                     [str(self.freq), str(self.amplitude),
                      str(int(self.state)), str(self.att)]])
    cxn.disconnect()
async def _send(self, obj, cancellable=True):
    assert self.io_lock.locked()
    line = pyon.encode(obj)
    self.ipc.write((line + "\n").encode())
    ifs = [self.ipc.drain()]
    if cancellable:
        ifs.append(self.closed.wait())
    fs = await asyncio_wait_or_cancel(
        ifs, timeout=self.send_timeout,
        return_when=asyncio.FIRST_COMPLETED)
    if all(f.cancelled() for f in fs):
        raise WorkerTimeout(
            "Timeout sending data to worker (RID {})".format(self.rid))
    for f in fs:
        if not f.cancelled() and f.done():
            f.result()  # raise any exceptions
    if cancellable and self.closed.is_set():
        raise WorkerError(
            "Data transmission to worker cancelled (RID {})".format(
                self.rid))
def write_pyon(self, obj):
    self.write(pyon.encode(obj).encode() + b"\n")
def main():
    global ipc

    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    start_time = None
    run_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={"scheduler": Scheduler(),
                                                "ccb": CCB()})
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.time()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_exp(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                start_local_time = time.localtime(start_time)
                dirname = os.path.join(
                    "results",
                    time.strftime("%Y-%m-%d", start_local_time),
                    time.strftime("%H", start_local_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
                put_object({"action": "completed"})
            elif action == "prepare":
                exp_inst.prepare()
                put_object({"action": "completed"})
            elif action == "run":
                run_time = time.time()
                exp_inst.run()
                put_object({"action": "completed"})
            elif action == "analyze":
                try:
                    exp_inst.analyze()
                except:
                    # make analyze failure non-fatal, as we may still want to
                    # write results afterwards
                    put_exception_report()
                else:
                    put_object({"action": "completed"})
            elif action == "write_results":
                filename = "{:09}-{}.h5".format(rid, exp.__name__)
                with h5py.File(filename, "w") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    f["start_time"] = start_time
                    f["run_time"] = run_time
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ExamineDatasetMgr, obj["file"])
                put_object({"action": "completed"})
            elif action == "terminate":
                break
    except:
        put_exception_report()
    finally:
        device_mgr.close_devices()
        ipc.close()
def describe(self):
    d = {"ty": self.__class__.__name__}
    if hasattr(self, "default_value"):
        d["default"] = pyon.encode(self.default_value)
    return d
def main():
    global ipc

    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    start_time = None
    run_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    def write_results():
        filename = "{:09}-{}.h5".format(rid, exp.__name__)
        with h5py.File(filename, "w") as f:
            dataset_mgr.write_hdf5(f)
            f["artiq_version"] = artiq_version
            f["rid"] = rid
            f["start_time"] = start_time
            f["run_time"] = run_time
            f["expid"] = pyon.encode(expid)

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={"scheduler": Scheduler(),
                                                "ccb": CCB()})
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.time()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_experiment(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                start_local_time = time.localtime(start_time)
                rootdir = os.path.join(os.path.expanduser("~"), "data")
                dirname = os.path.join(
                    rootdir, time.strftime("%Y-%m-%d", start_local_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
                put_completed()
            elif action == "prepare":
                exp_inst.prepare()
                put_completed()
            elif action == "run":
                current_time = datetime.datetime.now().strftime("%H%M_%S")
                run_time = time.time()
                try:
                    exp_inst.run()
                except:
                    # Only write results in run() on failure; on success wait
                    # for end of analyze stage.
                    write_results()
                    raise
                put_completed()
            elif action == "analyze":
                try:
                    exp_inst.analyze()
                except:
                    # make analyze failure non-fatal, as we may still want to
                    # write results afterwards
                    put_exception_report()
                else:
                    put_object({"action": "completed"})
            elif action == "write_results":
                if hasattr(exp_inst, "archive"):
                    if not exp_inst.archive:
                        put_object({"action": "completed"})
                        continue
                path = os.path.join(dirname, exp.__name__)
                if not os.path.exists(path):
                    os.mkdir(path)
                if hasattr(exp_inst, "filename"):
                    filename = list(exp_inst.filename.values())[0]
                else:
                    filename = "raw-data_{}.h5".format(current_time)
                file_ = os.path.join(path, filename)
                with h5py.File(file_, "a") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    f["start_time"] = start_time
                    f["run_time"] = run_time
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ExamineDatasetMgr, obj["file"])
                put_completed()
            elif action == "terminate":
                break
    except:
        put_exception_report()
    finally:
        device_mgr.close_devices()
        ipc.close()
def publish(self, notifier, mod):
    line = pyon.encode(mod) + "\n"
    line = line.encode()
    notifier_name = self._notifier_names[id(notifier)]
    for recipient in self._recipients[notifier_name]:
        recipient.put_nowait(line)
def put_object(obj):
    ds = pyon.encode(obj)
    ipc.write((ds + "\n").encode())
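# Not part of the original code: the read-side counterpart invoked as
# `get_object()` in the worker main loops shown elsewhere in this collection.
# A minimal sketch, assuming `ipc` is the same module-level ChildComm pipe and
# that each message is a single newline-terminated PYON line (the inverse of
# put_object).
def get_object():
    line = ipc.readline()
    return pyon.decode(line.decode())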
def test_encdec_array_order(self):
    """Test encoding of non c-contiguous arrays (see #5)"""
    array = np.reshape(np.arange(6), (2, 3), order='F')
    np.testing.assert_array_equal(array, pyon.decode(pyon.encode(array)))
def test_encdec(self):
    for enc in pyon.encode, lambda x: pyon.encode(x, True), json.dumps:
        for dec in pyon.decode, json.loads:
            self.assertEqual(dec(enc(_json_test_object)),
                             _json_test_object)
def __send(self, obj): line = pyon.encode(obj) + "\n" self.__socket.sendall(line.encode())
def __send(self, obj): line = pyon.encode(obj) + "\n" self.__writer.write(line.encode())
def _update_ndscan_params(arguments, params):
    arguments[PARAMS_ARG_KEY]["state"] = pyon.encode(params)