def test_encdec(self):
    """Round-trip the test object through plain and pretty PYON encoders."""
    for enc in pyon.encode, lambda x: pyon.encode(x, True):
        with self.subTest(enc=enc):
            self.assertEqual(pyon.decode(enc(_pyon_test_object)),
                             _pyon_test_object)
            # NaNs don't compare equal, so test separately.
            # Use assertTrue instead of a bare assert: asserts are
            # stripped under `python -O` and give no unittest report.
            self.assertTrue(np.isnan(pyon.decode(enc(np.nan))))
async def load_hdf5_task(self, filename=None):
    """Restore submission arguments from an HDF5 results file.

    Falls back to the area's currently selected dataset when *filename*
    is None; silently returns if none is selected.
    """
    if filename is None:
        if self._area.dataset is None:
            return
        filename = self._area.dataset
    try:
        with h5py.File(filename, "r") as f:
            expid = f["expid"][()]
            expid = pyon.decode(expid)
            arguments = expid["arguments"]
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are not swallowed; file/parse errors are logged and ignored.
        logger.error("Could not retrieve expid from HDF5 file",
                     exc_info=True)
        return
    try:
        self.log_level.setCurrentIndex(log_levels.index(
            log_level_to_name(expid["log_level"])))
    except Exception:
        logger.error("Could not set submission options from HDF5 expid",
                     exc_info=True)
        return
    await self._recompute_arguments(arguments)
async def _handle_connection_cr(self, reader, writer):
    """Serve one RPC client connection: handshake, then a request/reply loop."""
    try:
        line = await reader.readline()
        if line != _init_string:
            # Client did not send the protocol magic; drop silently.
            return
        obj = {
            "targets": sorted(self.targets.keys()),
            "description": self.description
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        line = await reader.readline()
        if not line:
            return
        # Strip the trailing newline from the requested target name.
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return
        if callable(target):
            # Target may be registered as a factory; build per connection.
            target = target()
        while True:
            line = await reader.readline()
            if not line:
                break
            reply = await self._process_action(
                target, pyon.decode(line.decode()))
            writer.write((pyon.encode(reply) + "\n").encode())
    except (ConnectionResetError, ConnectionAbortedError,
            BrokenPipeError):
        # May happens on Windows when client disconnects
        pass
    finally:
        writer.close()
def _action_set_dataset(remote, args):
    """Set a dataset on the remote, mapping CLI flags to a persist value."""
    # --no-persist takes precedence if both flags are given, matching
    # the original flag-evaluation order.
    if args.no_persist:
        persist = False
    elif args.persist:
        persist = True
    else:
        persist = None
    remote.set(args.name, pyon.decode(args.value), persist)
def list_current_changed(self, current, previous):
    """Load metadata and datasets from the newly selected HDF5 result file.

    Emits metadata_changed with run info (best effort) and dataset_changed
    after initializing the dataset model from archive/datasets groups.
    """
    info = self.model.fileInfo(current)
    f = open_h5(info)
    if not f:
        return
    logger.debug("loading datasets from %s", info.filePath())
    with f:
        try:
            expid = pyon.decode(f["expid"].value)
            start_time = datetime.fromtimestamp(f["start_time"].value)
            v = {
                "artiq_version": f["artiq_version"].value,
                "repo_rev": expid["repo_rev"],
                "file": expid["file"],
                "class_name": expid["class_name"],
                "rid": f["rid"].value,
                "start_time": start_time,
            }
            self.metadata_changed.emit(v)
        except Exception:
            # Narrowed from a bare except: metadata display is best-effort,
            # but KeyboardInterrupt/SystemExit must propagate.
            logger.warning("unable to read metadata from %s",
                           info.filePath(), exc_info=True)
        rd = dict()
        if "archive" in f:
            rd = {k: (True, v.value) for k, v in f["archive"].items()}
        if "datasets" in f:
            for k, v in f["datasets"].items():
                if k in rd:
                    logger.warning("dataset '%s' is both in archive and "
                                   "outputs", k)
                rd[k] = (True, v.value)
        if rd:
            self.datasets.init(rd)
    self.dataset_changed.emit(info.filePath())
def test_rpc_encode_function(self):
    """Test that `pc_rpc` can encode a function properly.

    Used in `get_rpc_method_list` part of
    :meth:`artiq.protocols.pc_rpc.Server._process_action`
    """
    def _annotated_function(
            arg1: str, arg2: np.ndarray = np.array([1,])) -> np.ndarray:
        """Sample docstring."""
        return arg1

    argspec_documented, docstring = pc_rpc.Server._document_function(
        _annotated_function)
    self.assertEqual(docstring, "Sample docstring.")

    # purposefully ignore how argspec["annotations"] is treated.
    # allows option to change PYON later to encode annotations.
    full_spec = dict(inspect.getfullargspec(_annotated_function)._asdict())
    expected = {k: v for k, v in full_spec.items() if k != "annotations"}

    # every non-annotation item must appear unchanged in the documented spec
    self.assertLessEqual(expected.items(), argspec_documented.items())
    self.assertDictEqual(
        argspec_documented, pyon.decode(pyon.encode(argspec_documented)))
async def load_hdf5_task(self, filename=None):
    """Restore submission arguments from an HDF5 results file.

    Uses the area's currently selected dataset when *filename* is None;
    returns silently if nothing is selected.
    """
    if filename is None:
        if self._area.dataset is None:
            return
        filename = self._area.dataset
    try:
        with h5py.File(filename, "r") as f:
            expid = f["expid"][()]
            expid = pyon.decode(expid)
            arguments = expid["arguments"]
    except Exception:
        # Narrowed from a bare except so interrupts are not swallowed.
        logger.error("Could not retrieve expid from HDF5 file",
                     exc_info=True)
        return
    try:
        self.log_level.setCurrentIndex(
            log_levels.index(log_level_to_name(expid["log_level"])))
    except Exception:
        logger.error("Could not set submission options from HDF5 expid",
                     exc_info=True)
        return
    await self._recompute_arguments(arguments)
async def _load_hdf5_task(self):
    """Prompt for an HDF5 results file and restore its submission options.

    Restores arguments, log level, and (when available) the repository
    revision from the stored expid.
    """
    try:
        filename = await get_open_file_name(
            self.manager.main_window, "Load HDF5",
            self.hdf5_load_directory,
            "HDF5 files (*.h5 *.hdf5);;All files (*.*)")
    except asyncio.CancelledError:
        # User dismissed the file dialog.
        return
    self.hdf5_load_directory = os.path.dirname(filename)
    try:
        with h5py.File(filename, "r") as f:
            expid = f["expid"][()]
            expid = pyon.decode(expid)
            arguments = expid["arguments"]
    except Exception:
        # Narrowed from a bare except so CancelledError/interrupts propagate.
        logger.error("Could not retrieve expid from HDF5 file",
                     exc_info=True)
        return
    try:
        self.log_level.setCurrentIndex(log_levels.index(
            log_level_to_name(expid["log_level"])))
        if ("repo_rev" in expid and expid["repo_rev"] != "N/A"
                and hasattr(self, "repo_rev")):
            self.repo_rev.setText(expid["repo_rev"])
    except Exception:
        logger.error("Could not set submission options from HDF5 expid",
                     exc_info=True)
        return
    await self._recompute_arguments_task(arguments)
def list_current_changed(self, current, previous):
    """Load metadata and datasets from the newly selected HDF5 result file."""
    info = self.model.fileInfo(current)
    f = open_h5(info)
    if not f:
        return
    logger.debug("loading datasets from %s", info.filePath())
    with f:
        try:
            expid = pyon.decode(f["expid"].value)
            start_time = datetime.fromtimestamp(f["start_time"].value)
            v = {
                "artiq_version": f["artiq_version"].value,
                "repo_rev": expid["repo_rev"],
                "file": expid["file"],
                "class_name": expid["class_name"],
                "rid": f["rid"].value,
                "start_time": start_time,
            }
            self.metadata_changed.emit(v)
        except Exception:
            # Narrowed from a bare except: metadata is best-effort, but
            # KeyboardInterrupt/SystemExit must not be swallowed.
            logger.warning("unable to read metadata from %s",
                           info.filePath(), exc_info=True)
        if "datasets" in f:
            rd = {k: (True, v.value) for k, v in f["datasets"].items()}
            self.datasets.init(rd)
    self.dataset_changed.emit(info.filePath())
def __recv(self):
    """Receive one newline-terminated PYON message from the socket.

    Accumulates raw bytes and decodes only once at the end: the original
    decoded each 4096-byte chunk separately, which raises
    UnicodeDecodeError when a multi-byte UTF-8 character straddles a
    recv() boundary.
    """
    buf = self.__socket.recv(4096)
    while b"\n" not in buf:
        more = self.__socket.recv(4096)
        if not more:
            # Peer closed the connection; decode whatever arrived.
            break
        buf += more
    return pyon.decode(buf.decode())
def test_encdec_array(self):
    """Scalar test values must survive round-trips as 0-d and 1-d arrays."""
    scalars = {k: v for k, v in _pyon_test_object.items()
               if np.isscalar(v)}
    orig = {k: (np.array(v), np.array([v])) for k, v in scalars.items()}
    for enc in pyon.encode, lambda x: pyon.encode(x, True):
        decoded = pyon.decode(enc(orig))
        for k, pair in orig.items():
            with self.subTest(enc=enc, k=k, v=pair):
                np.testing.assert_equal(decoded[k], pair)
def test_encdec_array(self):
    """Round-trip 0-d and 1-d numpy arrays built from scalar test values."""
    orig = {}
    for key, value in _pyon_test_object.items():
        if np.isscalar(value):
            orig[key] = (np.array(value), np.array([value]))
    encoders = (pyon.encode, lambda x: pyon.encode(x, True))
    for enc in encoders:
        result = pyon.decode(enc(orig))
        for key in orig:
            with self.subTest(enc=enc, k=key, v=orig[key]):
                np.testing.assert_equal(result[key], orig[key])
async def _receive_cr(self):
    """Read newline-delimited PYON objects and fan them out to callbacks.

    Returns when the remote closes the connection. The unused local
    ``target = None`` from the original was removed (leftover from the
    structure-mirroring variant of this coroutine).
    """
    while True:
        line = await self.reader.readline()
        if not line:
            return
        obj = pyon.decode(line.decode())
        for notify_cb in self.notify_cbs:
            notify_cb(obj)
def _action_set_dataset(remote, args):
    """Set a dataset on the remote, rejecting contradictory persist flags."""
    if args.persist and args.no_persist:
        print("Options --persist and --no-persist cannot be specified "
              "at the same time")
        sys.exit(1)
    if args.persist:
        persist = True
    elif args.no_persist:
        persist = False
    else:
        persist = None
    remote.set(args.name, pyon.decode(args.value), persist)
async def _receive_cr(self):
    """Dispatch incoming PYON objects to callbacks until disconnect.

    Always invokes disconnect_cb (if set) on exit. The unused local
    ``target = None`` from the original was removed.
    """
    try:
        while True:
            line = await self.reader.readline()
            if not line:
                return
            obj = pyon.decode(line.decode())
            for notify_cb in self.notify_cbs:
                notify_cb(obj)
    finally:
        if self.disconnect_cb is not None:
            self.disconnect_cb()
def _recv(self, timeout):
    """Coroutine: receive one PYON object from the worker's stdout.

    Raises:
        WorkerFailed: on timeout, unexpected EOF, or malformed PYON.
    """
    try:
        line = yield from asyncio.wait_for(self.process.stdout.readline(),
                                           timeout=timeout)
    except asyncio.TimeoutError:
        raise WorkerFailed("Timeout receiving data from worker")
    if not line:
        raise WorkerFailed(
            "Worker ended unexpectedly while trying to receive data")
    try:
        obj = pyon.decode(line.decode())
    except Exception:
        # Narrowed from a bare except so cancellation and interrupts
        # are not converted into WorkerFailed.
        raise WorkerFailed("Worker sent invalid PYON data")
    return obj
def _try_extract_ndscan_params(arguments):
    """Split ndscan parameters out of a submission argument dict.

    Returns (params, remaining_arguments); params is None when the
    arguments do not carry an ndscan PYONValue parameter entry.
    """
    if not arguments:
        return None, arguments
    arg = arguments.get(PARAMS_ARG_KEY, None)
    if not arg:
        return None, arguments
    if arg["desc"]["ty"] != "PYONValue":
        return None, arguments
    # Prefer the live state; fall back to the declared default.
    state = arg.get("state", None)
    encoded = state if state else arg["desc"]["default"]
    params = pyon.decode(encoded)
    vanilla_args = dict(arguments)
    del vanilla_args[PARAMS_ARG_KEY]
    return params, vanilla_args
async def _receive_cr(self):
    """Mirror the published structure from a stream of PYON mods.

    An "init" mod rebuilds the target from scratch; every other mod is
    applied incrementally. All callbacks see every mod.
    """
    target = None
    while True:
        line = await self.reader.readline()
        if not line:
            return
        mod = pyon.decode(line.decode())
        if mod["action"] != "init":
            process_mod(target, mod)
        else:
            target = self.target_builder(mod["struct"])
        for notify_cb in self.notify_cbs:
            notify_cb(mod)
def _receive_cr(self):
    """Coroutine: rebuild/update the mirrored structure from received mods."""
    target = None
    while True:
        line = yield from self._reader.readline()
        if not line:
            return
        mod = pyon.decode(line.decode())
        action = mod["action"]
        if action == "init":
            # Initial snapshot: construct the local mirror.
            target = self.target_builder(mod["struct"])
        else:
            process_mod(target, mod)
        if self.notify_cb is not None:
            self.notify_cb(mod)
def _receive_cr(self):
    """Coroutine: maintain every registered mirror target from the mod stream."""
    targets = []
    while True:
        line = yield from self.reader.readline()
        if not line:
            return
        mod = pyon.decode(line.decode())
        if mod["action"] != "init":
            for target in targets:
                process_mod(target, mod)
        else:
            # Snapshot received: rebuild one mirror per builder.
            targets = [tb(mod["struct"]) for tb in self.target_builders]
        if self.notify_cb is not None:
            self.notify_cb(mod)
async def _receive_cr(self):
    """Feed decoded mods to all mirror targets and the notification hook."""
    targets = []
    while True:
        line = await self.reader.readline()
        if not line:
            return
        mod = pyon.decode(line.decode())
        action = mod["action"]
        if action == "init":
            # Full snapshot: rebuild one mirror per registered builder.
            targets = [build(mod["struct"])
                       for build in self.target_builders]
        else:
            for mirror in targets:
                process_mod(mirror, mod)
        if self.notify_cb is not None:
            self.notify_cb(mod)
async def _recv(self, timeout):
    """Receive one PYON object over IPC, racing readline against close.

    Raises:
        WorkerTimeout: neither data nor close arrived within *timeout*.
        WorkerError: transmission cancelled, worker EOF, or bad PYON.
    """
    assert self.io_lock.locked()
    fs = await asyncio_wait_or_cancel(
        [self.ipc.readline(), self.closed.wait()],
        timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
    if all(f.cancelled() for f in fs):
        raise WorkerTimeout("Timeout receiving data from worker")
    if self.closed.is_set():
        raise WorkerError("Data transmission to worker cancelled")
    line = fs[0].result()
    if not line:
        raise WorkerError("Worker ended while attempting to receive data")
    try:
        obj = pyon.decode(line.decode())
    except Exception:
        # Narrowed from a bare except: CancelledError (a BaseException)
        # must not be converted into WorkerError.
        raise WorkerError("Worker sent invalid PYON data")
    return obj
def data(self, idx, role):
    """Return a metadata tooltip for HDF5 result files; delegate otherwise.

    For ToolTipRole, best-effort reads run metadata from the file; any
    read/parse failure is logged and the default model data is returned.
    """
    if role == QtCore.Qt.ToolTipRole:
        info = self.fileInfo(idx)
        h5 = open_h5(info)
        if h5 is not None:
            # Close the file handle even on error (the original leaked it).
            with h5:
                try:
                    expid = pyon.decode(h5["expid"].value)
                    start_time = datetime.fromtimestamp(
                        h5["start_time"].value)
                    v = ("artiq_version: {}\nrepo_rev: {}\nfile: {}\n"
                         "class_name: {}\nrid: {}\nstart_time: {}").format(
                            h5["artiq_version"].value, expid["repo_rev"],
                            expid["file"], expid["class_name"],
                            h5["rid"].value, start_time)
                    return v
                except Exception:
                    # Narrowed from a bare except; tooltip is best-effort.
                    logger.warning("unable to read metadata from %s",
                                   info.filePath(), exc_info=True)
    return QtWidgets.QFileSystemModel.data(self, idx, role)
async def _handle_connection_cr(self, reader, writer):
    """Serve one RPC client: handshake, advertise methods, then request loop."""
    try:
        line = await reader.readline()
        if line != _init_string:
            # Client did not send the protocol magic; drop silently.
            return
        obj = {
            "targets": sorted(self.targets.keys()),
            "description": self.description
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        line = await reader.readline()
        if not line:
            return
        # Strip the trailing newline from the requested target name.
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return
        if callable(target):
            # Target may be registered as a factory; build per connection.
            target = target()
        # Advertise the callable surface of the target to the client.
        valid_methods = inspect.getmembers(target, inspect.ismethod)
        valid_methods = {m[0] for m in valid_methods}
        if self.builtin_terminate:
            valid_methods.add("terminate")
        writer.write((pyon.encode(valid_methods) + "\n").encode())
        while True:
            line = await reader.readline()
            if not line:
                break
            reply = await self._process_action(
                target, pyon.decode(line.decode()))
            writer.write((pyon.encode(reply) + "\n").encode())
    except (ConnectionResetError, ConnectionAbortedError,
            BrokenPipeError):
        # May happens on Windows when client disconnects
        pass
    finally:
        writer.close()
async def _receive_cr(self):
    """Mirror the published structure until disconnect.

    Always invokes disconnect_cb (if set) on exit, even on abrupt
    connection loss.
    """
    try:
        target = None
        while True:
            line = await self.reader.readline()
            if not line:
                # Remote closed the connection cleanly.
                return
            mod = pyon.decode(line.decode())
            if mod["action"] == "init":
                # First message carries the full structure snapshot.
                target = self.target_builder(mod["struct"])
            else:
                process_mod(target, mod)
            for notify_cb in self.notify_cbs:
                notify_cb(mod)
    except ConnectionError:
        # Treated like a normal disconnect; cleanup happens below.
        pass
    finally:
        if self.disconnect_cb is not None:
            self.disconnect_cb()
def load_result(day=None, rid=None, experiment=None, root_path=None):
    """Load an Artiq results file.

    The results file is described by a rid and a day (provided datestring
    or defaults to today). See find_results for a full description of the
    arguments.

    Returns a dictionary containing the logical contents of the HDF5
    file, including:

    * start_time : the unix timestamp when the experiment was built
    * expid : experiment description, including submission arguments
    * datasets : dictionary containing all set datasets
    """
    rs = find_results(day=day, rid=rid, experiment=experiment,
                      root_path=root_path)
    if len(rs) == 0:
        raise IOError("No results file found")
    if len(rs) > 1:
        raise IOError("More than one matching results file found")
    r = {}
    try:
        # Exactly one match is guaranteed above; take it positionally.
        # The original indexed `rs[rid]`, which fails when rid is None
        # (the default) even though a unique match was found.
        result = next(iter(rs.values())) if isinstance(rs, dict) \
            else rs[0]
        with h5py.File(result.path, "r") as f:
            expid = pyon.decode(f["expid"][()])
            r["expid"] = expid
            for k in ["artiq_version", "start_time"]:
                r[k] = f[k].value
            # Load datasets
            ds = {}
            r["datasets"] = ds
            for k in f["datasets"]:
                ds[k] = f["datasets"][k].value
    except Exception as err:
        # Narrowed from a bare except, and chained so the real cause
        # (missing key, corrupt file, ...) stays visible.
        raise IOError("Failure parsing results file") from err
    return r
def _handle_connection_cr(self, reader, writer):
    """Coroutine: serve one RPC client (legacy yield-from style).

    Handshake, then loop: decode a request, call the named method on the
    target, and reply with either the result or a formatted traceback.
    """
    try:
        line = yield from reader.readline()
        if line != _init_string:
            # Client did not send the protocol magic; drop silently.
            return
        obj = {
            "targets": sorted(self.targets.keys()),
            "parameters": self.id_parameters
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        line = yield from reader.readline()
        if not line:
            return
        # Strip the trailing newline from the requested target name.
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return
        while True:
            line = yield from reader.readline()
            if not line:
                break
            obj = pyon.decode(line.decode())
            try:
                # Dispatch: look up the requested method by name.
                method = getattr(target, obj["name"])
                ret = method(*obj["args"], **obj["kwargs"])
                obj = {"status": "ok", "ret": ret}
            except Exception:
                # Report failures to the client instead of killing
                # the server loop.
                obj = {"status": "failed",
                       "message": traceback.format_exc()}
            line = pyon.encode(obj) + "\n"
            writer.write(line.encode())
    finally:
        writer.close()
async def __recv(self):
    """Read one newline-terminated line and decode it as PYON."""
    raw = await self.__reader.readline()
    return pyon.decode(raw.decode())
def _parse_list_pyon(values: str) -> List[float]:
    """Decode a comma-separated string of PYON values as a list."""
    return pyon.decode("[{}]".format(values))
async def read_pyon(self):
    """Read one line from the stream and decode it as a PYON object."""
    raw = await self.readline()
    return pyon.decode(raw.decode())
def get_argument_value(self):
    """Decode the widget's current text as a PYON value."""
    text = self.text()
    return pyon.decode(text)
def load(
    obj: dynaconf.LazySettings,
    env: str = None,
    silent: bool = True,
    key: str = None,
    filename: str = None,
) -> None:
    """
    Read and load in to `obj` a single key or all keys from ARTIQ datasets.

    If you would like different name mappings, they need to be specified in a
    settings file (either set as an environment variable TODO, or defaults
    to TODO).

    This defaults to connecting to an ARTIQ master instance on the local
    machine, but can be changed with the environment variable: TODO

    Args:
        obj (dynaconf.LazySettings): the settings instance
        env (str): settings current env (upper case) default='DEVELOPMENT'
        silent (bool): if errors should raise
        key (str): if defined load a single key, else load all from `env`
        filename (str): Custom filename to load (useful for tests)

    Returns:
        None
    """
    # pylint: disable=unused-argument
    # REQUIRED FILES:
    # ? ARTIQ config settings (master IP address)??
    # ? ARTIQ dataset mappings to variable names?? Env var?
    port = 3250
    ip = "::1"
    # TODO: make ip, port, mapping_file to be dynamic-set
    mapping_file_path = pathlib.Path(obj.find_file("artiq_dataset_map.pyon"))
    if mapping_file_path.exists() and mapping_file_path.is_file():
        # Bug fix: the %s placeholder previously had no argument, so the
        # log line printed the literal "%s".
        obj.logger.debug("Using key mapping file `%s`", mapping_file_path)
        key_to_dataset_map = pyon.decode(obj.read_file(mapping_file_path))
        dynaconf_keys = list(sorted(key_to_dataset_map.keys()))
        if key is not None and key in dynaconf_keys:
            dynaconf_keys = [key]
        elif key is not None:
            # TODO: handle silent
            raise KeyError(
                "Key {} not in ARTIQ mapping file {}".format(
                    key, mapping_file_path)
            )
        # Bug fix: construct the client BEFORE the try block. If the
        # constructor itself raised inside the try, the finally clause
        # referenced an unbound `artiq_datasets`, masking the real error.
        artiq_datasets = artiq_connection.Client(
            ip, port, "master_dataset_db", timeout=10
        )
        try:
            dynaconf_values = list(
                artiq_datasets.get(k) for k in dynaconf_keys)
        finally:
            artiq_datasets.close_rpc()
        obj.update(dict(zip(dynaconf_keys, dynaconf_values)))
    else:
        # TODO
        if silent:
            obj.logger.debug(
                "Key mapping file not found. Cannot load ARTIQ datasets")
        else:
            raise NotImplementedError(
                "Loading ARTIQ datasets without mapping file isn't handled"
            )
    obj._loaded_files.append(filename)  # pylint: disable=protected-access
    return
def parse_arguments(arguments):
    """Parse "name=pyon_value" strings into a {name: value} dict."""
    parsed = {}
    for item in arguments:
        # partition splits on the FIRST '=', so values may contain '='.
        name, _, value = item.partition("=")
        parsed[name] = pyon.decode(value)
    return parsed
async def _handle_connection_cr(self, reader, writer):
    """Serve one RPC client connection end to end.

    After the handshake, each request is either "get_rpc_method_list"
    (introspect the target and return method docs) or "call" (invoke a
    method, awaiting it if it returns a coroutine). Exceptions raised by
    target methods are returned to the client as formatted tracebacks.
    """
    try:
        line = await reader.readline()
        if line != _init_string:
            # Client did not send the protocol magic; drop silently.
            return
        obj = {
            "targets": sorted(self.targets.keys()),
            "description": self.description
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        line = await reader.readline()
        if not line:
            return
        # Strip the trailing newline from the requested target name.
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return
        while True:
            line = await reader.readline()
            if not line:
                break
            obj = pyon.decode(line.decode())
            try:
                if obj["action"] == "get_rpc_method_list":
                    # Introspect public methods and their argspecs for
                    # client-side documentation/autocompletion.
                    members = inspect.getmembers(target, inspect.ismethod)
                    doc = {
                        "docstring": inspect.getdoc(target),
                        "methods": {}
                    }
                    for name, method in members:
                        if name.startswith("_"):
                            # Private methods are not exposed.
                            continue
                        method = getattr(target, name)
                        argspec = inspect.getfullargspec(method)
                        doc["methods"][name] = (dict(argspec._asdict()),
                                                inspect.getdoc(method))
                    if self.builtin_terminate:
                        # Synthesize an argspec for the built-in
                        # terminate method (not a real target method).
                        doc["methods"]["terminate"] = (
                            {
                                "args": ["self"],
                                "defaults": None,
                                "varargs": None,
                                "varkw": None,
                                "kwonlyargs": [],
                                "kwonlydefaults": [],
                            },
                            "Terminate the server.")
                    obj = {"status": "ok", "ret": doc}
                elif obj["action"] == "call":
                    logger.debug("calling %s", _PrettyPrintCall(obj))
                    if self.builtin_terminate and obj["name"] == "terminate":
                        # Handled by the server itself, not the target.
                        self._terminate_request.set()
                        obj = {"status": "ok", "ret": None}
                    else:
                        method = getattr(target, obj["name"])
                        ret = method(*obj["args"], **obj["kwargs"])
                        if inspect.iscoroutine(ret):
                            # Support async target methods transparently.
                            ret = await ret
                        obj = {"status": "ok", "ret": ret}
                else:
                    raise ValueError("Unknown action: {}"
                                     .format(obj["action"]))
            except Exception:
                # Return the failure to the client; keep serving.
                obj = {"status": "failed",
                       "message": traceback.format_exc()}
            line = pyon.encode(obj) + "\n"
            writer.write(line.encode())
    except (ConnectionResetError, ConnectionAbortedError,
            BrokenPipeError):
        # May happens on Windows when client disconnects
        pass
    finally:
        writer.close()
def _action_set_dataset(remote, args):
    """Decode the PYON value and store it as a dataset under args.name."""
    value = pyon.decode(args.value)
    remote.set(args.name, value, args.persist)
def _action_set_parameter(remote, args):
    """Set a parameter on the remote from its PYON-encoded value."""
    decoded = pyon.decode(args.value)
    remote.set(args.name, decoded)
def process(self, x):
    """Decode the PYON-encoded input and return the resulting object."""
    decoded = pyon.decode(x)
    return decoded
def test_encdec(self):
    """Both plain and pretty encoders must round-trip the test object."""
    encoders = (pyon.encode, lambda x: pyon.encode(x, True))
    for enc in encoders:
        round_tripped = pyon.decode(enc(_pyon_test_object))
        self.assertEqual(round_tripped, _pyon_test_object)
def get_object():
    """Read one line from the IPC channel and decode it as PYON."""
    raw = ipc.readline()
    return pyon.decode(raw.decode())
def get_object():
    """Read a PYON object from the original stdin (sys.__stdin__)."""
    raw = sys.__stdin__.readline()
    return pyon.decode(raw)
def _action_set_device(remote, args):
    """Store a PYON-encoded device description under args.name."""
    description = pyon.decode(args.description)
    remote.set(args.name, description)
def _parse_arguments(arguments):
    """Parse "name=pyon_value" strings into a {name: value} dict.

    Uses str.partition instead of split("="): split raised ValueError
    whenever the PYON value itself contained an '=' character, and this
    matches the sibling parse_arguments helper.
    """
    d = {}
    for argument in arguments:
        name, _, value = argument.partition("=")
        d[name] = pyon.decode(value)
    return d
def __recv(self):
    """Coroutine: read one line from the stream and decode it as PYON."""
    raw = yield from self.__reader.readline()
    return pyon.decode(raw.decode())