def test_encdec(self):
    """Round-trip every test value through encode/decode, in both the
    compact and the pretty-printed encoder mode."""
    encoders = (pyon.encode, lambda x: pyon.encode(x, True))
    for enc in encoders:
        with self.subTest(enc=enc):
            decoded = pyon.decode(enc(_pyon_test_object))
            self.assertEqual(decoded, _pyon_test_object)
            # NaNs don't compare equal, so test separately.
            assert np.isnan(pyon.decode(enc(np.nan)))
def _action_set_dataset(remote, args):
    """Set a dataset on the remote from parsed command-line arguments.

    ``--no-persist`` takes precedence over ``--persist``; if neither flag
    is given, persistence is left unspecified (``None``).
    """
    if args.no_persist:
        persist = False
    elif args.persist:
        persist = True
    else:
        persist = None
    remote.set(args.name, pyon.decode(args.value), persist)
async def load_hdf5_task(self, filename=None):
    """Load experiment submission options from an HDF5 results file.

    :param filename: Path to the HDF5 file; if ``None``, the dataset
        currently associated with the applet area is used (no-op when
        there is none either).
    """
    if filename is None:
        if self._area.dataset is None:
            return
        filename = self._area.dataset
    try:
        with h5py.File(filename, "r") as f:
            expid = pyon.decode(f["expid"][()])
            arguments = expid["arguments"]
    # Narrowed from a bare ``except:``, which in a coroutine would also
    # swallow asyncio.CancelledError (a BaseException since Python 3.8).
    except Exception:
        logger.error("Could not retrieve expid from HDF5 file",
                     exc_info=True)
        return
    try:
        self.log_level.setCurrentIndex(
            log_levels.index(log_level_to_name(expid["log_level"])))
    except Exception:
        logger.error("Could not set submission options from HDF5 expid",
                     exc_info=True)
        return
    await self._recompute_arguments(arguments)
async def _load_hdf5_task(self):
    """Prompt for an HDF5 results file and restore submission options
    (arguments, log level, repository revision) from its expid."""
    try:
        filename = await get_open_file_name(
            self.manager.main_window, "Load HDF5",
            self.hdf5_load_directory,
            "HDF5 files (*.h5 *.hdf5);;All files (*.*)")
    except asyncio.CancelledError:
        # User dismissed the dialog.
        return
    self.hdf5_load_directory = os.path.dirname(filename)
    try:
        with h5py.File(filename, "r") as f:
            expid = pyon.decode(f["expid"][()])
            arguments = expid["arguments"]
    # Narrowed from a bare ``except:``, which in a coroutine would also
    # swallow asyncio.CancelledError (a BaseException since Python 3.8).
    except Exception:
        logger.error("Could not retrieve expid from HDF5 file",
                     exc_info=True)
        return
    try:
        self.log_level.setCurrentIndex(
            log_levels.index(log_level_to_name(expid["log_level"])))
        if ("repo_rev" in expid and
                expid["repo_rev"] != "N/A" and
                hasattr(self, "repo_rev")):
            self.repo_rev.setText(expid["repo_rev"])
    except Exception:
        logger.error("Could not set submission options from HDF5 expid",
                     exc_info=True)
        return
    await self._recompute_arguments_task(arguments)
async def _receive_cr(self):
    """Receive sync_struct mods line by line, apply them to the local
    target, and notify subscribers; runs until EOF or disconnect."""
    try:
        target = None
        while True:
            line = await self.reader.readline()
            if not line:
                # EOF: peer closed the connection cleanly.
                return
            mod = pyon.decode(line.decode())
            if mod["action"] == "init":
                target = self.target_builder(mod["struct"])
            else:
                process_mod(target, mod)
            try:
                for notify_cb in self.notify_cbs:
                    notify_cb(mod)
            # Narrowed from a bare ``except:``, which would also swallow
            # asyncio.CancelledError and prevent clean task cancellation.
            except Exception:
                logger.error("Exception in notifier callback",
                             exc_info=True)
                break
    except ConnectionError:
        pass
    finally:
        if self.disconnect_cb is not None:
            self.disconnect_cb()
def test_rpc_encode_function(self):
    """Test that `pc_rpc` can encode a function properly.

    Used in `get_rpc_method_list` part of
    :meth:`sipyco.pc_rpc.Server._process_action`
    """
    def _annotated_function(
            arg1: str, arg2: np.ndarray = np.array([1, ])) -> np.ndarray:
        """Sample docstring."""
        return arg1

    argspec_documented, docstring = pc_rpc.Server._document_function(
        _annotated_function)
    self.assertEqual(docstring, "Sample docstring.")

    # purposefully ignore how argspec["annotations"] is treated.
    # allows option to change PYON later to encode annotations.
    full_spec = dict(inspect.getfullargspec(_annotated_function)._asdict())
    spec_without_annotations = dict(full_spec)
    del spec_without_annotations["annotations"]
    # check if all items (excluding annotations) are same in both dictionaries
    self.assertLessEqual(spec_without_annotations.items(),
                         argspec_documented.items())
    self.assertDictEqual(
        argspec_documented,
        pyon.decode(pyon.encode(argspec_documented)))
def __recv(self):
    """Receive one newline-terminated PYON line and decode it.

    Accumulates raw bytes and decodes only once the full line has
    arrived: the original decoded each 4096-byte chunk separately, which
    raises UnicodeDecodeError when a multi-byte UTF-8 sequence happens to
    be split across two recv() chunks.
    """
    buf = self.__socket.recv(4096)
    while b"\n" not in buf:
        more = self.__socket.recv(4096)
        if not more:
            # Peer closed mid-line; fall through and decode what we have
            # (matches the original best-effort behavior).
            break
        buf += more
    return pyon.decode(buf.decode())
def accept(self):
    """Commit the dialog: schedule the dataset write and close."""
    key = self.name_widget.text()
    persist = self.box_widget.isChecked()
    # Decoding happens synchronously, before the set() is scheduled.
    value = pyon.decode(self.value_widget.text())
    future = exc_to_warning(self.dataset_ctl.set(key, value, persist))
    asyncio.ensure_future(future)
    QtWidgets.QDialog.accept(self)
def test_encdec_array(self):
    """Scalar test values survive a round trip both as 0-d arrays and as
    one-element arrays, with both encoder modes."""
    scalar_items = {
        k: v
        for k, v in _pyon_test_object.items() if np.isscalar(v)
    }
    orig = {k: (np.array(v), np.array([v])) for k, v in scalar_items.items()}
    for enc in (pyon.encode, lambda x: pyon.encode(x, True)):
        result = pyon.decode(enc(orig))
        for k, v in orig.items():
            with self.subTest(enc=enc, k=k, v=v):
                np.testing.assert_equal(result[k], v)
def extract_param_schema(arguments: Dict[str, Any]) -> Dict[str, Any]:
    """Extract ndscan parameter data from the given ARTIQ arguments directory.

    :param arguments: The arguments for an ARTIQ experiment, as e.g. obtained
        using ``oitg.results.load_hdf5_file(…)["expid"]["arguments"]``.

    :raises KeyError: If no ndscan parameter argument is present.
    """
    try:
        string = arguments[PARAMS_ARG_KEY]
    except KeyError as err:
        # Chain the original lookup failure instead of discarding it, so
        # tracebacks show the missing key rather than a confusing
        # "During handling of the above exception" pair.
        raise KeyError(
            f"ndscan argument ({PARAMS_ARG_KEY}) not found") from err
    return pyon.decode(string)
def dtype(self):
    """Validate the value editor's text as PYON, updating the type label
    and enabling/disabling the OK button accordingly."""
    txt = self.value_widget.text()
    try:
        result = pyon.decode(txt)
    # Narrowed from a bare ``except:``, which would also have trapped
    # KeyboardInterrupt and SystemExit.
    except Exception:
        # Invalid PYON: show a warning icon and block submission.
        pixmap = self.style().standardPixmap(
            QtWidgets.QStyle.SP_MessageBoxWarning)
        self.data_type.setPixmap(pixmap)
        self.ok.setEnabled(False)
    else:
        self.data_type.setText(type(result).__name__)
        self.ok.setEnabled(True)
async def _receive_cr(self):
    """Dispatch decoded PYON lines to all notify callbacks until EOF.

    The dead ``target = None`` local (left over from the mod-processing
    variant of this coroutine) has been removed; nothing here read it.
    """
    try:
        while True:
            line = await self.reader.readline()
            if not line:
                # EOF: peer closed the connection.
                return
            obj = pyon.decode(line.decode())
            for notify_cb in self.notify_cbs:
                notify_cb(obj)
    finally:
        if self.disconnect_cb is not None:
            self.disconnect_cb()
def data(self, idx, role):
    """QFileSystemModel override: provide an HDF5-metadata tooltip for
    results files; all other roles fall through to the base class."""
    if role == QtCore.Qt.ToolTipRole:
        info = self.fileInfo(idx)
        h5 = open_h5(info)
        if h5 is not None:
            # Close the file when done — the original leaked the handle.
            # NOTE(review): assumes open_h5 returns an h5py.File (a
            # context manager), as its use elsewhere suggests — confirm.
            with h5:
                try:
                    expid = pyon.decode(h5["expid"][()])
                    start_time = datetime.fromtimestamp(
                        h5["start_time"][()])
                    v = ("artiq_version: {}\nrepo_rev: {}\nfile: {}\n"
                         "class_name: {}\nrid: {}\nstart_time: {}").format(
                             h5["artiq_version"][()], expid["repo_rev"],
                             expid.get("file", "<none>"),
                             expid["class_name"], h5["rid"][()], start_time)
                    return v
                # Narrowed from a bare ``except:``.
                except Exception:
                    logger.warning("unable to read metadata from %s",
                                   info.filePath(), exc_info=True)
    return QtWidgets.QFileSystemModel.data(self, idx, role)
def list_current_changed(self, current, previous):
    """Load metadata and datasets from the newly selected HDF5 results
    file, emitting ``metadata_changed`` and ``dataset_changed``."""
    info = self.model.fileInfo(current)
    f = open_h5(info)
    if not f:
        return
    logger.debug("loading datasets from %s", info.filePath())
    with f:
        try:
            expid = pyon.decode(f["expid"][()])
            start_time = datetime.fromtimestamp(f["start_time"][()])
            v = {
                "artiq_version": f["artiq_version"][()],
                "repo_rev": expid["repo_rev"],
                "file": expid.get("file", "<none>"),
                "class_name": expid["class_name"],
                "rid": f["rid"][()],
                "start_time": start_time,
            }
            self.metadata_changed.emit(v)
        # Narrowed from a bare ``except:``, which would also have trapped
        # KeyboardInterrupt and SystemExit.
        except Exception:
            logger.warning("unable to read metadata from %s",
                           info.filePath(), exc_info=True)
        rd = {}
        if "archive" in f:
            def visitor(k, v):
                if isinstance(v, h5py.Dataset):
                    rd[k] = (True, v[()])
            f["archive"].visititems(visitor)
        if "datasets" in f:
            def visitor(k, v):
                if isinstance(v, h5py.Dataset):
                    if k in rd:
                        # Archived copy is silently shadowed by the output.
                        logger.warning("dataset '%s' is both in archive "
                                       "and outputs", k)
                    rd[k] = (True, v[()])
            f["datasets"].visititems(visitor)
        self.datasets.init(rd)
    self.dataset_changed.emit(info.filePath())
async def _handle_connection_cr(self, reader, writer):
    """Serve one RPC client connection: handshake, target selection,
    then a request/reply loop of PYON-encoded lines."""
    try:
        # Handshake: the client must send the expected magic line first.
        line = await reader.readline()
        if line != _init_string:
            return
        # Advertise the available targets and server description.
        obj = {
            "targets": sorted(self.targets.keys()),
            "description": self.description
        }
        line = pyon.encode(obj) + "\n"
        writer.write(line.encode())
        # The client replies with the name of the target it wants.
        line = await reader.readline()
        if not line:
            return
        target_name = line.decode()[:-1]
        try:
            target = self.targets[target_name]
        except KeyError:
            return
        # A callable entry is a factory producing the actual target.
        if callable(target):
            target = target()
        valid_methods = inspect.getmembers(target, inspect.ismethod)
        valid_methods = {m[0] for m in valid_methods}
        if self.builtin_terminate:
            valid_methods.add("terminate")
        writer.write((pyon.encode(valid_methods) + "\n").encode())
        # Main loop: one PYON-encoded request line in, one reply line out.
        while True:
            line = await reader.readline()
            if not line:
                break
            reply = await self._process_action(target,
                                               pyon.decode(line.decode()))
            writer.write((pyon.encode(reply) + "\n").encode())
    except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError):
        # May happen on Windows when client disconnects
        pass
    finally:
        writer.close()
def _try_extract_ndscan_params(
    arguments: Dict[str, Any]
) -> Tuple[Optional[Dict[str, Any]], Dict[str, Any]]:
    """From a passed dictionary of upstream ARTIQ arguments, extracts the
    ndscan arguments, if there are any.

    :return: A tuple of the (decoded) ndscan parameter schema (``None`` if
        there wasn't one), and the remaining dictionary with that argument
        (if any) removed.
    """
    if not arguments:
        return None, arguments
    arg = arguments.get(PARAMS_ARG_KEY, None)
    if not arg or arg["desc"]["ty"] != "PYONValue":
        return None, arguments
    # Prefer the user-edited state; fall back to the declared default.
    encoded = arg.get("state", None) or arg["desc"]["default"]
    params = pyon.decode(encoded)
    vanilla_args = {k: v for k, v in arguments.items() if k != PARAMS_ARG_KEY}
    return params, vanilla_args
async def _recv(self, timeout):
    """Receive one PYON object from the worker over IPC.

    :param timeout: Seconds to wait before raising :class:`WorkerTimeout`.
    :raises WorkerTimeout: Nothing arrived within *timeout*.
    :raises WorkerError: The worker was closed, ended, or sent bad data.
    """
    assert self.io_lock.locked()
    fs = await asyncio_wait_or_cancel(
        [self.ipc.readline(), self.closed.wait()],
        timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
    if all(f.cancelled() for f in fs):
        raise WorkerTimeout(
            "Timeout receiving data from worker (RID {})".format(self.rid))
    if self.closed.is_set():
        raise WorkerError(
            "Receiving data from worker cancelled (RID {})".format(
                self.rid))
    line = fs[0].result()
    if not line:
        raise WorkerError(
            "Worker ended while attempting to receive data (RID {})".
            format(self.rid))
    try:
        obj = pyon.decode(line.decode())
    # Narrowed from a bare ``except:`` (which would also trap
    # CancelledError); chain the decode error for easier diagnosis.
    except Exception as exc:
        raise WorkerError("Worker sent invalid PYON data (RID {})".format(
            self.rid)) from exc
    return obj
def list_current_changed(self, current, previous):
    """Load metadata and datasets from the newly selected HDF5 results
    file, emitting ``metadata_changed`` and ``dataset_changed``."""
    info = self.model.fileInfo(current)
    f = open_h5(info)
    if not f:
        return
    logger.debug("loading datasets from %s", info.filePath())
    with f:
        try:
            # ``ds[()]`` reads a scalar dataset; the legacy ``.value``
            # accessor used here before was removed in h5py 3.0.
            expid = pyon.decode(f["expid"][()])
            start_time = datetime.fromtimestamp(f["start_time"][()])
            v = {
                "artiq_version": f["artiq_version"][()],
                "repo_rev": expid["repo_rev"],
                # Fall back like the newer variant of this browser does,
                # instead of failing the whole metadata read.
                "file": expid.get("file", "<none>"),
                "class_name": expid["class_name"],
                "rid": f["rid"][()],
                "start_time": start_time,
            }
            self.metadata_changed.emit(v)
        # Narrowed from a bare ``except:``.
        except Exception:
            logger.warning("unable to read metadata from %s",
                           info.filePath(), exc_info=True)
        rd = dict()
        if "archive" in f:
            rd = {k: (True, v[()]) for k, v in f["archive"].items()}
        if "datasets" in f:
            for k, v in f["datasets"].items():
                if k in rd:
                    # Archived copy is shadowed by the output value.
                    logger.warning(
                        "dataset '%s' is both in archive and "
                        "outputs", k)
                rd[k] = (True, v[()])
        if rd:
            self.datasets.init(rd)
    self.dataset_changed.emit(info.filePath())
async def read_pyon(self):
    """Read one line from the stream and decode it as a PYON object."""
    raw = await self.readline()
    return pyon.decode(raw.decode())
def process(self, x):
    """Decode the PYON string *x* into the corresponding Python object."""
    decoded = pyon.decode(x)
    return decoded
def main():
    """Entry point for ndscan.show: load an ARTIQ/ndscan HDF5 results
    file, print its scan settings/overrides, and show the plot UI."""
    args = get_argparser().parse_args()
    app = QtWidgets.QApplication(sys.argv)
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    # A "magic" path spec (e.g. day/rid shorthand) is resolved to a
    # concrete results file; it must match exactly one file.
    magic_spec = results.parse_magic(args.path)
    if magic_spec is not None:
        paths = results.find_results(day="auto", **magic_spec)
        if len(paths) != 1:
            QtWidgets.QMessageBox.critical(
                None, "Unable to resolve experiment path",
                f"Could not resolve '{args.path}: {paths}'")
            sys.exit(1)
        path = next(iter(paths.values())).path
    else:
        path = args.path
    try:
        file = h5py.File(path, "r")
    except Exception as e:
        QtWidgets.QMessageBox.critical(None, "Unable to load file", str(e))
        sys.exit(1)
    try:
        datasets = file["datasets"]
    except KeyError:
        QtWidgets.QMessageBox.critical(
            None, "Not an ARTIQ results file",
            "No ARTIQ dataset records found in file: '{}'".format(args.path))
        sys.exit(1)
    prefix = fetch_explicit_prefix(args)
    if prefix is not None:
        try:
            # 'axes' existed since the earliest schema revisions, so we can
            # use it to detect whether the file/prefix the user specified
            # vaguely looks like it has been generated by ndscan.
            datasets[prefix + "axes"][()]
        except KeyError:
            QtWidgets.QMessageBox.critical(
                None, "Not an ndscan file",
                "Datasets '{}*' in file '{}' do not look like ndscan results.".
                format(prefix, args.path))
            sys.exit(1)
        prefixes = [prefix]
    else:
        prefixes = find_ndscan_roots(datasets)
        if not prefixes:
            QtWidgets.QMessageBox.critical(
                None, "Not an ndscan file",
                "No ndscan result datasets found in file: '{}'".format(
                    args.path))
            sys.exit(1)
    # Missing schema is not fatal: the plots can still be shown.
    try:
        schema = extract_param_schema(
            pyon.decode(file["expid"][()])["arguments"])
    except Exception as e:
        print("No ndscan parameter arguments found:", e)
        schema = None
    if schema is not None:
        print("Scan settings")
        print("=============")
        print()
        for s in dump_scan(schema):
            print(s)
        print()
        print()
        print("Overrides")
        print("=========")
        print()
        for s in dump_overrides(schema):
            print(s)
        print()
    try:
        context = Context()
        context.set_title(os.path.basename(args.path))
        # Take source_id from first prefix. This is pretty arbitrary, but
        # for experiment-generated files, they will all be the same anyway.
        if (prefixes[0] + "source_id") in datasets:
            source = datasets[prefixes[0] + "source_id"][()]
            if isinstance(source, bytes):
                # h5py 3+ – can use datasets[…].asstr() as soon as we don't
                # support version 2 any longer.
                source = source.decode("utf-8")
            context.set_source_id(source)
        else:
            # Old ndscan versions had a rid dataset instead of source_id.
            context.set_source_id("rid_{}".format(datasets[prefixes[0] +
                                                           "rid"][()]))
        roots = [HDF5Root(datasets, p, context) for p in prefixes]
    except Exception as e:
        QtWidgets.QMessageBox.critical(
            None, "Error parsing ndscan file",
            "Error parsing datasets in '{}': {}".format(args.path, e))
        sys.exit(2)
    # A single root gets a plain plot container; multiple roots get a
    # tabbed widget labelled by unambiguous prefix suffixes.
    if len(roots) == 1:
        widget = PlotContainerWidget(roots[0].get_model())
    else:
        label_map = shorten_to_unambiguous_suffixes(
            prefixes, lambda fqn, n: ".".join(fqn.split(".")[-(n + 1):]))
        widget = MultiRootWidget(
            OrderedDict(
                zip((strip_suffix(label_map[p], ".") for p in prefixes),
                    roots)), context)
    widget.setWindowTitle(f"{context.get_title()} – ndscan.show")
    widget.show()
    widget.resize(800, 600)
    sys.exit(app.exec_())
def parse_arguments(arguments):
    """Parse ``name=value`` strings into a dict of decoded PYON values.

    Later duplicates of a name overwrite earlier ones.
    """
    return {
        name: pyon.decode(value)
        for name, _, value in (arg.partition("=") for arg in arguments)
    }
def get_object():
    """Read one line from the IPC pipe and decode it as a PYON object."""
    raw = ipc.readline()
    return pyon.decode(raw.decode())
def test_encdec_array_order(self):
    """Test encoding of non c-contiguous arrays (see #5)"""
    original = np.arange(6).reshape((2, 3), order='F')
    roundtripped = pyon.decode(pyon.encode(original))
    np.testing.assert_array_equal(original, roundtripped)
async def __recv(self):
    """Await one line from the stream reader and decode it as PYON."""
    raw = await self.__reader.readline()
    return pyon.decode(raw.decode())
def _parse_list_pyon(values: str) -> List[float]:
    """Decode a comma-separated list of PYON scalars given without the
    surrounding brackets."""
    return pyon.decode(f"[{values}]")