def run(with_file=False):
    """Parse CLI arguments, build and execute one experiment, then persist datasets.

    Runs the experiment's prepare/run/analyze stages, always closing devices
    afterwards. On success, datasets are either written to the HDF5 file named
    by ``args.hdf5`` or printed and saved to the dataset database.

    :param with_file: forwarded to ``get_argparser`` (presumably whether a
        file argument is expected — TODO confirm against get_argparser).
    """
    args = get_argparser(with_file).parse_args()
    init_logger(args)
    device_mgr = DeviceManager(DeviceDB(args.device_db),
                               virtual_devices={"scheduler": DummyScheduler()})
    dataset_db = DatasetDB(args.dataset_db)
    dataset_mgr = DatasetManager(dataset_db)

    try:
        exp_inst = _build_experiment(device_mgr, dataset_mgr, args)
        exp_inst.prepare()
        exp_inst.run()
        exp_inst.analyze()
    except CompileError:
        # Fix: the bound name (`as error`) was never used — drop it.
        # Compile diagnostics are assumed to have been reported already.
        return
    except Exception as exn:
        # Print the core-device exception report when the exception carries one.
        if hasattr(exn, "artiq_core_exception"):
            print(exn.artiq_core_exception, file=sys.stderr)
        # Fix: bare `raise` re-raises with the original traceback intact,
        # instead of `raise exn` which re-raises from this frame.
        raise
    finally:
        # Devices are released whether the experiment succeeded or not.
        device_mgr.close_devices()

    if args.hdf5 is not None:
        # Write all datasets into the requested HDF5 output file.
        with h5py.File(args.hdf5, "w") as f:
            dataset_mgr.write_hdf5(f)
    else:
        # No HDF5 target: print local datasets sorted by key, then persist
        # the dataset database to disk.
        for k, v in sorted(dataset_mgr.local.items(), key=itemgetter(0)):
            print("{}: {}".format(k, v))
        dataset_db.save()
def run(with_file=False):
    """Build an experiment from command-line arguments, run it, and save datasets.

    :param with_file: forwarded to ``get_argparser``.
    """
    args = get_argparser(with_file).parse_args()
    init_logger(args)
    devices = DeviceManager(DeviceDB(args.device_db),
                            virtual_devices={"scheduler": DummyScheduler()})
    db = DatasetDB(args.dataset_db)
    datasets = DatasetManager(db)

    try:
        experiment = _build_experiment(devices, datasets, args)
        experiment.prepare()
        experiment.run()
        experiment.analyze()
    finally:
        # Release devices even when one of the stages above raises.
        devices.close_devices()

    if args.hdf5 is not None:
        # An HDF5 output file was requested: dump every dataset into it.
        with h5py.File(args.hdf5, "w") as f:
            datasets.write_hdf5(f)
    else:
        # Otherwise print the local datasets in key order and persist the
        # dataset database to disk.
        for key, value in sorted(datasets.local.items(), key=itemgetter(0)):
            print("{}: {}".format(key, value))
        db.save()
class TestDatasetDB(unittest.TestCase):
    """Exercise DatasetDB persistence, in-memory format, and mod updates."""

    def setUp(self):
        # Start each test from an empty dataset persistence file.
        self.persist_file = tempfile.NamedTemporaryFile(mode="w+")
        print("{}", file=self.persist_file, flush=True)

        self.ddb = DatasetDB(self.persist_file.name)
        # KEY1/KEY2 are persisted; KEY3 is memory-only (not marked persist).
        self.ddb.set(KEY1, DATA, persist=True)
        self.ddb.set(KEY2, DATA, persist=True,
                     hdf5_options=dict(compression=COMP))
        self.ddb.set(KEY3, DATA, hdf5_options=dict(shuffle=True))
        self.save_ddb_to_disk()

    def save_ddb_to_disk(self):
        # Persist and make sure the bytes actually reach the temp file.
        self.ddb.save()
        self.persist_file.flush()

    def load_ddb_from_disk(self):
        return pyon.load_file(self.persist_file.name)

    def test_persist_format(self):
        on_disk = pyon.load_file(self.persist_file.name)
        for key in [KEY1, KEY2]:
            self.assertTrue(on_disk[key]["persist"])
            self.assertEqual(on_disk[key]["value"], DATA)
        self.assertEqual(on_disk[KEY2]["hdf5_options"]["compression"], COMP)
        self.assertEqual(on_disk[KEY1]["hdf5_options"], dict())

    def test_only_persist_marked_datasets(self):
        # KEY3 was never marked persist, so it must not hit the disk file.
        on_disk = self.load_ddb_from_disk()
        with self.assertRaises(KeyError):
            on_disk[KEY3]

    def test_memory_format(self):
        entry = self.ddb.get(KEY2)
        self.assertTrue(entry["persist"])
        self.assertEqual(entry["value"], DATA)
        self.assertEqual(entry["hdf5_options"]["compression"], COMP)

        entry = self.ddb.get(KEY3)
        self.assertFalse(entry["persist"])
        self.assertEqual(entry["value"], DATA)
        self.assertTrue(entry["hdf5_options"]["shuffle"])

    def test_delete(self):
        self.ddb.delete(KEY1)
        self.save_ddb_to_disk()

        on_disk = self.load_ddb_from_disk()
        with self.assertRaises(KeyError):
            on_disk[KEY1]
        # Deleting one key must not disturb the other persisted entry.
        self.assertTrue(on_disk[KEY2]["persist"])

    def test_update(self):
        self.assertFalse(self.ddb.get(KEY3)["persist"])

        # Flip KEY3's persist flag through the mod/update protocol.
        mod = {
            "action": "setitem",
            "path": [KEY3],
            "key": "persist",
            "value": True,
        }
        self.ddb.update(mod)

        self.assertTrue(self.ddb.get(KEY3)["persist"])

    def test_update_hdf5_options(self):
        # KEY1 starts without a "shuffle" option.
        with self.assertRaises(KeyError):
            self.ddb.get(KEY1)["hdf5_options"]["shuffle"]

        mod = {
            "action": "setitem",
            "path": [KEY1, "hdf5_options"],
            "key": "shuffle",
            "value": False,
        }
        self.ddb.update(mod)

        self.assertFalse(self.ddb.get(KEY1)["hdf5_options"]["shuffle"])

    def test_reset_copies_persist(self):
        # Re-setting a key without an explicit persist flag keeps its old flag.
        self.assertTrue(self.ddb.get(KEY1)["persist"])
        self.ddb.set(KEY1, DATA)
        self.assertTrue(self.ddb.get(KEY1)["persist"])

        self.assertFalse(self.ddb.get(KEY3)["persist"])
        self.ddb.set(KEY3, DATA)
        self.assertFalse(self.ddb.get(KEY3)["persist"])

        # An explicit persist=True still overrides the copied flag.
        self.ddb.set(KEY3, DATA, persist=True)
        self.assertTrue(self.ddb.get(KEY3)["persist"])