def test_harvest_cases_new(self, fn3_fba_runner, fn3_fba_ds, dict_cases):
    if dict_cases:
        cases = [{'a': 1, 'b': 3},
                 {'a': 1, 'b': 4},
                 {'a': 2, 'b': 3},
                 {'a': 2, 'b': 4}]
    else:
        cases = [(1, 3), (1, 4), (2, 3), (2, 4)]
    with tempfile.TemporaryDirectory() as tmpdir:
        fl_pth = os.path.join(tmpdir, 'test.h5')
        h = Harvester(fn3_fba_runner, fl_pth)
        h.harvest_cases(cases)
        hds = load_ds(fl_pth)
        assert h.last_ds.identical(fn3_fba_ds)
        assert h.full_ds.identical(fn3_fba_ds)
        assert hds.identical(fn3_fba_ds)

def test_io_only_real(self, ds_real, engine_save, engine_load):
    with tempfile.TemporaryDirectory() as tmpdir:
        save_ds(ds_real, os.path.join(tmpdir, "test.h5"), engine=engine_save)
        ds2 = load_ds(os.path.join(tmpdir, "test.h5"), engine=engine_load)
        assert ds_real.equals(ds2)

def test_h5netcdf_dask(self, ds1):
    # convert to a dask-backed dataset before saving
    ds = ds1.chunk()
    with tempfile.TemporaryDirectory() as tmpdir:
        save_ds(ds, os.path.join(tmpdir, "test.h5"), engine='h5netcdf')
        ds2 = load_ds(os.path.join(tmpdir, "test.h5"))
        assert ds1.identical(ds2)
        ds2.close()

def test_dask_load(self, ds_real, engine_load):
    with tempfile.TemporaryDirectory() as tmpdir:
        save_ds(ds_real, os.path.join(tmpdir, "test.nc"))
        # load lazily with dask, one element per chunk
        ds2 = load_ds(os.path.join(tmpdir, "test.nc"),
                      engine=engine_load, chunks=1)
        assert ds2.chunks['b'] == (1, 1)
        assert ds_real.identical(ds2)
        ds2.close()

def test_harvest_combos_new(self, fn3_fba_runner, fn3_fba_ds):
    with tempfile.TemporaryDirectory() as tmpdir:
        fl_pth = os.path.join(tmpdir, 'test.h5')
        h = Harvester(fn3_fba_runner, fl_pth)
        h.harvest_combos((('a', (1, 2)), ('b', (3, 4))))
        hds = load_ds(fl_pth)
        assert h.last_ds.identical(fn3_fba_ds)
        assert h.full_ds.identical(fn3_fba_ds)
        assert hds.identical(fn3_fba_ds)

def test_harvest_cases_merge(self, fn3_fba_runner, fn3_fba_ds):
    with tempfile.TemporaryDirectory() as tmpdir:
        fl_pth = os.path.join(tmpdir, 'test.h5')
        h = Harvester(fn3_fba_runner, fl_pth)
        h.harvest_cases([(1, 3), (2, 4)])
        h.harvest_cases([(1, 4), (2, 3)])
        hds = load_ds(fl_pth)
        # only the second batch is in the last harvest, but the full dataset
        # (in memory and on disk) should contain the merged result
        assert not h.last_ds.identical(fn3_fba_ds)
        assert h.full_ds.identical(fn3_fba_ds)
        assert hds.identical(fn3_fba_ds)

def test_save_merge_ds(self, ds1, ds2, ds3):
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = os.path.join(tmpdir, "test.h5")
        save_merge_ds(ds1, fname)
        save_merge_ds(ds2, fname)
        # ds3 conflicts with the existing data -> should raise unless
        # overwriting is explicitly allowed
        with raises(xr.MergeError):
            save_merge_ds(ds3, fname)
        save_merge_ds(ds3, fname, overwrite=True)
        exp = ds3.combine_first(xr.merge([ds1, ds2]))
        assert load_ds(fname).identical(exp)

def test_harvest_combos_new_sow_reap_separate(self, fn3_fba_runner, fn3_fba_ds):
    with tempfile.TemporaryDirectory() as tmpdir:
        fl_pth = os.path.join(tmpdir, 'test.h5')
        h = Harvester(fn3_fba_runner, fl_pth)
        crop = h.Crop(parent_dir=tmpdir, num_batches=2)
        crop.sow_combos((('a', (1, 2)), ('b', (3, 4))))
        # grow both batches manually, then reap into the harvester
        for i in [1, 2]:
            grow(i, crop)
        crop.reap()
        hds = load_ds(fl_pth)
        assert h.last_ds.identical(fn3_fba_ds)
        assert h.full_ds.identical(fn3_fba_ds)
        assert hds.identical(fn3_fba_ds)

def test_harvest_combos_new_sow_and_reap(self, fn3_fba_runner, fn3_fba_ds):
    with tempfile.TemporaryDirectory() as tmpdir:
        fl_pth = os.path.join(tmpdir, 'test.h5')
        h = Harvester(fn3_fba_runner, fl_pth)
        crop = h.Crop(parent_dir=tmpdir, num_batches=2)

        def concurrent_grow():
            # wait for cases to be sown
            time.sleep(0.5)
            for i in [1, 2]:
                grow(i, crop)

        with ThreadPoolExecutor(1) as pool:
            pool.submit(concurrent_grow)
            crop.sow_combos((('a', (1, 2)), ('b', (3, 4))))
            crop.reap(wait=True)

        hds = load_ds(fl_pth)
        assert h.last_ds.identical(fn3_fba_ds)
        assert h.full_ds.identical(fn3_fba_ds)
        assert hds.identical(fn3_fba_ds)

def test_io_complex_data(self, ds1, engine_save, engine_load):
    with tempfile.TemporaryDirectory() as tmpdir:
        save_ds(ds1, os.path.join(tmpdir, "test.h5"), engine=engine_save)
        ds2 = load_ds(os.path.join(tmpdir, "test.h5"), engine=engine_load)
        assert ds1.identical(ds2)

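
# ---------------------------------------------------------------------------
# A minimal sketch of the `fn3_fba_runner` / `fn3_fba_ds` fixtures the harvest
# tests above rely on (normally defined in a conftest), assuming an
# xyzpy-style `Runner` API. The function body, output names ('sum', 'diff',
# 'prod') and import locations are illustrative assumptions, not the
# project's actual definitions.

from pytest import fixture
from xyzpy import Runner


@fixture
def fn3_fba_runner():
    # hypothetical two-argument function producing three named outputs
    def fn3(a, b):
        return a + b, a - b, a * b

    return Runner(fn3, var_names=['sum', 'diff', 'prod'])


@fixture
def fn3_fba_ds(fn3_fba_runner):
    # expected dataset: the full cartesian product harvested in the tests
    return fn3_fba_runner.run_combos((('a', (1, 2)), ('b', (3, 4))))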