def default_mib():
    """Fixture: MIB dataset over a 32x32 scan, initialized without an executor."""
    nav = (32, 32)
    dataset = MIBDataSet(
        path=MIB_TESTDATA_PATH,
        tileshape=(1, 3, 256, 256),
        scan_size=nav,
    )
    return dataset.initialize()
def test_mib_dist(dist_ctx):
    """A sum analysis run on the distributed executor yields one full detector frame."""
    dataset = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(32, 32))
    dataset = dataset.initialize(dist_ctx.executor)
    analysis = dist_ctx.create_sum_analysis(dataset=dataset)
    res = dist_ctx.run(analysis)
    assert res[0].raw_data.shape == (256, 256)
def test_mib_dist(dist_ctx):
    """Distributed sum analysis against the fixed cluster-side data path."""
    # NOTE(review): path is hard-coded, presumably mounted inside the
    # distributed test environment — verify against the cluster setup.
    dataset = MIBDataSet(
        path="/data/default.mib",
        tileshape=(1, 3, 256, 256),
        scan_size=(32, 32),
    )
    dataset = dataset.initialize(dist_ctx.executor)
    analysis = dist_ctx.create_sum_analysis(dataset=dataset)
    res = dist_ctx.run(analysis)
    assert res[0].raw_data.shape == (256, 256)
def test_not_too_many_files(lt_ctx):
    """256 files is within the accepted limit: _filenames() must not warn.

    ``pytest.warns(None)`` is deprecated (pytest 7) and removed (pytest 8),
    so warnings are recorded via the stdlib instead.
    """
    import warnings

    ds = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(32, 32))
    with mock.patch(
            'libertem.io.dataset.mib.glob',
            side_effect=lambda p: ["/a/%d.mib" % i for i in range(256)]):
        with warnings.catch_warnings(record=True) as record:
            # ensure every warning is recorded, not filtered away
            warnings.simplefilter("always")
            ds._filenames()
    assert len(record) == 0
def test_too_many_files(lt_ctx):
    """Exceeding the file-count limit must emit exactly one RuntimeWarning."""
    dataset = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(32, 32))

    def fake_glob(p):
        # simulate a directory holding 65536 small .mib files
        return ["/a/%d.mib" % i for i in range(256 * 256)]

    with mock.patch('libertem.io.dataset.mib.glob', side_effect=fake_glob):
        with pytest.warns(RuntimeWarning) as record:
            dataset._filenames()
    assert len(record) == 1
    assert "Saving data in many small files" in record[0].message.args[0]
def test_read_at_boundaries(default_mib, lt_ctx):
    """A freshly-built dataset and the default fixture must sum to the same result."""
    ds_odd = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(32, 32))
    ds_odd = ds_odd.initialize(lt_ctx.executor)
    res_odd = lt_ctx.run(lt_ctx.create_sum_analysis(dataset=ds_odd))
    res_ref = lt_ctx.run(lt_ctx.create_sum_analysis(dataset=default_mib))
    assert np.allclose(res_ref[0].raw_data, res_odd[0].raw_data)
def test_detect(lt_ctx):
    """detect_params should report path plus flattened nav and detector sig shapes."""
    detected = MIBDataSet.detect_params(MIB_TESTDATA_PATH, lt_ctx.executor)
    expected = {
        "path": MIB_TESTDATA_PATH,
        "nav_shape": (1024,),
        "sig_shape": (256, 256),
    }
    assert detected["parameters"] == expected
def test_positive_sync_offset(default_mib, lt_ctx):
    """A positive sync_offset shifts the frame stream forward; after trimming
    both result arrays accordingly, the shifted and un-shifted sums must match."""
    udf = SumSigUDF()
    offset = 2
    shifted = MIBDataSet(
        path=MIB_TESTDATA_PATH, nav_shape=(32, 32), sync_offset=offset,
    )
    shifted.set_num_cores(4)
    shifted = shifted.initialize(lt_ctx.executor)
    shifted.check_valid()

    first_part = next(shifted.get_partitions())
    assert first_part._start_frame == 2
    assert first_part.slice.origin == (0, 0, 0)

    sig = shifted.shape.sig
    scheme = TilingScheme.make_for_shape(
        tileshape=Shape((16,) + tuple(sig), sig_dims=sig.dims),
        dataset_shape=shifted.shape,
    )
    first_tile = next(first_part.get_tiles(scheme))
    assert tuple(first_tile.tile_slice.origin) == (0, 0, 0)

    # drain every partition; after the loop `part` refers to the last one
    for part in shifted.get_partitions():
        for _ in part.get_tiles(tiling_scheme=scheme):
            pass
    assert part.slice.origin == (768, 0, 0)
    assert part.slice.shape[0] == 256

    ref = lt_ctx.run_udf(dataset=default_mib, udf=udf)
    ref = ref['intensity'].raw_data[offset:]
    res = lt_ctx.run_udf(dataset=shifted, udf=udf)
    res = res['intensity'].raw_data[:shifted._meta.image_count - offset]
    assert np.allclose(ref, res)
def test_too_many_frames():
    """
    mib files can contain more frames than the intended scanning dimensions
    """
    # the file holds one extra full row of frames beyond this scan geometry
    dataset = MIBDataSet(
        path=MIB_TESTDATA_PATH,
        tileshape=(1, 3, 256, 256),
        scan_size=(31, 32),
    )
    dataset = dataset.initialize()
    dataset.check_valid()
    # reading must succeed without error despite the surplus frames
    for part in dataset.get_partitions():
        for _ in part.get_tiles():
            pass
def test_missing_frames(lt_ctx):
    """
    there can be some frames missing at the end
    """
    # the data set declares one full row of frames more than the file provides
    dataset = MIBDataSet(
        path=MIB_TESTDATA_PATH,
        tileshape=(1, 3, 256, 256),
        scan_size=(33, 32),
    )
    dataset = dataset.initialize()
    dataset.check_valid()
    # reading must succeed without error despite the shortfall
    for part in dataset.get_partitions():
        for _ in part.get_tiles():
            pass
def test_too_many_frames(lt_ctx):
    """
    mib files can contain more frames than the intended scanning dimensions
    """
    # the file holds one extra full row of frames beyond this scan geometry
    dataset = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(31, 32))
    dataset = dataset.initialize(lt_ctx.executor)
    dataset.check_valid()
    sig = dataset.shape.sig
    scheme = TilingScheme.make_for_shape(
        tileshape=Shape((16,) + tuple(sig), sig_dims=sig.dims),
        dataset_shape=dataset.shape,
    )
    # reading must succeed without error despite the surplus frames
    for part in dataset.get_partitions():
        for _ in part.get_tiles(tiling_scheme=scheme):
            pass
def test_missing_frames(lt_ctx):
    """
    there can be some frames missing at the end
    """
    # the data set declares one full row of frames more than the file provides
    dataset = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(33, 32))
    dataset = dataset.initialize(lt_ctx.executor)
    dataset.check_valid()
    sig = dataset.shape.sig
    scheme = TilingScheme.make_for_shape(
        tileshape=Shape((16,) + tuple(sig), sig_dims=sig.dims),
        dataset_shape=dataset.shape,
    )
    # reading must succeed without error despite the shortfall
    for part in dataset.get_partitions():
        for _ in part.get_tiles(tiling_scheme=scheme):
            pass
def open(self):
    """Open and initialize the MIB dataset at self._path, then warm up.

    Stores the initialized dataset on ``self._ds``.
    """
    ds = MIBDataSet(path=self._path, nav_shape=self._nav_shape)
    # initialize() returns the initialized dataset; the original code
    # discarded this return value, so `ds` could be left uninitialized
    # (every other call site uses `ds = ds.initialize(...)`).
    ds = ds.initialize(MITExecutor())
    print("dataset shape: %s" % (ds.shape, ))
    self._ds = ds
    self._warmup()
def test_detect(lt_ctx):
    """detect_params should report only the path for this dataset."""
    detected = MIBDataSet.detect_params(MIB_TESTDATA_PATH, lt_ctx.executor)
    assert detected["parameters"] == {"path": MIB_TESTDATA_PATH}
def default_mib(lt_ctx):
    """Fixture: MIB dataset over a 32x32 scan, initialized on the test executor."""
    dataset = MIBDataSet(path=MIB_TESTDATA_PATH, scan_size=(32, 32))
    return dataset.initialize(lt_ctx.executor)
def test_detect():
    """detect_params should report the path plus the default tileshape."""
    detected = MIBDataSet.detect_params(MIB_TESTDATA_PATH)
    expected = {
        "path": MIB_TESTDATA_PATH,
        "tileshape": (1, 3, 256, 256),
    }
    assert detected == expected
def test_detect(lt_ctx):
    """detect_params (executor variant) should report path and default tileshape."""
    detected = MIBDataSet.detect_params(MIB_TESTDATA_PATH, lt_ctx.executor)
    expected = {
        "path": MIB_TESTDATA_PATH,
        "tileshape": (1, 3, 256, 256),
    }
    assert detected == expected