import os
import platform

import numpy as np
import pytest

# Imports assumed from the LiberTEM package layout used by these tests;
# adjust to the local test module's conftest/imports if they differ.
from libertem.common import Shape
from libertem.udf.sumsigudf import SumSigUDF
from libertem.io.dataset.raw import RawFileDataSet
from libertem.io.dataset.base import TilingScheme
from libertem.executor.inline import InlineJobExecutor


def test_positive_sync_offset(lt_ctx, raw_dataset_8x8x8x8, raw_data_8x8x8x8_path, io_backend):
    # A positive sync_offset skips the first `sync_offset` frames of the file,
    # so the offset result must match the plain dataset shifted by the same amount.
    udf = SumSigUDF()
    sync_offset = 2

    ds_with_offset = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(8, 8),
        sig_shape=(8, 8),
        dtype="float32",
        enable_direct=False,
        sync_offset=sync_offset,
        io_backend=io_backend,
    )
    ds_with_offset.set_num_cores(4)
    ds_with_offset = ds_with_offset.initialize(lt_ctx.executor)
    ds_with_offset.check_valid()

    p0 = next(ds_with_offset.get_partitions())
    assert p0._start_frame == 2
    assert p0.slice.origin == (0, 0, 0)

    tileshape = Shape(
        (4,) + tuple(ds_with_offset.shape.sig),
        sig_dims=ds_with_offset.shape.sig.dims
    )
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds_with_offset.shape,
    )

    t0 = next(p0.get_tiles(tiling_scheme))
    assert tuple(t0.tile_slice.origin) == (0, 0, 0)

    for p in ds_with_offset.get_partitions():
        for t in p.get_tiles(tiling_scheme=tiling_scheme):
            pass

    assert p.slice.origin == (48, 0, 0)
    assert p.slice.shape[0] == 16

    result = lt_ctx.run_udf(dataset=raw_dataset_8x8x8x8, udf=udf)
    result = result['intensity'].raw_data[sync_offset:]

    result_with_offset = lt_ctx.run_udf(dataset=ds_with_offset, udf=udf)
    result_with_offset = result_with_offset[
        'intensity'
    ].raw_data[:ds_with_offset._meta.image_count - sync_offset]

    assert np.allclose(result, result_with_offset)
def test_missing_frames(lt_ctx, raw_data_8x8x8x8_path, io_backend):
    # nav_shape declares 10x8 = 80 frames, but the file only contains 8x8 = 64;
    # the dataset has to handle the missing frames at the end gracefully.
    ds = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(10, 8),
        sig_shape=(8, 8),
        dtype="float32",
        io_backend=io_backend,
    )
    ds.set_num_cores(4)
    ds = ds.initialize(lt_ctx.executor)

    tileshape = Shape(
        (4,) + tuple(ds.shape.sig),
        sig_dims=ds.shape.sig.dims
    )
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds.shape,
    )

    for p in ds.get_partitions():
        for t in p.get_tiles(tiling_scheme=tiling_scheme):
            pass

    assert p._start_frame == 60
    assert p._num_frames == 20
    assert p.slice.origin == (60, 0, 0)
    assert p.slice.shape[0] == 20
    assert t.tile_slice.origin == (60, 0, 0)
    assert t.tile_slice.shape[0] == 4
# Assumed to be a pytest fixture: the decorator is not part of the original
# excerpt, but the use of `tmpdir_factory` and `yield` implies fixture usage.
@pytest.fixture
def large_raw(tmpdir_factory):
    datadir = tmpdir_factory.mktemp('data')
    filename = datadir + '/raw-test-large-sparse'
    shape = (100, 100, 1216, 1216)
    dtype = np.uint16
    size = np.prod(np.int64(shape)) * np.dtype(dtype).itemsize
    # Create a sparse file, so the large logical size doesn't use real disk space:
    if platform.system() == "Windows":
        os.system('FSUtil File CreateNew "%s" 0x%X' % (filename, size))
        os.system('FSUtil Sparse SetFlag "%s"' % filename)
        os.system('FSUtil Sparse SetRange "%s" 0 0x%X' % (filename, size))
    else:
        with open(filename, 'wb') as f:
            f.truncate(size)
        stat = os.stat(filename)
        assert stat.st_blocks == 0
    # Note: this fixture uses the older `scan_size`/`detector_size` parameter
    # names, while the tests above use `nav_shape`/`sig_shape`.
    ds = RawFileDataSet(
        path=str(filename),
        scan_size=shape[:2],
        dtype=dtype,
        detector_size=shape[2:],
    )
    ds.set_num_cores(2)
    ds = ds.initialize(InlineJobExecutor())
    yield ds
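# The benchmark below calls a `get_first_tile` helper that is not part of this
# excerpt. A minimal sketch of what it presumably does (pull the first tile out
# of a partition, so the benchmark times the first-tile read path):
def get_first_tile(p0, tiling_scheme):
    return next(p0.get_tiles(tiling_scheme=tiling_scheme))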
def test_positive_sync_offset_raw(lt_ctx, benchmark, raw_data_8x8x8x8_path):
    ds = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(8, 8),
        sig_shape=(8, 8),
        dtype="float32",
        enable_direct=False,
        sync_offset=2,
    )
    ds.set_num_cores(4)
    ds = ds.initialize(lt_ctx.executor)

    tileshape = Shape(
        (4,) + tuple(ds.shape.sig),
        sig_dims=ds.shape.sig.dims
    )
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds.shape,
    )

    p0 = next(ds.get_partitions())
    benchmark(get_first_tile, p0=p0, tiling_scheme=tiling_scheme)
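# Usage note: the `benchmark` fixture comes from pytest-benchmark; running
# e.g. `pytest --benchmark-only` against this module collects the timings
# (assuming pytest-benchmark is installed).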
def test_too_many_frames(lt_ctx, raw_data_8x8x8x8_path, io_backend):
    # nav_shape declares only 6x8 = 48 frames while the file contains 64;
    # reading should complete without errors, ignoring the surplus frames.
    ds = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(6, 8),
        sig_shape=(8, 8),
        dtype="float32",
        io_backend=io_backend,
    )
    ds.set_num_cores(4)
    ds = ds.initialize(lt_ctx.executor)

    tileshape = Shape(
        (4,) + tuple(ds.shape.sig),
        sig_dims=ds.shape.sig.dims
    )
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds.shape,
    )

    for p in ds.get_partitions():
        for t in p.get_tiles(tiling_scheme=tiling_scheme):
            pass
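# The tests above rely on conftest fixtures (`lt_ctx`, `io_backend`,
# `raw_dataset_8x8x8x8`, `raw_data_8x8x8x8_path`) that are not part of this
# excerpt. A hypothetical minimal sketch of `raw_data_8x8x8x8_path`: the exact
# data values don't matter for the shape/offset assertions, only the
# 8x8x8x8 float32 raw layout does.
@pytest.fixture
def raw_data_8x8x8x8_path(tmpdir_factory):
    datadir = tmpdir_factory.mktemp('data')
    filename = str(datadir + '/raw-8x8x8x8')
    data = np.random.rand(8, 8, 8, 8).astype(np.float32)
    data.tofile(filename)
    yield filename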