Exemplo n.º 1
0
def main():
    """Open an EMPAD raw scan and run an all-ones mask analysis inline.

    The all-ones mask factory reduces each frame by a plain sum over the
    signal dimensions.
    """
    ctx = api.Context(executor=InlineJobExecutor())

    # 256x256 scan; raw detector frames are 130x128 and cropped to 128x128
    dataset = RawFileDataSet(
        path="/home/clausen/Data/EMPAD/scan_11_x256_y256.raw",
        scan_size=(256, 256),
        detector_size_raw=(130, 128),
        crop_detector_to=(128, 128),
        tileshape=(1, 8, 128, 128),
        dtype="float32",
    )
    dataset.initialize()

    analysis = ctx.create_mask_analysis(
        dataset=dataset,
        factories=[lambda: np.ones(dataset.shape.sig)],
    )

    result = ctx.run(analysis)
Exemplo n.º 2
0
def test_missing_frames(lt_ctx, raw_data_8x8x8x8_path, io_backend):
    """Dataset declared larger than the file: reading must not crash.

    nav_shape (10, 8) requests 80 frames, while the backing file
    (presumably 8*8 = 64 frames, per the fixture name — confirm against
    the fixture) is shorter. After exhausting all tiles, the trailing
    partition and tile geometry is checked.
    """
    ds = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(10, 8),  # 80 frames requested — more than the file holds
        sig_shape=(8, 8),
        dtype="float32",
        io_backend=io_backend,
    )
    ds.set_num_cores(4)  # 80 frames / 4 cores -> 20 frames per partition
    ds = ds.initialize(lt_ctx.executor)

    # tiles of 4 whole frames each
    tileshape = Shape((4, ) + tuple(ds.shape.sig), sig_dims=ds.shape.sig.dims)
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds.shape,
    )

    # iterate everything; `p` and `t` keep the last partition/tile afterwards
    for p in ds.get_partitions():
        for t in p.get_tiles(tiling_scheme=tiling_scheme):
            pass

    # last of 4 partitions covers frames [60, 80)
    assert p._start_frame == 60
    assert p._num_frames == 20
    assert p.slice.origin == (60, 0, 0)
    assert p.slice.shape[0] == 20
    # last tile starts at the partition start and spans 4 frames
    assert t.tile_slice.origin == (60, 0, 0)
    assert t.tile_slice.shape[0] == 4
Exemplo n.º 3
0
def medium_raw_float32(medium_raw_file_float32):
    """Fixture: open the medium float32 raw file through the mmap backend."""
    path, full_shape, data_dtype = medium_raw_file_float32
    # first two axes are navigation, last two are signal
    dataset = RawFileDataSet(
        path=str(path),
        nav_shape=full_shape[:2],
        dtype=data_dtype,
        sig_shape=full_shape[2:],
        io_backend=MMapBackend(),
    )
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 4
0
def large_raw(large_raw_file):
    """Fixture: dataset backed by the pre-created large raw file."""
    path, full_shape, data_dtype = large_raw_file
    # first two axes are navigation, last two are signal
    dataset = RawFileDataSet(
        path=str(path),
        nav_shape=full_shape[:2],
        dtype=data_dtype,
        sig_shape=full_shape[2:],
    )
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 5
0
def large_raw(large_raw_file):
    """Fixture: large raw dataset via the legacy scan_size/detector_size API."""
    path, full_shape, data_dtype = large_raw_file
    dataset = RawFileDataSet(
        path=str(path),
        scan_size=full_shape[:2],
        dtype=data_dtype,
        detector_size=full_shape[2:],
    )
    # split work across two cores before initializing
    dataset.set_num_cores(2)
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 6
0
def raw_dataset_8x8x8x8(lt_ctx, raw_data_8x8x8x8_path):
    """Fixture: the small 8x8 scan / 8x8 frame float32 dataset, 4 cores."""
    dataset = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(8, 8),
        sig_shape=(8, 8),
        dtype="float32",
    )
    dataset.set_num_cores(4)
    return dataset.initialize(lt_ctx.executor)
Exemplo n.º 7
0
def raw_same_dataset_4d(tmpdir_factory, hdf5_4d_data):
    """Fixture: dump the 4D HDF5 test data to a raw file and open it."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-same-data-4d'
    # write the array contents as flat binary, readable as raw float32
    hdf5_4d_data.tofile(str(raw_path))
    dataset = RawFileDataSet(
        path=str(raw_path),
        nav_shape=(2, 10),
        dtype="float32",
        sig_shape=(26, 26),
    )
    dataset.set_num_cores(4)
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 8
0
def uint16_raw(tmpdir_factory):
    """Fixture: a 16x16 scan of 128x128 uint16 frames written to a temp file."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-test-default'
    random_frames = utils._mk_random(size=(16, 16, 128, 128), dtype='uint16')
    random_frames.tofile(str(raw_path))
    # free the in-memory copy; the dataset reads from disk
    del random_frames
    dataset = RawFileDataSet(
        path=str(raw_path),
        scan_size=(16, 16),
        dtype="uint16",
        detector_size=(128, 128),
    )
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 9
0
def default_raw(tmpdir_factory, default_raw_data):
    """Fixture: write the default test data to a raw file and open it, 2 cores."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-test-default'
    default_raw_data.tofile(str(raw_path))
    # free the in-memory copy; the dataset reads from disk
    del default_raw_data
    dataset = RawFileDataSet(
        path=str(raw_path),
        scan_size=(16, 16),
        dtype="float32",
        detector_size=(128, 128),
    )
    dataset.set_num_cores(2)
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 10
0
def default_raw(tmpdir_factory):
    """Fixture: random 16x16 scan of 128x128 float32 frames in a temp raw file."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-test-default'
    random_frames = _mk_random(size=(16, 16, 128, 128), dtype='float32')
    random_frames.tofile(str(raw_path))
    # free the in-memory copy; the dataset reads from disk
    del random_frames
    dataset = RawFileDataSet(
        path=str(raw_path),
        scan_size=(16, 16),
        dtype="float32",
        detector_size=(128, 128),
    )
    yield dataset.initialize()
Exemplo n.º 11
0
def raw_with_zeros(tmpdir_factory):
    """Fixture: an all-zero 16x16x128x128 float32 raw dataset, 2 cores."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-with-zeros'
    zero_frames = np.zeros((16, 16, 128, 128), dtype='float32')
    zero_frames.tofile(str(raw_path))
    # free the in-memory copy; the dataset reads from disk
    del zero_frames
    dataset = RawFileDataSet(
        path=str(raw_path),
        scan_size=(16, 16),
        dtype="float32",
        detector_size=(128, 128),
    )
    dataset.set_num_cores(2)
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 12
0
def big_endian_raw(tmpdir_factory):
    """Fixture: big-endian uint16 ('>u2') raw dataset in a temp file, 2 cores."""
    workdir = tmpdir_factory.mktemp('data')
    raw_path = workdir + '/raw-test-default-big-endian'
    random_frames = utils._mk_random(size=(16, 16, 128, 128), dtype='>u2')
    random_frames.tofile(str(raw_path))
    # free the in-memory copy; the dataset reads from disk
    del random_frames
    dataset = RawFileDataSet(
        path=str(raw_path),
        nav_shape=(16, 16),
        dtype=">u2",
        sig_shape=(128, 128),
    )
    dataset.set_num_cores(2)
    yield dataset.initialize(InlineJobExecutor())
Exemplo n.º 13
0
def test_negative_sync_offset(lt_ctx, raw_dataset_8x8x8x8,
                              raw_data_8x8x8x8_path, io_backend):
    """A negative sync_offset shifts frames later: with offset -2, the data
    that the un-shifted dataset has at frame i appears at frame i + 2.

    Verified by running SumSigUDF on both the reference dataset and the
    offset one, then comparing the overlapping frame ranges.
    """
    udf = SumSigUDF()
    sync_offset = -2

    ds_with_offset = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(8, 8),
        sig_shape=(8, 8),
        dtype="float32",
        enable_direct=False,
        sync_offset=sync_offset,
        io_backend=io_backend,
    )
    ds_with_offset.set_num_cores(4)  # 64 frames / 4 cores -> 16 per partition
    ds_with_offset = ds_with_offset.initialize(lt_ctx.executor)
    ds_with_offset.check_valid()

    # internal start frame carries the raw offset, but the logical slice
    # still begins at the origin
    p0 = next(ds_with_offset.get_partitions())
    assert p0._start_frame == -2
    assert p0.slice.origin == (0, 0, 0)

    # tiles of 4 whole frames each
    tileshape = Shape((4, ) + tuple(ds_with_offset.shape.sig),
                      sig_dims=ds_with_offset.shape.sig.dims)
    tiling_scheme = TilingScheme.make_for_shape(
        tileshape=tileshape,
        dataset_shape=ds_with_offset.shape,
    )

    # first readable tile starts at frame 2: frames 0 and 1 have no data
    t0 = next(p0.get_tiles(tiling_scheme))
    assert tuple(t0.tile_slice.origin) == (2, 0, 0)

    # exhaust all tiles; `p` keeps the last partition afterwards
    for p in ds_with_offset.get_partitions():
        for t in p.get_tiles(tiling_scheme=tiling_scheme):
            pass

    # last of 4 partitions covers frames [48, 64)
    assert p.slice.origin == (48, 0, 0)
    assert p.slice.shape[0] == 16

    # reference result, trimmed to the frames both datasets share
    result = lt_ctx.run_udf(dataset=raw_dataset_8x8x8x8, udf=udf)
    result = result['intensity'].raw_data[:raw_dataset_8x8x8x8._meta.
                                          image_count - abs(sync_offset)]

    # offset result, skipping the leading frames with no data
    result_with_offset = lt_ctx.run_udf(dataset=ds_with_offset, udf=udf)
    result_with_offset = result_with_offset['intensity'].raw_data[
        abs(sync_offset):]

    assert np.allclose(result, result_with_offset)
Exemplo n.º 14
0
def test_positive_sync_offset_raw(lt_ctx, benchmark, raw_data_8x8x8x8_path):
    """Benchmark reading the first tile from a dataset with sync_offset=2."""
    dataset = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(8, 8),
        sig_shape=(8, 8),
        dtype="float32",
        enable_direct=False,
        sync_offset=2,
    )
    dataset.set_num_cores(4)
    dataset = dataset.initialize(lt_ctx.executor)

    # tiles of 4 whole frames each
    scheme = TilingScheme.make_for_shape(
        tileshape=Shape((4, ) + tuple(dataset.shape.sig),
                        sig_dims=dataset.shape.sig.dims),
        dataset_shape=dataset.shape,
    )

    first_partition = next(dataset.get_partitions())
    benchmark(get_first_tile, p0=first_partition, tiling_scheme=scheme)
Exemplo n.º 15
0
def test_too_many_frames(lt_ctx, raw_data_8x8x8x8_path, io_backend):
    """Reading must succeed when nav_shape selects fewer frames than the
    file contains (here 6x8 = 48 frames from the 8x8x8x8 file)."""
    dataset = RawFileDataSet(
        path=raw_data_8x8x8x8_path,
        nav_shape=(6, 8),
        sig_shape=(8, 8),
        dtype="float32",
        io_backend=io_backend,
    )
    dataset.set_num_cores(4)
    dataset = dataset.initialize(lt_ctx.executor)

    # tiles of 4 whole frames each
    scheme = TilingScheme.make_for_shape(
        tileshape=Shape((4, ) + tuple(dataset.shape.sig),
                        sig_dims=dataset.shape.sig.dims),
        dataset_shape=dataset.shape,
    )

    # exhaust every tile of every partition; no exception means success
    for partition in dataset.get_partitions():
        for _tile in partition.get_tiles(tiling_scheme=scheme):
            pass
Exemplo n.º 16
0
def large_raw(tmpdir_factory):
    """Fixture: a huge (100x100 scan, 1216x1216 uint16 frames) raw dataset
    backed by a *sparse* file, so no real disk space is consumed."""
    workdir = tmpdir_factory.mktemp('data')
    filename = workdir + '/raw-test-large-sparse'
    shape = (100, 100, 1216, 1216)
    dtype = np.uint16
    # int64 accumulator to avoid overflow on the element-count product
    size = np.prod(np.int64(shape)) * np.dtype(dtype).itemsize
    if platform.system() == "Windows":
        # create the file at full size and mark its whole range sparse
        os.system('FSUtil File CreateNew "%s" 0x%X' % (filename, size))
        os.system('FSUtil Sparse SetFlag "%s"' % filename)
        os.system('FSUtil Sparse SetRange "%s" 0 0x%X' % (filename, size))
    else:
        # POSIX: truncating to length punches a hole — no blocks allocated
        with open(filename, 'wb') as f:
            f.truncate(size)
        stat = os.stat(filename)
        assert stat.st_blocks == 0
    dataset = RawFileDataSet(
        path=str(filename),
        scan_size=shape[:2],
        dtype=dtype,
        detector_size=shape[2:],
    )
    dataset.set_num_cores(2)
    yield dataset.initialize(InlineJobExecutor())