Code example #1
def test_downsample_multi_channel():
    offset = (0, 0, 0)
    num_channels = 3
    size = (32, 32, 10)
    source_data = (128 * np.random.randn(num_channels, size[0], size[1],
                                         size[2])).astype("uint8")
    file_len = 32

    source_info = WkwDatasetInfo(
        "testoutput/multi-channel-test",
        "color",
        1,
        wkw.Header(np.uint8, num_channels, file_len=file_len),
    )
    target_info = WkwDatasetInfo(
        "testoutput/multi-channel-test",
        "color",
        2,
        wkw.Header(np.uint8, file_len=file_len),
    )
    try:
        shutil.rmtree(source_info.dataset_path)
        shutil.rmtree(target_info.dataset_path)
    except OSError:
        # ignore if the test output directories do not exist yet
        pass

    with open_wkw(source_info) as wkw_dataset:
        print("writing source_data shape", source_data.shape)
        wkw_dataset.write(offset, source_data)
    assert np.any(source_data != 0)

    downsample_args = (
        source_info,
        target_info,
        (2, 2, 2),
        InterpolationModes.MAX,
        tuple(a * WKW_CUBE_SIZE for a in offset),
        CUBE_EDGE_LEN,
        False,
        True,
    )
    downsample_cube_job(downsample_args)

    channels = []
    for channel_index in range(num_channels):
        channels.append(
            downsample_cube(source_data[channel_index], (2, 2, 2),
                            InterpolationModes.MAX))
    joined_buffer = np.stack(channels)

    target_buffer = read_wkw(
        target_info,
        tuple(a * WKW_CUBE_SIZE for a in offset),
        list(map(lambda x: x // 2, size)),
    )
    assert np.any(target_buffer != 0)

    assert np.all(target_buffer == joined_buffer)
Code example #2
def test_buffered_slice_writer() -> None:
    test_img = np.arange(24 * 24).reshape(24, 24).astype(np.uint16) + 1
    dtype = test_img.dtype
    origin = Vec3Int.zeros()
    layer_name = "color"
    mag = Mag(1)
    dataset_dir = TESTOUTPUT_DIR / "buffered_slice_writer"
    dataset_path = str(dataset_dir / layer_name / mag.to_layer_name())

    rmtree(dataset_dir)
    ds = Dataset(dataset_dir, voxel_size=(1, 1, 1))
    mag_view = ds.add_layer("color", COLOR_CATEGORY,
                            dtype_per_channel=dtype).add_mag(mag)

    with mag_view.get_buffered_slice_writer(absolute_offset=origin) as writer:
        for i in range(13):
            writer.send(test_img)
        with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
            try:
                read_data = data.read(origin, (24, 24, 13))
                if read_data[read_data.nonzero()].size != 0:
                    raise AssertionError(
                        "Nothing should be written on the disk. But found data with shape: {}"
                        .format(read_data.shape))
            except wkw.wkw.WKWException:
                pass

        for i in range(13, 32):
            writer.send(test_img)
        with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
            read_data = data.read(origin, (24, 24, 32))
            assert np.squeeze(read_data).shape == (24, 24, 32), (
                "The read data should have the shape: (24, 24, 32) "
                "but has a shape of: {}".format(np.squeeze(read_data).shape))
            assert read_data.size == read_data[read_data.nonzero()].size, (
                "The read data contains zeros while the "
                "written image has no zeros")

        for i in range(32, 35):
            writer.send(test_img)

    with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
        read_data = data.read(origin, (24, 24, 35))
        read_data = np.squeeze(read_data)
        assert read_data.shape == (24, 24, 35), (
            "The read data should have the shape: (24, 24, 35) "
            "but has a shape of: {}".format(np.squeeze(read_data).shape))
        assert read_data.size == read_data[read_data.nonzero()].size, (
            "The read data contains zeros while the "
            "written image has no zeros")
        test_img_3d = np.zeros((test_img.shape[0], test_img.shape[1], 35))
        for i in np.arange(35):
            test_img_3d[:, :, i] = test_img
        # check if the data are correct
        assert np.array_equal(
            test_img_3d, read_data), ("The data from the disk is not the same "
                                      "as the data that should be written.")
Code example #3
def test_buffered_slice_writer():
    test_img = np.arange(24 * 24).reshape(24, 24).astype(np.uint16) + 1
    dtype = test_img.dtype
    bbox = {"topleft": (0, 0, 0), "size": (24, 24, 35)}
    origin = [0, 0, 0]
    dataset_dir = "testoutput/buffered_slice_writer"
    layer_name = "color"
    mag = Mag(1)
    dataset_path = os.path.join(dataset_dir, layer_name, mag.to_layer_name())

    with BufferedSliceWriter(dataset_dir, layer_name, dtype, origin,
                             mag=mag) as writer:
        for i in range(13):
            writer.write_slice(i, test_img)
        with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
            try:
                read_data = data.read(origin, (24, 24, 13))
                if read_data[read_data.nonzero()].size != 0:
                    raise AssertionError(
                        "Nothing should be written on the disk. But found data with shape: {}"
                        .format(read_data.shape))
            except wkw.wkw.WKWException:
                pass

        for i in range(13, 32):
            writer.write_slice(i, test_img)
        with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
            read_data = data.read(origin, (24, 24, 32))
            assert np.squeeze(read_data).shape == (24, 24, 32), (
                "The read data should have the shape: (24, 24, 32) "
                "but has a shape of: {}".format(np.squeeze(read_data).shape))
            assert read_data.size == read_data[read_data.nonzero()].size, (
                "The read data contains zeros while the "
                "written image has no zeros")

        for i in range(32, 35):
            writer.write_slice(i, test_img)

    with wkw.Dataset.open(dataset_path, wkw.Header(dtype)) as data:
        read_data = data.read(origin, (24, 24, 35))
        read_data = np.squeeze(read_data)
        assert read_data.shape == (24, 24, 35), (
            "The read data should have the shape: (24, 24, 35) "
            "but has a shape of: {}".format(np.squeeze(read_data).shape))
        assert read_data.size == read_data[read_data.nonzero()].size, (
            "The read data contains zeros while the "
            "written image has no zeros")
        test_img_3d = np.zeros((test_img.shape[0], test_img.shape[1], 35))
        for i in np.arange(35):
            test_img_3d[:, :, i] = test_img
        # transpose because the slice writer takes [y, x] data and transposes it to [x, y] before writing
        test_img_3d = np.transpose(test_img_3d, (1, 0, 2))
        # check if the data are correct
        assert np.array_equal(
            test_img_3d, read_data), ("The data from the disk is not the same "
                                      "as the data that should be written.")
Code example #4
def convert_knossos(source_path,
                    target_path,
                    layer_name,
                    dtype,
                    mag=1,
                    jobs=1,
                    args=None):
    source_knossos_info = KnossosDatasetInfo(source_path, dtype)
    target_wkw_info = WkwDatasetInfo(
        target_path, layer_name, mag,
        wkw.Header(convert_element_class_to_dtype(dtype)))

    ensure_wkw(target_wkw_info)

    with open_knossos(source_knossos_info) as source_knossos:
        with get_executor_for_args(args) as executor:
            knossos_cubes = list(source_knossos.list_cubes())
            if len(knossos_cubes) == 0:
                logging.error("No input KNOSSOS cubes found.")
                exit(1)

            knossos_cubes.sort()
            job_args = []
            for cube_xyz in knossos_cubes:
                job_args.append(
                    (cube_xyz, source_knossos_info, target_wkw_info))

            wait_and_ensure_success(
                executor.map_to_futures(convert_cube_job, job_args))
Code example #5
def upload_wkw(seg, datasource_path, axes='zyx'):
    """Upload segmentation to datasource_path, and refresh datasource-properties.json."""
    # write
    ds_path = join(datasource_path, 'segmentation', '1')
    os.makedirs(ds_path, exist_ok=True)
    if axes == 'zyx':
        seg = np.transpose(seg, [2, 1, 0])
    elif axes == 'xyz':
        pass
    else:
        raise ValueError('axes has to be xyz or zyx')

    with wkw.Dataset.create(ds_path, wkw.Header(np.uint32)) as ds:
        ds.write([0, 0, 0], seg.astype(np.uint32))

    mt = read_metadata_for_layer(datasource_path, 'color')
    bbox = mt[0]['boundingBox']

    refresh_metadata(datasource_path,
                     compute_max_id=True,
                     exact_bounding_box=bbox)
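
A minimal usage sketch for upload_wkw above. The array shape and dataset path are hypothetical placeholders, and the call assumes the dataset at that path already has a "color" layer whose metadata (bounding box) can be read.

import numpy as np

# Hypothetical z-y-x segmentation volume (placeholder values only).
seg = np.zeros((10, 20, 30), dtype=np.uint32)
seg[2:5, 5:10, 0:15] = 1

# upload_wkw transposes zyx input to xyz, writes it to
# <datasource_path>/segmentation/1 and refreshes datasource-properties.json.
upload_wkw(seg, "testoutput/my-dataset", axes="zyx")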
Code example #6
def downsample_mag(
    path,
    layer_name,
    source_mag: Mag,
    target_mag: Mag,
    interpolation_mode="default",
    compress=False,
    buffer_edge_len=None,
    args=None,
):
    interpolation_mode = parse_interpolation_mode(interpolation_mode, layer_name)

    source_wkw_info = WkwDatasetInfo(path, layer_name, source_mag.to_layer_name(), None)
    with open_wkw(source_wkw_info) as source:
        target_wkw_info = WkwDatasetInfo(
            path,
            layer_name,
            target_mag.to_layer_name(),
            wkw.Header(source.header.voxel_type),
        )

    downsample(
        source_wkw_info,
        target_wkw_info,
        source_mag,
        target_mag,
        interpolation_mode,
        compress,
        buffer_edge_len,
        args,
    )
Code example #7
def test_row_major_order():
    data_shape = (4, 5, 6)
    data = generate_test_data(np.uint8, data_shape)
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)) as dataset:
        dataset.write((0, 0, 0), data)
        read_data = dataset.read((0, 0, 0), data_shape)

    assert np.all(data == read_data)

    with wkw.Dataset.create("tests/tmp2", wkw.Header(np.uint8)) as dataset:
        fortran_data = np.asfortranarray(data)
        dataset.write((0, 0, 0), fortran_data)
        fortran_read_data = dataset.read((0, 0, 0), data_shape)

    assert np.all(fortran_read_data == read_data)
    assert np.all(fortran_read_data == fortran_data)
Code example #8
    def __init__(
            self,
            dataset_path: str,
            layer_name: str,
            dtype,
            origin: Union[Tuple[int, int, int], List[int]],
            # buffer_size specifies, how many slices should be aggregated until they are flushed.
            buffer_size: int = 32,
            # file_len specifies, how many buckets written per dimension into a wkw cube. Using 32,
            # results in 1 GB/wkw file for 8-bit data
            file_len: int = 32,
            mag: Mag = Mag("1"),
    ):

        self.dataset_path = dataset_path
        self.layer_name = layer_name
        self.buffer_size = buffer_size

        layer_path = path.join(self.dataset_path, self.layer_name,
                               mag.to_layer_name())

        self.dtype = dtype
        self.dataset = wkw.Dataset.open(layer_path,
                                        wkw.Header(dtype, file_len=file_len))
        self.origin = origin

        self.buffer = []
        self.current_z = None
        self.buffer_start_z = None
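
The "1 GB/wkw file" remark in the file_len comment above can be checked with a quick back-of-the-envelope calculation. The sketch below assumes wkw's default bucket edge length of 32 voxels.

file_len = 32        # buckets per dimension in one .wkw file
block_len = 32       # voxels per bucket edge (wkw default, assumed here)
bytes_per_voxel = 1  # 8-bit data

file_edge = file_len * block_len           # 1024 voxels per dimension
print(file_edge ** 3 * bytes_per_voxel)    # 1073741824 bytes, i.e. about 1 GiB per wkw file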
Code example #9
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_readwrite_live_compression_should_truncate():
    SIZE128 = (128, 128, 128)
    file_len = 4
    header = wkw.Header(np.uint8,
                        block_type=wkw.Header.BLOCK_TYPE_LZ4,
                        file_len=file_len)
    test_data = generate_test_data(header.voxel_type, SIZE128)
    ones_data = np.ones(SIZE128).astype(header.voxel_type)

    with wkw.Dataset.create('tests/tmp', header) as dataset:
        dataset.write(POSITION, test_data)

    random_compressed_size = path.getsize(
        path.join('tests/tmp', 'z0', 'y0', 'x0.wkw'))

    with wkw.Dataset.open('tests/tmp') as dataset:
        dataset.write(POSITION, ones_data)

    empty_compressed_size = path.getsize(
        path.join('tests/tmp', 'z0', 'y0', 'x0.wkw'))

    assert empty_compressed_size < random_compressed_size

    with wkw.Dataset.open('tests/tmp') as dataset:
        assert np.all(dataset.read(POSITION, SIZE128) == ones_data)
Code example #10
def test_element_class_conversion(tmp_path: Path) -> None:
    test_wkw_path = tmp_path / "test_metadata"
    prediction_layer_name = "prediction"
    prediction_wkw_info = WkwDatasetInfo(
        test_wkw_path,
        prediction_layer_name,
        1,
        wkw.Header(np.float32, num_channels=3, file_len=1),
    )
    ensure_wkw(prediction_wkw_info)

    write_custom_layer(test_wkw_path, "prediction", np.float32, num_channels=3)
    write_webknossos_metadata(
        test_wkw_path,
        "test_metadata",
        (11.24, 11.24, 28),
        compute_max_id=True,
        exact_bounding_box={"topLeft": [0, 0, 0], "width": 4, "height": 4, "depth": 4},
    )
    write_custom_layer(test_wkw_path, "segmentation", np.float64, num_channels=1)
    write_custom_layer(test_wkw_path, "color", np.uint8, num_channels=3)

    refresh_metadata(test_wkw_path)

    check_element_class_of_layer(
        test_wkw_path, "prediction", "float", np.dtype(np.float32)
    )
    check_element_class_of_layer(
        test_wkw_path, "segmentation", "double", np.dtype(np.float64)
    )
    check_element_class_of_layer(test_wkw_path, "color", "uint24", np.dtype(np.uint8))
Code example #11
File: compress.py Project: fossabot/webknossos-cuber
def compress_mag(source_path, layer_name, target_path, mag: Mag, args=None):
    if path.exists(path.join(target_path, layer_name, str(mag))):
        logging.error("Target path '{}' already exists".format(target_path))
        exit(1)

    if args is not None and hasattr(args, "dtype"):
        header = wkw.Header(convert_element_class_to_dtype(args.dtype))
    else:
        header = None
    source_wkw_info = WkwDatasetInfo(source_path, layer_name, mag, header)
    target_mag_path = path.join(target_path, layer_name, str(mag))
    logging.info("Compressing mag {0} in '{1}'".format(str(mag),
                                                       target_mag_path))

    with open_wkw(source_wkw_info) as source_wkw:
        source_wkw.compress(target_mag_path)
        with get_executor_for_args(args) as executor:
            job_args = []
            for file in source_wkw.list_files():
                rel_file = path.relpath(file, source_wkw.root)
                job_args.append((file, path.join(target_mag_path, rel_file)))

            wait_and_ensure_success(
                executor.map_to_futures(compress_file_job, job_args))

    logging.info("Mag {0} successfully compressed".format(str(mag)))
Code example #12
File: cubing.py Project: fossabot/webknossos-cuber
def cubing(source_path, target_path, layer_name, dtype, batch_size, args) -> dict:

    source_files = find_source_filenames(source_path)

    # All images are assumed to have equal dimensions
    num_x, num_y = image_reader.read_dimensions(source_files[0])
    num_channels = image_reader.read_channel_count(source_files[0])
    num_z = len(source_files)

    target_mag = Mag(args.target_mag)
    target_wkw_info = WkwDatasetInfo(
        target_path,
        layer_name,
        target_mag,
        wkw.Header(
            convert_element_class_to_dtype(dtype),
            num_channels,
            file_len=args.wkw_file_len,
        ),
    )
    interpolation_mode = parse_interpolation_mode(
        args.interpolation_mode, target_wkw_info.layer_name
    )
    if target_mag != Mag(1):
        logging.info(
            f"Downsampling the cubed image to {target_mag} in memory with interpolation mode {interpolation_mode}."
        )

    logging.info("Found source files: count={} size={}x{}".format(num_z, num_x, num_y))

    ensure_wkw(target_wkw_info)

    start_z = args.start_z

    with get_executor_for_args(args) as executor:
        job_args = []
        # We iterate over all z sections
        for z in range(start_z, num_z + start_z, BLOCK_LEN):
            # Prepare z batches
            max_z = min(num_z + start_z, z + BLOCK_LEN)
            z_batch = list(range(z, max_z))
            # Prepare job
            job_args.append(
                (
                    target_wkw_info,
                    z_batch,
                    target_mag,
                    interpolation_mode,
                    source_files[z - start_z : max_z - start_z],
                    batch_size,
                    (num_x, num_y),
                    args.pad,
                )
            )

        wait_and_ensure_success(executor.map_to_futures(cubing_job, job_args))

    # Return Bounding Box
    return {"topLeft": [0, 0, 0], "width": num_x, "height": num_y, "depth": num_z}
Code example #13
def test_row_major_order_with_different_voxel_size():
    data_shape = (4, 3, 9)
    data = generate_test_data(np.uint16, data_shape)
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint16)) as dataset:
        dataset.write((3, 1, 0), data)
        read_data = dataset.read((3, 1, 0), data_shape)

    assert np.all(data == read_data)
Code example #14
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_readwrite_live_compression_should_enforce_full_file_write():
    with pytest.raises(Exception):
        with wkw.Dataset.create(
                'tests/tmp',
                wkw.Header(np.uint8,
                           block_type=wkw.Header.BLOCK_TYPE_LZ4)) as dataset:

            test_data = generate_test_data(dataset.header.voxel_type)
            dataset.write(POSITION, test_data)
Code example #15
def test_row_major_order_with_offset():
    data_shape = (17, 1, 4)
    data = generate_test_data(np.uint8, data_shape)
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)) as dataset:
        dataset.write((15, 2, 0), data)
        read_data = dataset.read((15, 2, 0), data_shape)

    assert np.all(data == read_data)
Code example #16
def test_row_major_order_with_channels():
    data_shape = (2, 4, 3, 9)
    data = generate_test_data(np.uint8, data_shape)
    with wkw.Dataset.create("tests/tmp",
                            wkw.Header(np.uint8, num_channels=2)) as dataset:
        dataset.write((3, 1, 0), data)
        read_data = dataset.read((3, 1, 0), data_shape[1:])

    assert np.all(data == read_data)
Code example #17
def test_view_on_np_array():
    data_shape = (4, 4, 9)
    data = generate_test_data(np.uint16, data_shape)
    data = data[:, ::2]
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint16)) as dataset:
        dataset.write((3, 1, 0), data)
        read_data = dataset.read((3, 1, 0), data.shape)

    assert np.all(data == read_data)
Code example #18
def test_column_major_order_with_channels_and_different_voxel_size():
    data_shape = (2, 4, 3, 9)
    data = generate_test_data(np.uint16, data_shape, order="F")
    with wkw.Dataset.create("tests/tmp",
                            wkw.Header(np.uint16, num_channels=2)) as dataset:
        dataset.write((3, 1, 0), data)
        read_data = dataset.read((3, 1, 0), data_shape[1:])

    assert np.all(data == read_data)
Code example #19
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_readwrite():
    with wkw.Dataset.create('tests/tmp', wkw.Header(np.uint8)) as dataset:

        header_size = path.getsize(path.join('tests/tmp', 'header.wkw'))
        test_data = generate_test_data(dataset.header.voxel_type)

        dataset.write(POSITION, test_data)
        assert path.getsize(path.join('tests/tmp', 'z0', 'y0', 'x0.wkw')) == \
               np.prod(SIZE) * (dataset.header.file_len ** 3) + header_size
        assert np.all(dataset.read(POSITION, SIZE) == test_data)
Code example #20
def convert_nifti(source_nifti_path,
                  target_path,
                  layer_name,
                  dtype,
                  scale,
                  mag=1):
    target_wkw_info = WkwDatasetInfo(str(target_path.resolve()), layer_name,
                                     mag, wkw.Header(np.dtype(dtype)))
    ensure_wkw(target_wkw_info)

    ref_time = time.time()
    # Assume no translation
    offset = (0, 0, 0)

    with open_wkw(target_wkw_info) as target_wkw:
        source_nifti = nib.load(str(source_nifti_path.resolve()))
        cube_data = np.array(source_nifti.get_fdata())

        if len(source_nifti.shape) == 3:
            size = list(source_nifti.shape)
            cube_data = cube_data.reshape((1, ) + source_nifti.shape)

        elif len(source_nifti.shape) == 4:
            size = list(source_nifti.shape[:-1])
            cube_data = np.transpose(cube_data, (3, 0, 1, 2))

        else:
            logging.warning(
                "Converting of {} failed! Too many or too less dimensions".
                format(source_nifti_path))

            return

        if scale is None:
            scale = tuple(map(float, source_nifti.header["pixdim"][:3]))

        cube_data = to_target_datatype(cube_data, dtype)
        target_wkw.write(offset, cube_data)

    logging.debug("Converting of {} took {:.8f}s".format(
        source_nifti_path,
        time.time() - ref_time))

    write_webknossos_metadata(
        str(target_path),
        source_nifti_path.stem,
        scale=scale,
        exact_bounding_box={
            "topLeft": offset,
            "width": size[0],
            "height": size[1],
            "depth": size[2],
        },
    )
Code example #21
def test_readwrite():
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)) as dataset:

        header_size = path.getsize(path.join("tests/tmp", "header.wkw"))
        test_data = generate_test_data(dataset.header.voxel_type)

        dataset.write(POSITION, test_data)
        assert (path.getsize(path.join("tests/tmp", "z0", "y0",
                                       "x0.wkw")) == np.prod(SIZE) *
                (dataset.header.file_len**3) + header_size)
        assert np.all(dataset.read(POSITION, SIZE) == test_data)
Code example #22
def write_custom_layer(target_path, layer_name, dtype, num_channels):
    data = (
        np.arange(4 * 4 * 4 * num_channels)
        .reshape((num_channels, 4, 4, 4))
        .astype(dtype)
    )
    prediction_wkw_info = WkwDatasetInfo(
        target_path, layer_name, 1, wkw.Header(dtype, num_channels)
    )
    ensure_wkw(prediction_wkw_info)
    with open_wkw(prediction_wkw_info) as dataset:
        dataset.write(off=(0, 0, 0), data=data)
Code example #23
def test_big_read():
    data = np.ones((10, 10, 764), order="C", dtype=np.uint8)
    offset = np.array([0, 0, 640])
    bottom = (2000, 2000, 2000)
    mem_buffer = np.zeros(bottom, dtype=np.uint8, order="F")

    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)) as dataset:
        dataset.write(offset, data)
        mem_buffer[offset[0]:offset[0] + data.shape[0],
                   offset[1]:offset[1] + data.shape[1],
                   offset[2]:offset[2] + data.shape[2], ] = data
        read_data = dataset.read((0, 0, 0), bottom)
        assert np.all(read_data == mem_buffer)
Code example #24
def tile_cubing(target_path,
                layer_name,
                dtype,
                batch_size,
                input_path_pattern,
                args=None):
    decimal_lengths = get_digit_counts_for_dimensions(input_path_pattern)
    (
        min_dimensions,
        max_dimensions,
        arbitrary_file,
        file_count,
    ) = detect_interval_for_dimensions(input_path_pattern, decimal_lengths)

    if not arbitrary_file:
        logging.error(
            f"No source files found. Maybe the input_path_pattern was wrong. You provided: {input_path_pattern}"
        )
        return

    # Determine tile size from first matching file
    tile_size = image_reader.read_dimensions(arbitrary_file)
    num_channels = image_reader.read_channel_count(arbitrary_file)
    tile_size = (tile_size[0], tile_size[1], num_channels)
    logging.info("Found source files: count={} with tile_size={}x{}".format(
        file_count, tile_size[0], tile_size[1]))

    target_wkw_info = WkwDatasetInfo(
        target_path,
        layer_name,
        1,
        wkw.Header(convert_element_class_to_dtype(dtype), num_channels),
    )
    ensure_wkw(target_wkw_info)
    with get_executor_for_args(args) as executor:
        job_args = []
        # Iterate over all z batches
        for z_batch in get_regular_chunks(min_dimensions["z"],
                                          max_dimensions["z"], BLOCK_LEN):
            job_args.append((
                target_wkw_info,
                list(z_batch),
                input_path_pattern,
                batch_size,
                tile_size,
                min_dimensions,
                max_dimensions,
                decimal_lengths,
            ))
        wait_and_ensure_success(
            executor.map_to_futures(tile_cubing_job, job_args))
Code example #25
    def write_and_test_in_given_order(wkw_path, order):
        data_shape = (35, 35, 35)
        data = generate_test_data(np.uint8, data_shape, order=order)
        with wkw.Dataset.create(wkw_path, wkw.Header(np.uint8)) as dataset:
            dataset.write((0, 0, 0), np.ones((35, 35, 64), dtype=np.uint8))
            dataset.write((1, 2, 3), data)

            read_data = dataset.read((1, 2, 3), (35, 35, 35))
            before = dataset.read((0, 0, 0), (1, 2, 3))
            after = dataset.read((0, 0, 38), (35, 35, 26))

        assert np.all(data == read_data)
        assert np.all(before == 1)
        assert np.all(after == 1)
Code example #26
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_readwrite_live_compression_should_not_allow_inconsistent_writes():
    SIZE129 = (129, 128, 128)
    file_len = 4
    header = wkw.Header(np.uint8,
                        block_type=wkw.Header.BLOCK_TYPE_LZ4,
                        file_len=file_len)
    test_data = generate_test_data(header.voxel_type, SIZE129)
    empty_data = np.zeros(SIZE129).astype(header.voxel_type)

    with wkw.Dataset.create('tests/tmp', header) as dataset:
        with pytest.raises(Exception):
            dataset.write(POSITION, test_data)

    with wkw.Dataset.open('tests/tmp') as dataset:
        assert np.all(dataset.read(POSITION, SIZE129) == empty_data)
Code example #27
def test_multiple_writes_and_reads():

    mem_buffer = np.zeros((200, 200, 200), dtype=np.uint8, order="F")
    with wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)) as dataset:
        for i in range(10):
            offset = np.random.randint(100, size=(3))
            size = np.random.randint(1, 100, size=(3))
            order = np.random.choice(["F", "C"])
            data = generate_test_data(np.uint8, [1] + list(size), order)
            dataset.write(offset, data)
            mem_buffer[offset[0]:offset[0] + size[0],
                       offset[1]:offset[1] + size[1],
                       offset[2]:offset[2] + size[2], ] = data

            read_data = dataset.read((0, 0, 0), (200, 200, 200))
            assert np.all(mem_buffer == read_data)
Code example #28
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_compress():
    with wkw.Dataset.create('tests/tmp', wkw.Header(np.uint8)) as dataset:

        test_data = generate_test_data(dataset.header.voxel_type)
        dataset.write(POSITION, test_data)

        with dataset.compress('tests/tmp2', compress_files=True) as dataset2:
            assert dataset2.header.voxel_type == np.uint8
            assert dataset2.header.block_type == wkw.Header.BLOCK_TYPE_LZ4HC

            header_size = path.getsize(path.join('tests/tmp2', 'header.wkw'))

            assert path.exists(path.join('tests/tmp2', 'header.wkw'))
            assert path.getsize(path.join('tests/tmp2', 'z0', 'y0', 'x0.wkw')) < \
                   np.prod(SIZE) * (dataset2.header.file_len ** 3) + header_size
            assert np.all(dataset2.read(POSITION, SIZE) == test_data)
Code example #29
def test_non_negative_offsets():
    wkw.Dataset.create("tests/tmp", wkw.Header(np.uint8)).close()

    with pytest.raises(AssertionError):
        with wkw.Dataset.open("tests/tmp") as dataset:
            dataset.read((-1, 0, 0), (0, 0, 0))

    with pytest.raises(AssertionError):
        with wkw.Dataset.open("tests/tmp") as dataset:
            dataset.read((0, -1, 0), (0, 0, 0))

    with pytest.raises(AssertionError):
        with wkw.Dataset.open("tests/tmp") as dataset:
            dataset.read((0, 0, -1), (0, 0, 0))

    with pytest.raises(AssertionError):
        with wkw.Dataset.open("tests/tmp") as dataset:
            dataset.read((0, 0, 0), (-1, -1, -1))
Code example #30
File: test_wkw.py Project: mgschm/webknossos-wrap
def test_readwrite_live_compression():
    SIZE128 = (128, 128, 128)
    file_len = 4
    header = wkw.Header(np.uint8,
                        block_type=wkw.Header.BLOCK_TYPE_LZ4,
                        file_len=file_len)
    with wkw.Dataset.create('tests/tmp', header) as dataset:
        header_size = path.getsize(path.join('tests/tmp', 'header.wkw'))
        test_data = generate_test_data(dataset.header.voxel_type, SIZE128)

        dataset.write(POSITION, test_data)

        # The size should be less than if it was not compressed
        assert path.getsize(path.join('tests/tmp', 'z0', 'y0', 'x0.wkw')) < \
               np.prod(SIZE128) * (dataset.header.file_len ** 3) + header_size

    with wkw.Dataset.open('tests/tmp') as dataset:
        assert np.all(dataset.read(POSITION, SIZE128) == test_data)