# Exemplo n.º 1
def upload_wkw(seg, datasource_path, axes='zyx'):
    """Upload a segmentation volume to datasource_path and refresh datasource-properties.json.

    Args:
        seg: 3D numpy array of segment ids, axis order given by `axes`.
        datasource_path: datasource root; data is written to
            <datasource_path>/segmentation/1.
        axes: 'zyx' (default; transposed to xyz before writing) or 'xyz'.

    Raises:
        ValueError: if `axes` is neither 'xyz' nor 'zyx'.
    """
    # Validate before touching the filesystem (original created the
    # directory first and only then rejected a bad `axes` value).
    if axes == 'zyx':
        # wkw data is written in xyz order, so flip the axis order first.
        seg = np.transpose(seg, [2, 1, 0])
    elif axes != 'xyz':
        raise ValueError('axes has to be xyz or zyx')

    ds_path = join(datasource_path, 'segmentation', '1')
    os.makedirs(ds_path, exist_ok=True)
    with wkw.Dataset.create(ds_path, wkw.Header(np.uint32)) as ds:
        ds.write([0, 0, 0], np.uint32(seg))

    # Reuse the color layer's bounding box for the refreshed metadata.
    mt = read_metadata_for_layer(datasource_path, 'color')
    bbox = mt[0]['boundingBox']

    refresh_metadata(datasource_path,
                     compute_max_id=True,
                     exact_bounding_box=bbox)
# Exemplo n.º 2
def check_element_class_of_layer(
    test_wkw_path, layer_name, expected_element_class, expected_dtype
):
    """Assert that `layer_name` exists in the datasource properties and has
    the expected elementClass and converted dtype."""
    properties = read_datasource_properties(test_wkw_path)
    # Scan all layers; keep the last one whose name matches (None if absent).
    matches = [
        layer for layer in properties["dataLayers"] if layer["name"] == layer_name
    ]
    layer_to_check = matches[-1] if matches else None

    assert (
        layer_to_check
    ), f"Did not find layer {layer_name} in datasource_properties.json."
    assert layer_to_check["elementClass"] == expected_element_class

    _, converted_dtype, _, _ = read_metadata_for_layer(test_wkw_path, layer_name)
    assert converted_dtype == expected_dtype
def export_wkw_as_tiff(args):
    """Export a wkw layer to a tiff stack as configured on `args`.

    Normalizes `args.bbox`, `args.tile_size` and `args.batch_size` in place
    (string CLI values become lists/ints) before delegating to
    export_tiff_stack.
    """
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # Resolve the bounding box: fall back to the layer metadata when the
    # user did not pass one explicitly.
    if args.bbox is None:
        _, _, size, topleft = read_metadata_for_layer(args.source_path, args.layer_name)
        bbox = {"topleft": topleft, "size": size}
    else:
        values = [int(part.strip()) for part in args.bbox.split(",")]
        assert len(values) == 6
        bbox = {"topleft": values[0:3], "size": values[3:6]}

    logging.info(f"Starting tiff export for bounding box: {bbox}")

    if args.tiles_per_dimension is not None:
        # A tile *count* per dimension was given; convert it to a tile size.
        counts = [int(part.strip()) for part in args.tiles_per_dimension.split(",")]
        assert len(counts) == 2
        args.tile_size = counts
        logging.info(
            f"Using tiling with {args.tile_size[0]},{args.tile_size[1]} tiles in the dimensions."
        )
        args.tile_size = [
            ceil(bbox["size"][0] / counts[0]),
            ceil(bbox["size"][1] / counts[1]),
        ]
    elif args.tile_size is not None:
        # An explicit tile size string was given; parse it.
        args.tile_size = [int(part.strip()) for part in args.tile_size.split(",")]
        assert len(args.tile_size) == 2
        logging.info(
            f"Using tiling with the size of {args.tile_size[0]},{args.tile_size[1]}."
        )

    args.batch_size = int(args.batch_size)

    export_tiff_stack(
        wkw_file_path=args.source_path,
        wkw_layer=args.layer_name,
        bbox=bbox,
        mag=Mag(args.mag),
        destination_path=args.destination_path,
        name=args.name,
        tiling_slice_size=args.tile_size,
        batch_size=args.batch_size,
        downsample=args.downsample,
        args=args,
    )
# Exemplo n.º 4
def read_downloaded_wkw(zip_path,
                        datasource_path,
                        axes='xyz',
                        resolution=(6, 6, 40)):
    """Return (raw, segmentation, nml_path) extracted from a downloaded wkw zip.

    Args:
        zip_path: path to the downloaded .zip annotation archive.
        datasource_path: datasource root holding the 'color' layer whose
            bounding box determines the read offset and shape.
        axes: 'xyz' (native wkw order) or 'zyx' (transposed).
        resolution: unused; kept for backward compatibility.

    Raises:
        ValueError: if `axes` is neither 'xyz' nor 'zyx'.
    """
    # BUG FIX: str.rstrip('.zip') strips any trailing chars from the set
    # {'.', 'z', 'i', 'p'}, mangling names like 'dataz.zip' -> 'data'.
    # Strip the literal suffix instead.
    target_dir = zip_path[:-len('.zip')] if zip_path.endswith('.zip') else zip_path
    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
        zip_ref.extractall(target_dir)

    # Some downloads nest the actual volume data in an inner data.zip.
    if exists(join(target_dir, 'data.zip')):
        with zipfile.ZipFile(join(target_dir, 'data.zip'), 'r') as zip_ref:
            zip_ref.extractall(join(target_dir, 'data'))

    # assert nml and data exists
    nml_path = glob.glob(join(target_dir, '*.nml'))[0]
    seg_path = join(target_dir, 'data')
    assert exists(nml_path)
    assert exists(seg_path)

    # Offset/shape come from the color layer's bounding box.
    mt = read_metadata_for_layer(datasource_path, 'color')
    bbox = mt[0]['boundingBox']
    off = bbox['topLeft']
    shape = (bbox['width'], bbox['height'], bbox['depth'])
    print(off)
    print(shape)

    # Use context managers so both dataset handles are closed
    # deterministically (the original leaked them).
    with wkw.Dataset.open(join(datasource_path, 'color', '1')) as raw_ds:
        raw = raw_ds.read(off, shape)
    with wkw.Dataset.open(join(seg_path, '1')) as ds:
        segmentation = ds.read(off, shape)
    print(np.mean(segmentation))

    if axes == 'xyz':
        raw = raw[0]
        seg = segmentation[0]
    elif axes == 'zyx':
        raw = np.transpose(raw[0], [2, 1, 0])
        seg = np.transpose(segmentation[0], [2, 1, 0])
    else:
        # BUG FIX: an unknown `axes` previously fell through and raised
        # UnboundLocalError at the return; fail loudly like upload_wkw does.
        raise ValueError('axes has to be xyz or zyx')
    return raw, seg, nml_path
# For each skeleton tree, collect the segment ids under its nodes; all ids
# found within one tree form one equivalence class.
equiv_classes = [
  {ds_in.read(node.position, (1, 1, 1))[0, 0, 0, 0] for node in tree.nodes}
  for tree in nml.trees
]

# Map every class member to one (arbitrary, set-order-dependent)
# representative of its class.
equiv_map = {}
for klass in equiv_classes:
  base = next(iter(klass))
  for member_id in klass:  # renamed from `id`, which shadowed the builtin
    equiv_map[member_id] = base

print("Found {} equivalence classes with {} nodes".format(len(equiv_classes), len(equiv_map)))
print(equiv_classes)

# Rewrite segmentation layer
_, _, bbox, origin = read_metadata_for_layer(args.input, args.layer_name)

makedirs(args.output, exist_ok=True)
for z_start in range(origin[2], origin[2] + bbox[2], 32):
  z_end = min(origin[2] + z_start + 32, origin[2] + bbox[2])
  offset = (origin[0], origin[1], z_start)
  size = (bbox[0], bbox[1], z_end - z_start)

  print("Processing cube offset={} size={}".format(offset, size))
  cube_in = ds_in.read(offset, size)[0]

  cube_out = np.zeros(size, dtype=np.uint32)
  if not args.set_zero:
      cube_out[:, :, :] = cube_in
  for from_id, to_id in equiv_map.items():