Example #1
    def check_bounded(self, bbox, mip):
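        # Raise OutOfBoundsError when bounds checking is enabled and the
        # requested cutout is not fully contained in the dataset bounds
        # at this mip level.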
        if self.bounded and not self.meta.bounds(mip).contains_bbox(bbox):
            raise exceptions.OutOfBoundsError("""
        Requested cutout not contained within dataset bounds.

        Cloudpath: {}
        Requested: {}
        Bounds: {}
        Mip: {}
        Resolution: {}

        Set bounded=False to disable this warning.
      """.format(self.meta.cloudpath, bbox, self.meta.bounds(mip), mip,
                 self.meta.resolution(mip)))
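
A minimal usage sketch of how this check surfaces to a caller. The cloudpath and coordinates below are placeholders, not from the source; the coordinates are assumed to lie outside the dataset's bounds.

from cloudvolume import CloudVolume
from cloudvolume.exceptions import OutOfBoundsError

vol = CloudVolume('gs://example-bucket/dataset')  # bounded=True by default

try:
    # Assumed to extend past this dataset's bounds.
    cutout = vol[5000:5064, 5000:5064, 500:516]
except OutOfBoundsError as err:
    print(err)

# With bounded=False the same request is permitted; out-of-range voxels
# are filled with zeros instead of raising.
vol = CloudVolume('gs://example-bucket/dataset', bounded=False)
cutout = vol[5000:5064, 5000:5064, 500:516]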
Example #2
    def make_shard(self, img, bbox, mip=None, spec=None, progress=False):
        """
    Convert an image that represents a single complete shard 
    into a shard file.
  
    img: a volumetric numpy array image
    bbox: the bbox it represents in voxel coordinates
    mip: if specified, use the sharding specification from 
      this mip level, otherwise use the sharding spec from
      the current implicit mip level in config.
    spec: use the provided specification (overrides mip parameter)

    Returns: (filename, shard_file)
    """
        mip = mip if mip is not None else self.config.mip
        scale = self.meta.scale(mip)

        if spec is None:
            if 'sharding' in scale:
                spec = sharding.ShardingSpecification.from_dict(
                    scale['sharding'])
            else:
                raise ValueError(
                    "mip {} does not have a sharding specification.".format(
                        mip))

        bbox = Bbox.create(bbox)
        if bbox.subvoxel():
            raise ValueError(
                "Bounding box is too small to make a shard. Got: {}".format(
                    bbox))

        # Alignment Checks:
        # 1. Aligned to atomic chunks - required for grid point generation
        aligned_bbox = bbox.expand_to_chunk_size(
            self.meta.chunk_size(mip), offset=self.meta.voxel_offset(mip))
        if bbox != aligned_bbox:
            raise exceptions.AlignmentError(
                "Unable to create shard from a non-chunk aligned bounding box. Requested: {}, Aligned: {}"
                .format(bbox, aligned_bbox))

        # 2. Covers the dataset at least partially
        aligned_bbox = Bbox.clamp(aligned_bbox, self.meta.bounds(mip))
        if aligned_bbox.subvoxel():
            raise exceptions.OutOfBoundsError(
                "Shard completely outside dataset: Requested: {}, Dataset: {}".
                format(bbox, self.meta.bounds(mip)))

        grid_size = self.grid_size(mip)
        chunk_size = self.meta.chunk_size(mip)
        reader = sharding.ShardReader(self.meta, self.cache, spec)

        # 3. Gridpoints all within this one shard
        gpts = list(gridpoints(aligned_bbox, self.meta.bounds(mip),
                               chunk_size))
        morton_codes = compressed_morton_code(gpts, grid_size)
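        # The reduce yields the common filename if every morton code maps to
        # the same shard file, and collapses to False on the first mismatch.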
        all_same_shard = bool(
            reduce(lambda a, b: operator.eq(a, b) and a,
                   map(reader.get_filename, morton_codes)))

        if not all_same_shard:
            raise exceptions.AlignmentError(
                "The gridpoints for this image did not all correspond to the same shard. Got: {}"
                .format(bbox))

        labels = {}
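        # Encode each atomic chunk and key it by its compressed morton code;
        # pt_anchor translates absolute voxel coordinates into img's frame.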
        pt_anchor = gpts[0] * chunk_size
        for pt_abs, morton_code in zip(gpts, morton_codes):
            cutout_bbx = Bbox(pt_abs * chunk_size, (pt_abs + 1) * chunk_size)

            # Neuroglancer expects border chunks not to extend beyond dataset bounds
            cutout_bbx.maxpt = cutout_bbx.maxpt.clip(
                None, self.meta.volume_size(mip))
            cutout_bbx -= pt_anchor

            chunk = img[cutout_bbx.to_slices()]
            labels[morton_code] = chunks.encode(
                chunk,
                self.meta.encoding(mip),
                block_size=self.meta.compressed_segmentation_block_size(mip),
            )

        shard_filename = reader.get_filename(first(labels.keys()))

        return (shard_filename, spec.synthesize_shard(labels,
                                                      progress=progress))
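
A hypothetical end-to-end sketch of driving make_shard. The cloudpath, bbox, and zero-filled image below are placeholder assumptions chosen for illustration; the dataset's scale must already carry a 'sharding' specification, and the bbox must be chunk aligned and covered by exactly one shard.

import numpy as np
from cloudfiles import CloudFiles
from cloudvolume import CloudVolume
from cloudvolume.lib import Bbox

vol = CloudVolume('gs://example-bucket/segmentation', mip=0)

# Assumed to be a chunk-aligned region whose chunks all fall in one shard;
# otherwise make_shard raises AlignmentError.
bbox = Bbox((0, 0, 0), (2048, 2048, 64))
img = np.zeros(bbox.size3(), dtype=vol.dtype)

filename, shard = vol.image.make_shard(img, bbox, mip=0)

# Upload the synthesized shard file under the mip's key directory.
CloudFiles(vol.cloudpath).put(f"{vol.key}/{filename}", shard)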