def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), filesystem: FS):
    """Open an N5 dataset located at *path* on *filesystem*.

    path: path to the dataset directory inside a ``.n5`` container.
    location: where this datasource sits in global coordinate space.
    filesystem: filesystem on which *path* lives.

    Raises UnsupportedUrlException if the url does not contain a ``.n5``
    component, and NotImplementedError for unsupported compression types.
    """
    url = filesystem.geturl(path.as_posix())
    match = re.search(r"[^/]+\.n5/.*$", url, re.IGNORECASE)
    if not match:
        raise UnsupportedUrlException(url)
    name = match.group(0)
    self.filesystem = filesystem.opendir(path.as_posix())
    with self.filesystem.openbin("attributes.json", "r") as f:
        attributes = json.loads(f.read().decode("utf8"))
    # N5 metadata is stored in F-order; reverse into the C-order used here.
    dimensions = attributes["dimensions"][::-1]
    blockSize = attributes["blockSize"][::-1]
    # "axes" is optional in N5 attributes; fall back to guessing from rank.
    axiskeys = "".join(attributes["axes"]).lower()[::-1] if "axes" in attributes else guess_axiskeys(dimensions)
    super().__init__(
        url=url,
        name=name,
        tile_shape=Shape5D.create(raw_shape=blockSize, axiskeys=axiskeys),
        shape=Shape5D.create(raw_shape=dimensions, axiskeys=axiskeys),
        dtype=np.dtype(attributes["dataType"]).newbyteorder(">"),  # N5 payloads are big-endian
        location=location,
        axiskeys=axiskeys,
    )
    self.compression_type = attributes["compression"]["type"]
    # Membership test directly on the dict; ``.keys()`` was redundant.
    if self.compression_type not in N5Block.DECOMPRESSORS:
        raise NotImplementedError(f"Don't know how to decompress from {self.compression_type}")
def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), chunk_size: Optional[Shape5D] = None, filesystem: FS):
    """A DataSource that handles Neuroglancer's precomputed chunks.

    path: a path all the way down to the scale, i.e., if some scale has
          "key": "my_scale", then your path should end in "my_scale"
    chunk_size: a valid chunk_size for the scale selected by 'path'
    """
    self.filesystem = filesystem.opendir(path.parent.as_posix())
    # The "info" file sits next to the scale directories and describes all scales.
    self.info = PrecomputedChunksInfo.load(path=Path("info"), filesystem=self.filesystem)
    self.scale = self.info.get_scale(key=path.name)
    super().__init__(
        url="precomputed://" + filesystem.desc(path.as_posix()),
        tile_shape=self.scale.get_tile_shape_5d(self.info.num_channels, tile_shape_hint=chunk_size),
        shape=self.scale.get_shape_5d(self.info.num_channels),
        dtype=self.info.data_type,
        name=path.name,
        location=location,
        axiskeys=self.scale.axiskeys[::-1],  # externally reported axiskeys are always c-ordered
    )
    encoding_type = self.scale.encoding
    if encoding_type == "raw":
        # "raw" chunks need no transformation in either direction.
        noop = lambda data: data
        self.decompressor = noop
        self.compressor = noop
    else:
        # BUG FIX: the original referenced the undefined name ``compression_type``,
        # which would raise NameError instead of the intended error.
        raise NotImplementedError(f"Don't know how to decompress {encoding_type}")
def __init__(self, root_fs: FS, config: Config):
    """Set up dry-run side effects, exposing the change-fragments directory read-only."""
    super().__init__(root_fs)
    fragments_dir = root_fs.opendir(config.change_fragments_path)
    # Dry runs must never mutate fragments, so wrap the directory read-only.
    self.fragments_fs = read_only(fragments_dir)
def __init__(self, base_fs: FS, ds_name="train"):
    """Open the ``images/<split>`` directory of *base_fs* and count its entries.

    ds_name: which split to load (e.g. "train"); also stored as ``self.split``.
    """
    self.split = ds_name
    self.base_fs = base_fs.opendir(f"images/{ds_name}")
    self.length = len(self.base_fs.listdir(""))