def test_add_argparse_options(write_chunks, write_files):
    import argparse
    # Test default values
    parser = argparse.ArgumentParser()
    add_argparse_options(parser,
                         write_chunks=write_chunks,
                         write_files=write_files)
    args = parser.parse_args([])
    get_accessor_for_url(".", vars(args))

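# The precomputed-IO tests below rely on DUMMY_INFO, which is defined outside
# this excerpt. A minimal sketch consistent with the coordinates, dtype, and
# "raw" encoding exercised by these tests (assumed, not the canonical
# fixture) would be:
DUMMY_INFO = {
    "type": "image",
    "data_type": "uint16",
    "num_channels": 1,
    "scales": [{
        "key": "key",
        "size": [8, 3, 15],
        "resolution": [1e6, 1e6, 1e6],
        "voxel_offset": [0, 0, 0],
        "chunk_sizes": [[8, 8, 8]],
        "encoding": "raw",
    }],
}
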
def test_precomputed_IO_validate_chunk_coords(tmpdir):
    accessor = get_accessor_for_url(str(tmpdir))
    # Minimal info file
    io = get_IO_for_new_dataset(DUMMY_INFO, accessor)
    # Aligned with the 8-voxel chunk grid and clamped to the scale size
    good_chunk_coords = (0, 8, 0, 3, 0, 8)
    # Y range 1..4 is neither grid-aligned nor within the scale size of 3
    bad_chunk_coords = (0, 8, 1, 4, 0, 8)
    assert io.validate_chunk_coords("key", good_chunk_coords) is True
    assert io.validate_chunk_coords("key", bad_chunk_coords) is False

def test_add_argparse_options():
    import argparse
    # Test default values
    parser = argparse.ArgumentParser()
    add_argparse_options(parser, write=False)
    args = parser.parse_args([])
    get_accessor_for_url(".", vars(args))

    parser = argparse.ArgumentParser()
    add_argparse_options(parser, write=True)
    args = parser.parse_args([])
    get_accessor_for_url(".", vars(args))

    # Test correct parsing
    parser = argparse.ArgumentParser()
    add_argparse_options(parser, write=True)
    args = parser.parse_args(["--flat"])
    assert args.flat is True
    args = parser.parse_args(["--no-gzip"])
    assert args.gzip is False

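# The two-parameter test_add_argparse_options variant above expects
# write_chunks and write_files as pytest fixtures. A plausible
# parametrization, assuming both are booleans (hypothetical sketch):
import pytest


@pytest.fixture(params=[False, True])
def write_chunks(request):
    return request.param


@pytest.fixture(params=[False, True])
def write_files(request):
    return request.param
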
def test_precomputed_IO_chunk_roundtrip(tmpdir):
    accessor = get_accessor_for_url(str(tmpdir))
    # Minimal info file
    io = get_IO_for_new_dataset(DUMMY_INFO, accessor)
    # Chunks are indexed as (channel, Z, Y, X); the Z range 8..15 of the
    # 15-voxel scale leaves a truncated chunk of 7 voxels
    dummy_chunk = np.arange(8 * 3 * 7, dtype="uint16").reshape(1, 7, 3, 8)
    chunk_coords = (0, 8, 0, 3, 8, 15)
    io.write_chunk(dummy_chunk, "key", chunk_coords)
    assert np.array_equal(io.read_chunk("key", chunk_coords), dummy_chunk)
    # Re-opening the dataset must yield the same info and chunk data
    io2 = get_IO_for_existing_dataset(accessor)
    assert io2.info == DUMMY_INFO
    assert np.array_equal(io2.read_chunk("key", chunk_coords), dummy_chunk)

def test_get_accessor_for_url(accessor_options):
    assert isinstance(get_accessor_for_url(""), Accessor)
    a = get_accessor_for_url(".", accessor_options)
    assert isinstance(a, FileAccessor)
    assert a.base_path == pathlib.Path(".")
    a = get_accessor_for_url("file:///absolute", accessor_options)
    assert isinstance(a, FileAccessor)
    assert a.base_path == pathlib.Path("/absolute")
    a = get_accessor_for_url("http://example/", accessor_options)
    assert isinstance(a, HttpAccessor)
    assert a.base_url == "http://example/"
    with pytest.raises(URLError, match="scheme"):
        get_accessor_for_url("weird://", accessor_options)
    with pytest.raises(URLError, match="decod"):
        get_accessor_for_url("file:///%ff", accessor_options)

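# test_get_accessor_for_url above takes an accessor_options fixture. A
# minimal sketch, assuming it mirrors the defaults that add_argparse_options
# would produce (the dict contents are hypothetical):
@pytest.fixture
def accessor_options():
    return {"flat": False, "gzip": True}
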
def test_jpeg_encoding_lossy_info(tmpdir):
    accessor = get_accessor_for_url(str(tmpdir))
    io = get_IO_for_new_dataset(
        {
            "type": "image",
            "data_type": "uint8",
            "num_channels": 1,
            "scales": [{
                "key": "key",
                "size": [8, 3, 15],
                "resolution": [1e6, 1e6, 1e6],
                "voxel_offset": [0, 0, 0],
                "chunk_sizes": [[8, 8, 8]],
                "encoding": "jpeg",
            }],
        },
        accessor)
    assert io.scale_is_lossy("key")

def test_precomputed_IO_info_error(tmpdir):
    with (tmpdir / "info").open("w") as f:
        f.write("invalid JSON")
    accessor = get_accessor_for_url(str(tmpdir))
    with pytest.raises(InvalidInfoError):
        get_IO_for_existing_dataset(accessor)

def test_raw_encoding_lossy_info(tmpdir):
    accessor = get_accessor_for_url(str(tmpdir))
    # Minimal info file
    io_raw = get_IO_for_new_dataset(DUMMY_INFO, accessor)
    assert not io_raw.scale_is_lossy("key")

def _bootstrap(self):
    accessor = get_accessor_for_url(self.url)
    self._io = get_IO_for_existing_dataset(accessor)
    # Relies on NeuroglancerScale defining an ordering (e.g. __lt__)
    self._scales_cached = sorted(
        [NeuroglancerScale(self, i) for i in self._io.info["scales"]]
    )
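
# _bootstrap above is a method; a minimal sketch of a surrounding class,
# assuming a url attribute and lazy caching of the parsed scales
# (the class and property names here are hypothetical):
class NeuroglancerVolume:
    def __init__(self, url):
        self.url = url
        self._io = None
        self._scales_cached = None

    @property
    def scales(self):
        # Fetch and parse the remote "info" file only on first access.
        if self._scales_cached is None:
            self._bootstrap()
        return self._scales_cached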