Example #1
    def test_checksum_on_write(self):
        """
        Generate a tileset consisting of a single TIFF tile.  Load it and then write it back out
        as a numpy tile, which should be written with different checksums.  Then verify that the
        numpy version can load without an error.
        """
        # write the tiff file
        with tempfile.TemporaryDirectory() as tempdir:
            tempdir_path = Path(tempdir)
            file_path = tempdir_path / "tile.tiff"
            data = np.random.randint(0, 65535, size=(120, 80), dtype=np.uint16)
            imwrite(file_path, data, format="tiff")
            with open(fspath(file_path), "rb") as fh:
                checksum = hashlib.sha256(fh.read()).hexdigest()

            manifest = build_skeleton_manifest()
            manifest['tiles'].append(
                {
                    "coordinates": {
                        DimensionNames.X.value: [
                            0.0,
                            0.0001,
                        ],
                        DimensionNames.Y.value: [
                            0.0,
                            0.0001,
                        ]
                    },
                    "indices": {
                        "hyb": 0,
                        "ch": 0,
                    },
                    "file": "tile.tiff",
                    "format": "tiff",
                    "sha256": checksum,
                },
            )
            with open(fspath(tempdir_path / "tileset.json"), "w") as fh:
                fh.write(json.dumps(manifest))

            image = slicedimage.Reader.parse_doc(
                "tileset.json",
                tempdir_path.as_uri(),
                {"cache": {"size_limit": 0}},  # disabled
            )

            with tempfile.TemporaryDirectory() as output_tempdir:
                with tempfile.NamedTemporaryFile(
                        suffix=".json", dir=output_tempdir, delete=False) as partition_file:
                    partition_file_path = Path(partition_file.name)
                    partition_doc = slicedimage.v0_0_0.Writer().generate_partition_document(
                        image, partition_file_path.as_uri())

                    writer = codecs.getwriter("utf-8")
                    json.dump(partition_doc, writer(partition_file))

                loaded = slicedimage.Reader.parse_doc(
                    partition_file_path.name, partition_file_path.parent.as_uri())

                # accessing numpy_array forces the tile to be decoded, verifying that the
                # rewritten numpy tile loads without a checksum error.
                loaded.tiles()[0].numpy_array
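
The test above stops at loading the rewritten tileset. To make the docstring's claim explicit (the numpy copy carries a different checksum than the original TIFF), a hypothetical follow-up assertion could compare the tile's sha256 attribute (used in Example #8) against the digest computed earlier in the test. This is a sketch, not part of the original test:

    # hypothetical follow-up assertion: the re-encoded numpy tile should not reuse the
    # original TIFF file's SHA-256 digest stored in `checksum` above.
    rewritten_tile = loaded.tiles()[0]
    assert rewritten_tile.sha256 != checksum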
Example #2
    def test_local_path_with_spaces(self):
        with tempfile.TemporaryDirectory(prefix="c d") as td:
            with tempfile.NamedTemporaryFile(dir=td,
                                             prefix="a b",
                                             delete=False) as tfn:
                pass
            abspath = Path(tfn.name).resolve()
            backend, name, baseurl = resolve_path_or_url(fspath(abspath))
            self.assertEqual(name, abspath.name)
            self.assertTrue(isinstance(backend, DiskBackend))
            self.assertEqual(fspath(abspath.parent), backend._basedir)
            self.assertEqual(abspath.parent.as_uri(), baseurl)

            with backend.read_contextmanager(name) as rcm:
                rcm.read()

            cwd = os.getcwd()
            try:
                os.chdir(fspath(abspath.parent))
                backend, name, baseurl = resolve_path_or_url(abspath.name)
                self.assertEqual(name, abspath.name)
                self.assertTrue(isinstance(backend, DiskBackend))
                self.assertEqual(fspath(abspath.parent), backend._basedir)
                self.assertEqual(abspath.parent.as_uri(), baseurl)

                with backend.read_contextmanager(name) as rcm:
                    rcm.read()
            finally:
                os.chdir(cwd)
Example #3
    def test_valid_local_path(self):
        with tempfile.NamedTemporaryFile() as tfn:
            abspath = Path(tfn.name).resolve()
            _, name, baseurl = resolve_path_or_url(fspath(abspath))
            self.assertEqual(name, abspath.name)
            self.assertEqual(abspath.parent.as_uri(), baseurl)

            cwd = os.getcwd()
            try:
                os.chdir(fspath(abspath.parent))
                _, name, baseurl = resolve_path_or_url(abspath.name)
                self.assertEqual(name, abspath.name)
                self.assertEqual(abspath.parent.as_uri(), baseurl)
            finally:
                os.chdir(cwd)
Example #4
def _test_checksum_setup(tmpdir: Path):
    """
    Write some random data to a temporary file and yield its path, the data, and the checksum of
    the data.
    """
    # write the file
    data = os.urandom(1024)

    expected_checksum = hashlib.sha256(data).hexdigest()

    with tempfile.NamedTemporaryFile(dir=fspath(tmpdir), delete=False) as tfh:
        tfh.write(data)

    yield tfh.name, data, expected_checksum
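
Because _test_checksum_setup is a generator that yields exactly once, a caller iterates over it (or wraps it with contextlib.contextmanager) to receive the file path, the raw bytes, and the expected digest. A minimal, hypothetical consumer, not part of the original module:

    # hypothetical consumer: verify that the file written by the helper hashes to the
    # expected digest.  `tmpdir` is any existing temporary directory Path.
    def check_checksum_setup(tmpdir: Path):
        for file_path, data, expected_checksum in _test_checksum_setup(tmpdir):
            with open(file_path, "rb") as fh:
                contents = fh.read()
            assert contents == data
            assert hashlib.sha256(contents).hexdigest() == expected_checksum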
Example #5
    def test_numpy(self):
        """
        Generate a tileset consisting of a single TIFF tile, and then read it.
        """
        image = slicedimage.TileSet(
            [DimensionNames.X, DimensionNames.Y, "ch", "hyb"],
            {
                'ch': 1,
                'hyb': 1
            },
            {
                DimensionNames.Y: 120,
                DimensionNames.X: 80
            },
        )

        tile = slicedimage.Tile(
            {
                DimensionNames.X: (0.0, 0.01),
                DimensionNames.Y: (0.0, 0.01),
            },
            {
                'hyb': 0,
                'ch': 0,
            },
        )
        tile.numpy_array = np.random.randint(0,
                                             65535,
                                             size=(120, 80),
                                             dtype=np.uint16)
        image.add_tile(tile)

        with tempfile.TemporaryDirectory() as tempdir:
            tempdir_path = Path(tempdir)
            tileset_path = tempdir_path / "tileset.json"
            partition_doc = slicedimage.v0_0_0.Writer().generate_partition_document(
                image, tileset_path.as_uri())
            with open(fspath(tileset_path), "w") as fh:
                json.dump(partition_doc, fh)

            result = slicedimage.Reader.parse_doc("tileset.json",
                                                  tempdir_path.as_uri())

            self.assertTrue(
                np.array_equal(
                    list(result.tiles())[0].numpy_array, tile.numpy_array))
Example #6
def infer_backend(baseurl, backend_config=None):
    """
    Guess the backend based on the format of `baseurl`, the consistent part of the URL or file path.
    The backend_config dictionary can contain flexible parameters for the different backends.

    Caching parameter keys include:

     - ["caching"]["directory"]  (default: None which disables caching)
     - ["caching"]["debug"]      (default: False)
     - ["caching"]["size_limit"] (default: SIZE_LIMIT)

    """
    if backend_config is None:
        backend_config = {}

    parsed = urllib.parse.urlparse(baseurl)

    if parsed.scheme == "file":
        local_path = get_path_from_parsed_file_url(parsed)
        return DiskBackend(fspath(local_path))

    if parsed.scheme in ("http", "https"):
        backend = HttpBackend(baseurl)
    elif parsed.scheme == "s3":
        s3_config = backend_config.get("s3", {})
        backend = S3Backend(baseurl, s3_config)
    else:
        raise ValueError(
            "Unable to infer backend for url {}, please verify that baseurl points to a valid "
            "directory or web address".format(baseurl))

    # these backends might use a cache.
    cache_config = backend_config.get("caching", {})

    cache_dir = cache_config.get("directory", None)
    if cache_dir is not None:
        cache_dir = os.path.expanduser(cache_dir)
        size_limit = cache_config.get("size_limit", SIZE_LIMIT)
        if size_limit > 0:
            debug = cache_config.get("debug", False)
            if debug:
                print("> caching {} to {} (size_limit: {})".format(
                    baseurl, cache_dir, size_limit))
            backend = CachingBackend(cache_dir, backend, size_limit)

    return backend
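
For reference, a minimal sketch of a backend_config that exercises the caching keys documented in the docstring above; the directory and size limit are arbitrary example values, and caching only applies to http/https/s3 baseurls:

    # assumed example values, not library defaults.
    config = {
        "caching": {
            "directory": "~/.cache/slicedimage",  # expanded with os.path.expanduser
            "debug": True,                         # print a line when the cache is attached
            "size_limit": 5 * 1024 ** 3,           # bytes; a non-positive value disables the cache
        },
    }
    backend = infer_backend("https://example.com/tilesets/", backend_config=config)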
Example #7
    def test_tiff(self):
        """
        Generate a tileset consisting of a single TIFF tile, and then read it.
        """
        with tempfile.TemporaryDirectory() as tempdir:
            tempdir_path = Path(tempdir)
            # write the tiff file
            data = np.random.randint(0, 65535, size=(120, 80), dtype=np.uint16)
            with tifffile.TiffWriter(os.path.join(tempdir,
                                                  "tile.tiff")) as tiff:
                tiff.save(data)

            # TODO: (ttung) We should really be producing a tileset programmatically and writing it
            # to disk.  However, our current write path only produces numpy output files.
            manifest = build_skeleton_manifest()
            manifest['tiles'].append(
                {
                    "coordinates": {
                        DimensionNames.X.value: [
                            0.0,
                            0.0001,
                        ],
                        DimensionNames.Y.value: [
                            0.0,
                            0.0001,
                        ]
                    },
                    "indices": {
                        "hyb": 0,
                        "ch": 0,
                    },
                    "file": "tile.tiff",
                    "format": "tiff",
                },
            )
            with open(fspath(tempdir_path / "tileset.json"), "w") as fh:
                fh.write(json.dumps(manifest))

            result = slicedimage.Reader.parse_doc("tileset.json",
                                                  tempdir_path.as_uri())

            self.assertTrue(
                np.array_equal(list(result.tiles())[0].numpy_array, data))
Example #8
    def test_write_png(self):
        image = slicedimage.TileSet(
            dimensions=[DimensionNames.X, DimensionNames.Y, "ch", "hyb"],
            shape={'ch': 2, 'hyb': 2},
            default_tile_shape={DimensionNames.Y: 120, DimensionNames.X: 80},
        )

        for hyb in range(2):
            for ch in range(2):
                tile = slicedimage.Tile(
                    coordinates={
                        DimensionNames.X: (0.0, 0.01),
                        DimensionNames.Y: (0.0, 0.01),
                    },
                    indices={
                        'hyb': hyb,
                        'ch': ch,
                    },
                )
                tile.numpy_array = np.zeros((120, 80), dtype=np.uint32)
                tile.numpy_array[hyb, ch] = 1
                image.add_tile(tile)

        with tempfile.TemporaryDirectory() as tempdir:
            with tempfile.NamedTemporaryFile(
                    suffix=".json", dir=tempdir, delete=False) as partition_file:
                partition_file_path = Path(partition_file.name)
                # create the tileset and save it.
                partition_doc = slicedimage.v0_0_0.Writer().generate_partition_document(
                    image, partition_file_path.as_uri(), tile_format=ImageFormat.PNG)
                writer = codecs.getwriter("utf-8")
                json.dump(partition_doc, writer(partition_file))
                partition_file.flush()

            # construct a URL to the tileset we wrote, and load the tileset.
            loaded = slicedimage.Reader.parse_doc(
                partition_file_path.name, partition_file_path.parent.as_uri())

            # verify that we wrote some pngs, and all the pngs we wrote actually identify as pngs.
            pngfiles = list(Path(tempdir).glob("*.png"))
            assert len(pngfiles) > 0
            for pngfile in pngfiles:
                filetype = magic.from_file(fspath(pngfile))
                assert filetype.lower().startswith("png")

            # compare the tiles we loaded to the tiles we set up.
            for hyb in range(2):
                for ch in range(2):
                    tiles = list(loaded.tiles(
                        lambda tile: tile.indices['hyb'] == hyb and tile.indices['ch'] == ch))

                    self.assertEqual(len(tiles), 1)

                    expected = np.zeros((120, 80), dtype=np.uint32)
                    expected[hyb, ch] = 1

                    self.assertTrue(np.array_equal(tiles[0].numpy_array, expected))
                    self.assertIsNotNone(tiles[0].sha256)