    def write(self, process_tile, data):
        """
        Write data from process tiles into PNG file(s).

        Parameters
        ----------
        process_tile : ``BufferedTile``
            must be a member of the process ``TilePyramid``
        """
        data = self._prepare_array(data)

        if data.mask.all():  # pragma: no cover
            logger.debug("data empty, nothing to write")
        else:
            # in case of S3 output, create a boto3 bucket resource
            bucket_resource = (
                get_boto3_bucket(self._bucket) if self._bucket else None
            )

            # Convert from process_tile to output_tiles and write
            for tile in self.pyramid.intersecting(process_tile):
                out_path = self.get_path(tile)
                self.prepare_path(tile)
                out_tile = BufferedTile(tile, self.pixelbuffer)
                write_raster_window(in_tile=process_tile,
                                    in_data=data,
                                    out_profile=self.profile(out_tile),
                                    out_tile=out_tile,
                                    out_path=out_path,
                                    bucket_resource=bucket_resource)
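
A minimal, hypothetical sketch of the empty-tile check used above: a fully masked array counts as "nothing to write". Only numpy is assumed here; the surrounding driver class is not.

import numpy.ma as ma

# a 3-band 256x256 tile in which every pixel is masked, i.e. carries no valid data
data = ma.masked_all((3, 256, 256), dtype="uint8")

if data.mask.all():
    print("data empty, nothing to write")  # same branch as the logger.debug() call above
else:
    print("tile contains valid pixels and would be written")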
Example #2
    def gen_indexes_and_check():
        # generate indexes
        list(zoom_index_gen(
            mp=mp,
            zoom=zoom,
            out_dir=mp.config.output.path,
            geojson=True,
            txt=True,
            vrt=True
        ))

        # assert GeoJSON exists
        with fiona.open(os.path.join(mp.config.output.path, "%s.geojson" % zoom)) as src:
            assert len(src) == 2

        # assert TXT exists
        txt_index = os.path.join(mp.config.output.path, "%s.txt" % zoom)
        bucket = get_boto3_bucket(txt_index.split("/")[2])
        key = "/".join(txt_index.split("/")[3:])
        for obj in bucket.objects.filter(Prefix=key):
            if obj.key == key:
                content = obj.get()['Body'].read().decode()
                assert len([l + '\n' for l in content.split('\n') if l]) == 2

        # assert VRT exists
        with rasterio.open(os.path.join(mp.config.output.path, "%s.vrt" % zoom)) as src:
            assert src.read().any()
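
A small, hypothetical helper (not part of the test above) that mirrors how the TXT assertion splits an "s3://bucket/prefix/file.txt" path into bucket name and object key with plain string operations:

def split_s3_path(path):
    """Return (bucket, key) for an s3://bucket/key style path."""
    parts = path.split("/")          # ["s3:", "", "bucket", "prefix", "file.txt"]
    return parts[2], "/".join(parts[3:])

# split_s3_path("s3://my-bucket/indexes/5.txt") -> ("my-bucket", "indexes/5.txt")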
Example #3
    def write(self, process_tile, data):
        """
        Write data from process tiles into GeoJSON file(s).

        Parameters
        ----------
        process_tile : ``BufferedTile``
            must be a member of the process ``TilePyramid``
        data : list or generator of GeoJSON objects
        """
        if data is None or len(data) == 0:
            return
        if not isinstance(data, (list, types.GeneratorType)):
            raise TypeError(
                "GeoJSON driver data has to be a list or generator of GeoJSON objects"
            )

        data = list(data)
        if not len(data):
            logger.debug("no features to write")
        else:
            # in case of S3 output, create a boto3 bucket resource
            bucket_resource = (
                get_boto3_bucket(self._bucket) if self._bucket else None
            )

            # Convert from process_tile to output_tiles
            for tile in self.pyramid.intersecting(process_tile):
                out_path = self.get_path(tile)
                self.prepare_path(tile)
                out_tile = BufferedTile(tile, self.pixelbuffer)
                write_vector_window(in_data=data,
                                    out_schema=self.output_params["schema"],
                                    out_tile=out_tile,
                                    out_path=out_path,
                                    bucket_resource=bucket_resource)
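
A self-contained sketch (assumed names, not from the driver above) of the same input validation: lists and generators of GeoJSON objects are accepted and materialized, anything else raises TypeError.

import types

def validate_features(data):
    if not isinstance(data, (list, types.GeneratorType)):
        raise TypeError(
            "GeoJSON driver data has to be a list or generator of GeoJSON objects"
        )
    return list(data)

features = (
    {"type": "Feature", "geometry": None, "properties": {"id": i}} for i in range(2)
)
print(len(validate_features(features)))  # 2 -- the generator is materialized into a list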
Example #4
    def __init__(self, out_path=None, output=None, out_pyramid=None):
        # see if lxml is installed before checking all output tiles
        from lxml.builder import ElementMaker
        self.path = out_path
        self._tp = out_pyramid
        self._output = output
        self._bucket = (
            self.path.split("/")[2] if self.path.startswith("s3://") else None
        )
        self.bucket_resource = (
            get_boto3_bucket(self._bucket) if self._bucket else None
        )
        logger.debug("initialize VRT writer for %s", self.path)
        if path_exists(self.path):
            if self._bucket:
                key = "/".join(self.path.split("/")[3:])
                for obj in self.bucket_resource.objects.filter(Prefix=key):
                    if obj.key == key:
                        self._existing = {
                            k: v
                            for k, v in self._xml_to_entries(
                                obj.get()['Body'].read().decode()
                            )
                        }
            else:
                with open(self.path) as src:
                    self._existing = {
                        k: v for k, v in self._xml_to_entries(src.read())
                    }
        else:
            self._existing = {}
        logger.debug("%s existing entries", len(self._existing))
        self.new_entries = 0
        self._new = {}
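
A hedged sketch of the optional-dependency check at the top of __init__: importing lxml up front makes a missing package fail immediately instead of midway through index generation. The error message below is illustrative only.

try:
    from lxml.builder import ElementMaker  # imported only to verify lxml is available
except ImportError:
    raise ImportError("lxml is required to write VRT index files")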
Example #5
    def __init__(self, out_path=None):
        self.path = out_path
        self._bucket = (
            self.path.split("/")[2] if self.path.startswith("s3://") else None
        )
        self.bucket_resource = (
            get_boto3_bucket(self._bucket) if self._bucket else None
        )
        logger.debug("initialize TXT writer")
        if path_exists(self.path):
            if self._bucket:
                key = "/".join(self.path.split("/")[3:])
                for obj in self.bucket_resource.objects.filter(Prefix=key):
                    if obj.key == key:
                        self._existing = {
                            l + '\n'
                            for l in obj.get()['Body'].read().decode().split('\n')
                            if l
                        }
            else:
                with open(self.path) as src:
                    self._existing = {l for l in src}
        else:
            self._existing = {}
        self.new_entries = 0
        if self._bucket:
            self.sink = ""
        else:
            self.sink = open(self.path, "w")
        for l in self._existing:
            self._write_line(l)
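
A minimal, self-contained sketch (file name and entry assumed) of keeping an index file free of duplicates by loading its existing lines into a set first, mirroring the self._existing handling above.

import os

path = "tile_paths.txt"  # hypothetical local index file
existing = set()
if os.path.isfile(path):
    with open(path) as src:
        existing = {line for line in src if line.strip()}

new_entry = "5/12/7.tif\n"
if new_entry not in existing:
    existing.add(new_entry)  # in the writer above this would also go to self.sink
print("%s entries" % len(existing))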
Example #6
    def write(self, process_tile, data):
        """
        Write data from process tiles into GeoTIFF file(s).

        Parameters
        ----------
        process_tile : ``BufferedTile``
            must be a member of the process ``TilePyramid``
        data : ``np.ndarray``
        """
        if (
            isinstance(data, tuple) and
            len(data) == 2 and
            isinstance(data[1], dict)
        ):
            data, tags = data
        else:
            tags = {}
        data = prepare_array(
            data,
            masked=True,
            nodata=self.output_params["nodata"],
            dtype=self.profile(process_tile)["dtype"]
        )

        if data.mask.all():
            logger.debug("data empty, nothing to write")
        else:
            # in case of S3 output, create a boto3 bucket resource
            bucket_resource = get_boto3_bucket(self._bucket) if self._bucket else None

            # Convert from process_tile to output_tiles and write
            for tile in self.pyramid.intersecting(process_tile):
                out_path = self.get_path(tile)
                self.prepare_path(tile)
                out_tile = BufferedTile(tile, self.pixelbuffer)
                write_raster_window(
                    in_tile=process_tile,
                    in_data=data,
                    out_profile=self.profile(out_tile),
                    out_tile=out_tile,
                    out_path=out_path,
                    tags=tags,
                    bucket_resource=bucket_resource
                )
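
A standalone sketch (names assumed) of the (array, tags) unpacking at the top of write(): a bare array yields empty tags, while a two-element tuple whose second item is a dict is split into pixel data and GeoTIFF tags.

import numpy as np

def split_data_and_tags(data):
    if isinstance(data, tuple) and len(data) == 2 and isinstance(data[1], dict):
        return data
    return data, {}

arr = np.zeros((1, 4, 4), dtype="uint8")
print(split_data_and_tags(arr)[1])                                # {}
print(split_data_and_tags((arr, {"datetime": "2020-01-01"}))[1])  # {'datetime': '2020-01-01'}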
Example #7
    def __init__(self, output_params, **kwargs):
        """Initialize."""
        logger.debug("output is single file")
        self.dst = None
        super().__init__(output_params, **kwargs)
        self._set_attributes(output_params)
        if len(self.output_params["delimiters"]["zoom"]) != 1:
            raise ValueError("single file output only works with one zoom level")
        self.zoom = output_params["delimiters"]["zoom"][0]
        self.cog = output_params.get("cog", False)
        if self.cog or "overviews" in output_params:
            self.overviews = True
            self.overviews_resampling = output_params.get(
                "overviews_resampling", "nearest"
            )
            self.overviews_levels = output_params.get(
                "overviews_levels", [2**i for i in range(1, self.zoom + 1)]
            )
        else:
            self.overviews = False
        self.in_memory = output_params.get("in_memory", True)
        _bucket = (
            self.path.split("/")[2] if self.path.startswith("s3://") else None
        )
        self._bucket_resource = get_boto3_bucket(_bucket) if _bucket else None
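
A worked example (zoom value assumed) of the default overview levels computed above: for a single output zoom level z, the factors 2**1 through 2**z are used unless "overviews_levels" is configured explicitly.

zoom = 5  # hypothetical single output zoom level
overviews_levels = [2**i for i in range(1, zoom + 1)]
print(overviews_levels)  # [2, 4, 8, 16, 32]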