def profile(input, tile, tilesize, zoom, add_kernels, add_stdout, config):
    """Profile COGReader Mercator Tile read."""
    if not tile:
        with COGReader(input) as cog:
            if zoom is None:
                zoom = randint(cog.minzoom, cog.maxzoom)

            extrema = tile_extrema(cog.bounds, zoom)
            tile_x = sample(range(extrema["x"]["min"], extrema["x"]["max"]), 1)[0]
            tile_y = sample(range(extrema["y"]["min"], extrema["y"]["max"]), 1)[0]
            tile_z = zoom
            log.debug(f"reading tile: {tile_z}-{tile_x}-{tile_y}")
    else:
        tile_z, tile_x, tile_y = list(map(int, tile.split("-")))

    @profiler(
        kernels=add_kernels,
        quiet=True,
        add_to_return=True,
        raw=add_stdout,
        config=config,
    )
    def _read_tile(src_path: str, x: int, y: int, z: int, tilesize: int = 256):
        with COGReader(src_path) as cog:
            return cog.tile(x, y, z, tilesize=tilesize)

    (_, _), stats = _read_tile(input, tile_x, tile_y, tile_z, tilesize)

    click.echo(json.dumps(stats))
def get_web_optimized_params(
    src_dst,
    tilesize=256,
    latitude_adjustment: bool = True,
    warp_resampling: str = "nearest",
    grid_crs=CRS.from_epsg(3857),
) -> Dict:
    """Return VRT parameters for a WebOptimized COG."""
    bounds = list(
        transform_bounds(
            src_dst.crs, CRS.from_epsg(4326), *src_dst.bounds, densify_pts=21
        )
    )
    center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2]

    lat = 0 if latitude_adjustment else center[1]
    _, max_zoom = get_zooms(src_dst, lat=lat, tilesize=tilesize)

    extrema = tile_extrema(bounds, max_zoom)
    left, _, _, top = mercantile.xy_bounds(
        extrema["x"]["min"], extrema["y"]["min"], max_zoom
    )
    vrt_res = _meters_per_pixel(max_zoom, 0, tilesize=tilesize)
    vrt_transform = Affine(vrt_res, 0, left, 0, -vrt_res, top)

    vrt_width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
    vrt_height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize

    return dict(
        crs=grid_crs,
        transform=vrt_transform,
        width=vrt_width,
        height=vrt_height,
        resampling=ResamplingEnums[warp_resampling],
    )
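# Minimal usage sketch (not part of the original module): the dict returned by
# get_web_optimized_params() can be passed straight to rasterio's WarpedVRT to
# warp a dataset onto the WebMercator tiling grid. The path "example.tif" and
# the helper name below are placeholders/assumptions for illustration only.
def _example_web_optimized_vrt(src_path: str = "example.tif"):
    """Read band 1 of a dataset through a WebMercator-aligned WarpedVRT (illustrative)."""
    import rasterio
    from rasterio.vrt import WarpedVRT

    with rasterio.open(src_path) as src_dst:
        vrt_params = get_web_optimized_params(src_dst, tilesize=256)
        with WarpedVRT(src_dst, **vrt_params) as vrt:
            # Data is resampled onto the mercator grid defined by the VRT params
            return vrt.read(indexes=1)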
def random(input, zoom):
    """Get random tile."""
    with COGReader(input) as cog:
        if zoom is None:
            zoom = randint(cog.minzoom, cog.maxzoom)

        extrema = tile_extrema(cog.bounds, zoom)
        x = sample(range(extrema["x"]["min"], extrema["x"]["max"]), 1)[0]
        y = sample(range(extrema["y"]["min"], extrema["y"]["max"]), 1)[0]

    click.echo(f"{zoom}-{x}-{y}")
def overview(
    bbox, endpoint, out_path, max_cloud, retina, start_date, end_date, pixel_selection
):
    """Create overview landsat images."""
    bounds = list(map(float, bbox.split(",")))

    if start_date is not None:
        start_date = dateparse(start_date)
    if end_date is not None:
        end_date = dateparse(end_date)

    results = construct_mosaic(
        endpoint=endpoint,
        bounds=bounds,
        minzoom=9,
        max_cloud=max_cloud,
        retina=retina,
        start_date=start_date,
        end_date=end_date,
        seasons=["spring", "summer"],
        tile_format="tif",
    )

    tilescale = 2 if retina is True else 1
    tilesize = 256 * tilescale
    zoom = results["maxzoom"] - (tilescale - 1)

    # Mercator tiles covering bounds
    extrema = tile_extrema(bounds, zoom)
    tiles = mercantile.tiles(*bounds, zooms=zoom)

    query_params = {
        # True Color RGB
        "bands": "4,3,2",
        # Looks nice
        "color_ops": "gamma RGB 3.5, saturation 1.7, sigmoidal RGB 15 0.35",
        "pixel_selection": pixel_selection,
    }
    tiles_url = results["tiles"][0] + urllib.parse.urlencode(query_params)

    assemble_tif(
        out_path=out_path,
        zoom=zoom,
        tiles=tiles,
        tiles_url=tiles_url,
        extrema=extrema,
        tilesize=tilesize,
    )
def create_overview_cogs(
    mosaic_path: str,
    output_profile: Dict,
    prefix: str = "mosaic_ovr",
    max_overview_level: int = 6,
    method: str = "first",
    config: Dict = None,
    threads=1,
    in_memory: bool = True,
) -> None:
    """
    Create a low-resolution mosaic image from a mosaicJSON.

    The output will be a web-optimized COG with bounds matching the mosaicJSON
    bounds and a resolution matching the mosaic minzoom - 1.

    Attributes
    ----------
    mosaic_path : str, required
        Mosaic definition path.
    output_profile : dict, required
    prefix : str
    max_overview_level : int
    method : str, optional
        pixel_selection method name (default is 'first').
    config : dict
        Rasterio Env options.
    threads : int, optional
        Maximum number of threads to use (default is 1).
    in_memory : bool, optional
        Force COG creation in memory (default is True).

    """
    pixel_method = PIXSEL_METHODS[method]

    with MosaicBackend(mosaic_path) as mosaic:
        base_zoom = mosaic.metadata["minzoom"] - 1
        mosaic_quadkey_zoom = mosaic.quadkey_zoom
        bounds = mosaic.metadata["bounds"]
        mosaic_quadkeys = set(mosaic._quadkeys)

        # Select a random quadkey/asset and get dataset info
        tile = mercantile.quadkey_to_tile(random.sample(mosaic_quadkeys, 1)[0])
        assets = mosaic.assets_for_tile(*tile)
        info = _get_info(assets[0])

        extrema = tile_extrema(bounds, base_zoom)
        tilesize = 256
        resolution = _meters_per_pixel(base_zoom, 0, tilesize=tilesize)

        # Create multiple files if the coverage is too big
        extremas = _split_extrema(extrema, max_ovr=max_overview_level)
        for ix, extrema in enumerate(extremas):
            click.echo(f"Part {1 + ix}/{len(extremas)}", err=True)
            output_path = f"{prefix}_{ix}.tif"

            blocks = list(_get_blocks(extrema, tilesize))
            random.shuffle(blocks)

            width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
            height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize
            w, n = mercantile.xy(
                *mercantile.ul(extrema["x"]["min"], extrema["y"]["min"], base_zoom)
            )

            params = dict(
                driver="GTiff",
                dtype=info["dtype"],
                count=len(info["band_descriptions"]),
                width=width,
                height=height,
                crs="epsg:3857",
                transform=Affine(resolution, 0, w, 0, -resolution, n),
                nodata=info["nodata_value"],
            )
            params.update(**output_profile)

            config = config or {}
            with rasterio.Env(**config):
                with ExitStack() as ctx:
                    if in_memory:
                        tmpfile = ctx.enter_context(MemoryFile())
                        tmp_dst = ctx.enter_context(tmpfile.open(**params))
                    else:
                        tmpfile = ctx.enter_context(TemporaryRasterFile(output_path))
                        tmp_dst = ctx.enter_context(
                            rasterio.open(tmpfile.name, "w", **params)
                        )

                    def _get_tile(wind):
                        idx, window = wind
                        x = extrema["x"]["min"] + idx[1]
                        y = extrema["y"]["min"] + idx[0]
                        t = mercantile.Tile(x, y, base_zoom)
                        kds = set(find_quadkeys(t, mosaic_quadkey_zoom))
                        if not mosaic_quadkeys.intersection(kds):
                            return window, None, None

                        try:
                            (tile, mask), _ = mosaic.tile(
                                t.x,
                                t.y,
                                t.z,
                                tilesize=tilesize,
                                pixel_selection=pixel_method(),
                            )
                        except NoAssetFoundError:
                            return window, None, None

                        return window, tile, mask

                    with futures.ThreadPoolExecutor(max_workers=threads) as executor:
                        future_work = [
                            executor.submit(_get_tile, item) for item in blocks
                        ]
                        with click.progressbar(
                            futures.as_completed(future_work),
                            length=len(future_work),
                            show_percent=True,
                            label="Loading tiles",
                        ) as future:
                            for res in future:
                                pass

                    for f in _filter_futures(future_work):
                        window, tile, mask = f
                        if tile is None:
                            continue

                        tmp_dst.write(tile, window=window)
                        if info["nodata_type"] == "Mask":
                            tmp_dst.write_mask(mask.astype("uint8"), window=window)

                    min_tile_size = tilesize = min(
                        int(output_profile["blockxsize"]),
                        int(output_profile["blockysize"]),
                    )
                    overview_level = get_maximum_overview_level(
                        tmp_dst.width, tmp_dst.height, minsize=min_tile_size
                    )
                    overviews = [2 ** j for j in range(1, overview_level + 1)]
                    tmp_dst.build_overviews(overviews)
                    copy(tmp_dst, output_path, copy_src_overviews=True, **params)
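# Hedged usage sketch (not part of the original module): calling
# create_overview_cogs() with a minimal GTiff profile. The mosaic path and the
# profile values below are placeholders/assumptions, not project defaults; the
# function reads "blockxsize"/"blockysize" from the profile to size tiles and
# overview levels, so those keys are required.
def _example_create_overview_cogs(mosaic_path: str = "mosaic.json"):
    """Build low-resolution overview COGs for a mosaicJSON (illustrative only)."""
    output_profile = dict(
        driver="GTiff",
        tiled=True,
        blockxsize=256,
        blockysize=256,
        compress="deflate",
    )
    create_overview_cogs(
        mosaic_path,
        output_profile,
        prefix="mosaic_ovr",
        method="first",
        threads=4,
        in_memory=False,
    )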
def cog_translate(
    source,
    dst_path,
    dst_kwargs,
    indexes=None,
    nodata=None,
    dtype=None,
    add_mask=None,
    overview_level=None,
    overview_resampling="nearest",
    web_optimized=False,
    latitude_adjustment=True,
    resampling="nearest",
    in_memory=None,
    config=None,
    allow_intermediate_compression=False,
    forward_band_tags=False,
    quiet=False,
):
    """
    Create Cloud Optimized Geotiff.

    Parameters
    ----------
    source : str, PathLike object or rasterio.io.DatasetReader
        A dataset path, URL or rasterio.io.DatasetReader object.
        Will be opened in "r" mode.
    dst_path : str or PathLike object
        An output dataset path or PathLike object.
        Will be opened in "w" mode.
    dst_kwargs : dict
        Output dataset creation options.
    indexes : tuple or int, optional
        Raster band indexes to copy.
    nodata : int, optional
        Overwrite nodata masking values for input dataset.
    dtype : str, optional
        Overwrite output data type. Default will be the input data type.
    add_mask : bool, optional
        Force output dataset creation with a mask.
    overview_level : int, optional (default: 6)
        COGEO overview (decimation) level.
    overview_resampling : str, optional (default: "nearest")
        Resampling algorithm for overviews.
    web_optimized : bool, optional (default: False)
        Create web-optimized cogeo.
    latitude_adjustment : bool, optional (default: True)
        Use mercator meters for zoom calculation or ensure max zoom equality.
    resampling : str, optional (default: "nearest")
        Resampling algorithm.
    in_memory : bool, optional
        Force processing raster in memory (default: process in memory if small).
    config : dict
        Rasterio Env options.
    allow_intermediate_compression : bool, optional (default: False)
        Allow intermediate file compression to reduce memory/disk footprint.
        Note: This could reduce the speed of the process.
        Ref: https://github.com/cogeotiff/rio-cogeo/issues/103
    forward_band_tags : bool, optional
        Forward band tags to output bands.
        Ref: https://github.com/cogeotiff/rio-cogeo/issues/19
    quiet : bool, optional (default: False)
        Mask processing steps.

    """
    if isinstance(indexes, int):
        indexes = (indexes,)

    config = config or {}
    with rasterio.Env(**config):
        with ExitStack() as ctx:
            if isinstance(source, (DatasetReader, DatasetWriter, WarpedVRT)):
                src_dst = source
            else:
                src_dst = ctx.enter_context(rasterio.open(source))

            meta = src_dst.meta
            indexes = indexes if indexes else src_dst.indexes
            nodata = nodata if nodata is not None else src_dst.nodata
            dtype = dtype if dtype else src_dst.dtypes[0]
            alpha = has_alpha_band(src_dst)
            mask = has_mask_band(src_dst)

            if not add_mask and (
                (nodata is not None or alpha)
                and dst_kwargs.get("compress") in ["JPEG", "jpeg"]
            ):
                warnings.warn(
                    "Using lossy compression with Nodata or Alpha band "
                    "can results in unwanted artefacts.",
                    LossyCompression,
                )

            tilesize = min(int(dst_kwargs["blockxsize"]), int(dst_kwargs["blockysize"]))

            if src_dst.width < tilesize or src_dst.height < tilesize:
                tilesize = 2 ** int(math.log(min(src_dst.width, src_dst.height), 2))
                if tilesize < 64:
                    warnings.warn(
                        "Raster has dimension < 64px. Output COG cannot be tiled"
                        " and overviews cannot be added.",
                        IncompatibleBlockRasterSize,
                    )
                    dst_kwargs.pop("blockxsize", None)
                    dst_kwargs.pop("blockysize", None)
                    dst_kwargs.pop("tiled")
                    overview_level = 0
                else:
                    warnings.warn(
                        "Block Size are bigger than raster sizes. "
                        "Setting blocksize to {}".format(tilesize),
                        IncompatibleBlockRasterSize,
                    )
                    dst_kwargs["blockxsize"] = tilesize
                    dst_kwargs["blockysize"] = tilesize

            vrt_params = dict(add_alpha=True, dtype=dtype)
            if nodata is not None:
                vrt_params.update(
                    dict(nodata=nodata, add_alpha=False, src_nodata=nodata)
                )

            if alpha:
                vrt_params.update(dict(add_alpha=False))

            if web_optimized:
                bounds = list(
                    transform_bounds(
                        *[src_dst.crs, "epsg:4326"] + list(src_dst.bounds),
                        densify_pts=21,
                    )
                )
                center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2]

                lat = 0 if latitude_adjustment else center[1]
                max_zoom = get_max_zoom(src_dst, lat=lat, tilesize=tilesize)

                extrema = tile_extrema(bounds, max_zoom)
                w, n = mercantile.xy(
                    *mercantile.ul(extrema["x"]["min"], extrema["y"]["min"], max_zoom)
                )
                vrt_res = _meters_per_pixel(max_zoom, 0, tilesize=tilesize)
                vrt_transform = Affine(vrt_res, 0, w, 0, -vrt_res, n)

                vrt_width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
                vrt_height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize

                vrt_params.update(
                    dict(
                        crs="epsg:3857",
                        transform=vrt_transform,
                        width=vrt_width,
                        height=vrt_height,
                        resampling=ResamplingEnums[resampling],
                    )
                )

            with WarpedVRT(src_dst, **vrt_params) as vrt_dst:
                meta = vrt_dst.meta
                meta["count"] = len(indexes)

                if add_mask:
                    meta.pop("nodata", None)
                    meta.pop("alpha", None)

                if (
                    dst_kwargs.get("photometric", "").upper() == "YCBCR"
                    and meta["count"] == 1
                ):
                    warnings.warn(
                        "PHOTOMETRIC=YCBCR not supported on a 1-band raster"
                        " and has been set to 'MINISBLACK'"
                    )
                    dst_kwargs["photometric"] = "MINISBLACK"

                meta.update(**dst_kwargs)

                if not allow_intermediate_compression:
                    meta.pop("compress", None)
                    meta.pop("photometric", None)

                if in_memory is None:
                    in_memory = vrt_dst.width * vrt_dst.height < IN_MEMORY_THRESHOLD

                if in_memory:
                    tmpfile = ctx.enter_context(MemoryFile())
                    tmp_dst = ctx.enter_context(tmpfile.open(**meta))
                else:
                    tmpfile = ctx.enter_context(TemporaryRasterFile(dst_path))
                    tmp_dst = ctx.enter_context(
                        rasterio.open(tmpfile.name, "w", **meta)
                    )

                # Transfer color interpretation
                if len(indexes) == 1 and (
                    vrt_dst.colorinterp[indexes[0] - 1] is not ColorInterp.palette
                ):
                    tmp_dst.colorinterp = [ColorInterp.gray]
                else:
                    tmp_dst.colorinterp = [vrt_dst.colorinterp[b - 1] for b in indexes]

                if tmp_dst.colorinterp[0] is ColorInterp.palette:
                    try:
                        tmp_dst.write_colormap(1, vrt_dst.colormap(1))
                    except ValueError:
                        warnings.warn(
                            "Dataset has `Palette` color interpretation"
                            " but is missing colormap information"
                        )

                wind = list(tmp_dst.block_windows(1))

                if not quiet:
                    click.echo("Reading input: {}".format(source), err=True)

                fout = os.devnull if quiet else sys.stderr
                with click.progressbar(
                    wind, length=len(wind), file=fout, show_percent=True
                ) as windows:
                    for ij, w in windows:
                        matrix = vrt_dst.read(window=w, indexes=indexes)
                        tmp_dst.write(matrix, window=w)

                        if add_mask or mask:
                            # Cast mask to uint8 to fix rasterio 1.1.2 error (ref #115)
                            mask_value = vrt_dst.dataset_mask(window=w).astype("uint8")
                            tmp_dst.write_mask(mask_value, window=w)

                if overview_level is None:
                    overview_level = get_maximum_overview_level(vrt_dst, tilesize)

                if not quiet and overview_level:
                    click.echo("Adding overviews...", err=True)

                overviews = [2 ** j for j in range(1, overview_level + 1)]
                tmp_dst.build_overviews(overviews, ResamplingEnums[overview_resampling])

                if not quiet:
                    click.echo("Updating dataset tags...", err=True)

                for i, b in enumerate(indexes):
                    tmp_dst.set_band_description(i + 1, src_dst.descriptions[b - 1])
                    if forward_band_tags:
                        tmp_dst.update_tags(i + 1, **src_dst.tags(b))

                tags = src_dst.tags()
                tags.update(
                    dict(
                        OVR_RESAMPLING_ALG=ResamplingEnums[
                            overview_resampling
                        ].name.upper()
                    )
                )
                tmp_dst.update_tags(**tags)
                tmp_dst._set_all_scales([vrt_dst.scales[b - 1] for b in indexes])
                tmp_dst._set_all_offsets([vrt_dst.offsets[b - 1] for b in indexes])

                if not quiet:
                    click.echo("Writing output to: {}".format(dst_path), err=True)
                copy(tmp_dst, dst_path, copy_src_overviews=True, **dst_kwargs)
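# Hedged usage sketch (not from the original source): converting a GeoTIFF into a
# web-optimized COG with cog_translate(). The input/output paths are placeholders,
# and the creation options are a plain example profile rather than a library
# default; "blockxsize"/"blockysize" are required because cog_translate derives
# its internal tile size from them.
def _example_cog_translate(
    src_path: str = "input.tif", dst_path: str = "output_cog.tif"
):
    """Translate a raster to a web-optimized COG (illustrative only)."""
    dst_profile = dict(
        driver="GTiff",
        interleave="pixel",
        tiled=True,
        blockxsize=512,
        blockysize=512,
        compress="DEFLATE",
    )
    cog_translate(
        src_path,
        dst_path,
        dst_profile,
        web_optimized=True,
        overview_resampling="nearest",
        in_memory=False,
    )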
def cog_translate(
    src_path,
    dst_path,
    dst_kwargs,
    indexes=None,
    nodata=None,
    add_mask=None,
    overview_level=None,
    overview_resampling="nearest",
    web_optimized=False,
    latitude_adjustment=True,
    resampling="nearest",
    in_memory=None,
    config=None,
    quiet=False,
):
    """
    Create Cloud Optimized Geotiff.

    Parameters
    ----------
    src_path : str or PathLike object
        A dataset path or URL. Will be opened in "r" mode.
    dst_path : str or PathLike object
        An output dataset path or PathLike object.
        Will be opened in "w" mode.
    dst_kwargs : dict
        Output dataset creation options.
    indexes : tuple or int, optional
        Raster band indexes to copy.
    nodata : int, optional
        Overwrite nodata masking values for input dataset.
    add_mask : bool, optional
        Force output dataset creation with a mask.
    overview_level : int, optional (default: 6)
        COGEO overview (decimation) level.
    overview_resampling : str, optional (default: "nearest")
        Resampling algorithm for overviews.
    web_optimized : bool, optional (default: False)
        Create web-optimized cogeo.
    latitude_adjustment : bool, optional (default: True)
        Use mercator meters for zoom calculation or ensure max zoom equality.
    resampling : str, optional (default: "nearest")
        Resampling algorithm.
    in_memory : bool, optional
        Force processing raster in memory (default: process in memory if small).
    config : dict
        Rasterio Env options.
    quiet : bool, optional (default: False)
        Mask processing steps.

    """
    config = config or {}
    with rasterio.Env(**config):
        with rasterio.open(src_path) as src_dst:
            meta = src_dst.meta
            indexes = indexes if indexes else src_dst.indexes
            nodata = nodata if nodata is not None else src_dst.nodata
            alpha = has_alpha_band(src_dst)
            mask = has_mask_band(src_dst)

            if not add_mask and (
                (nodata is not None or alpha)
                and dst_kwargs.get("compress") in ["JPEG", "jpeg"]
            ):
                warnings.warn(
                    "Using lossy compression with Nodata or Alpha band "
                    "can results in unwanted artefacts.",
                    LossyCompression,
                )

            tilesize = min(int(dst_kwargs["blockxsize"]), int(dst_kwargs["blockysize"]))

            if src_dst.width < tilesize or src_dst.height < tilesize:
                tilesize = 2 ** int(math.log(min(src_dst.width, src_dst.height), 2))
                if tilesize < 64:
                    warnings.warn(
                        "Raster has dimension < 64px. Output COG cannot be tiled"
                        " and overviews cannot be added.",
                        IncompatibleBlockRasterSize,
                    )
                    dst_kwargs.pop("blockxsize", None)
                    dst_kwargs.pop("blockysize", None)
                    dst_kwargs.pop("tiled")
                    overview_level = 0
                else:
                    warnings.warn(
                        "Block Size are bigger than raster sizes. "
                        "Setting blocksize to {}".format(tilesize),
                        IncompatibleBlockRasterSize,
                    )
                    dst_kwargs["blockxsize"] = tilesize
                    dst_kwargs["blockysize"] = tilesize

            vrt_params = dict(add_alpha=True)

            if nodata is not None:
                vrt_params.update(
                    dict(nodata=nodata, add_alpha=False, src_nodata=nodata)
                )

            if alpha:
                vrt_params.update(dict(add_alpha=False))

            if web_optimized:
                bounds = list(
                    transform_bounds(
                        *[src_dst.crs, "epsg:4326"] + list(src_dst.bounds),
                        densify_pts=21,
                    )
                )
                center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2]

                lat = 0 if latitude_adjustment else center[1]
                max_zoom = get_max_zoom(src_dst, lat=lat, tilesize=tilesize)

                extrema = tile_extrema(bounds, max_zoom)
                w, n = mercantile.xy(
                    *mercantile.ul(extrema["x"]["min"], extrema["y"]["min"], max_zoom)
                )
                vrt_res = _meters_per_pixel(max_zoom, 0, tilesize=tilesize)
                vrt_transform = Affine(vrt_res, 0, w, 0, -vrt_res, n)

                vrt_width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
                vrt_height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize

                vrt_params.update(
                    dict(
                        crs="epsg:3857",
                        transform=vrt_transform,
                        width=vrt_width,
                        height=vrt_height,
                        resampling=ResamplingEnums[resampling],
                    )
                )

            with WarpedVRT(src_dst, **vrt_params) as vrt_dst:
                meta = vrt_dst.meta
                meta["count"] = len(indexes)

                if add_mask:
                    meta.pop("nodata", None)
                    meta.pop("alpha", None)

                meta.update(**dst_kwargs)
                meta.pop("compress", None)
                meta.pop("photometric", None)

                if in_memory is None:
                    in_memory = vrt_dst.width * vrt_dst.height < IN_MEMORY_THRESHOLD

                with ExitStack() as ctx:
                    if in_memory:
                        tmpfile = ctx.enter_context(MemoryFile())
                        tmp_dst = ctx.enter_context(tmpfile.open(**meta))
                    else:
                        tmpfile = ctx.enter_context(TemporaryRasterFile(dst_path))
                        tmp_dst = ctx.enter_context(
                            rasterio.open(tmpfile.name, "w", **meta)
                        )

                    wind = list(tmp_dst.block_windows(1))

                    if not quiet:
                        click.echo("Reading input: {}".format(src_path), err=True)

                    fout = os.devnull if quiet else sys.stderr
                    with click.progressbar(
                        wind, length=len(wind), file=fout, show_percent=True
                    ) as windows:
                        for ij, w in windows:
                            matrix = vrt_dst.read(window=w, indexes=indexes)
                            tmp_dst.write(matrix, window=w)

                            if add_mask or mask:
                                mask_value = vrt_dst.dataset_mask(window=w)
                                tmp_dst.write_mask(mask_value, window=w)

                    if overview_level is None:
                        overview_level = get_maximum_overview_level(vrt_dst, tilesize)

                    if not quiet and overview_level:
                        click.echo("Adding overviews...", err=True)

                    overviews = [2 ** j for j in range(1, overview_level + 1)]
                    tmp_dst.build_overviews(
                        overviews, ResamplingEnums[overview_resampling]
                    )

                    if not quiet:
                        click.echo("Updating dataset tags...", err=True)

                    for i, b in enumerate(indexes):
                        tmp_dst.set_band_description(i + 1, src_dst.descriptions[b - 1])

                    tags = src_dst.tags()
                    tags.update(
                        dict(
                            OVR_RESAMPLING_ALG=ResamplingEnums[
                                overview_resampling
                            ].name.upper()
                        )
                    )
                    tmp_dst.update_tags(**tags)

                    if not quiet:
                        click.echo("Writing output to: {}".format(dst_path), err=True)
                    copy(tmp_dst, dst_path, copy_src_overviews=True, **dst_kwargs)
def create_low_level_cogs(
    mosaic_path: str,
    output_profile: Dict,
    prefix: str = "mosaic_ovr",
    max_overview_level: int = 6,
    config: Dict = None,
    threads=1,
) -> None:
    """
    Create WebOptimized Overview COG from a mosaic definition file.

    Attributes
    ----------
    mosaic_path : str, required
        Mosaic definition path.
    output_profile : dict, required
    prefix : str
    max_overview_level : int
    config : dict
        Rasterio Env options.

    """
    with MosaicBackend(mosaic_path) as mosaic:
        base_zoom = mosaic.metadata["minzoom"] - 1
        mosaic_quadkey_zoom = mosaic.quadkey_zoom
        bounds = mosaic.metadata["bounds"]
        mosaic_quadkeys = set(mosaic._quadkeys)

        # Select a random quadkey/asset and get dataset info
        tile = mercantile.quadkey_to_tile(random.sample(mosaic_quadkeys, 1)[0])
        assets = mosaic.tile(*tile)
        info = _get_info(assets[0])

        extrema = tile_extrema(bounds, base_zoom)
        tilesize = 256
        res = _meters_per_pixel(base_zoom, 0, tilesize=tilesize)

        # Create multiple files if the coverage is too big
        extremas = _split_extrema(
            extrema, max_ovr=max_overview_level, tilesize=tilesize
        )
        for ix, extrema in enumerate(extremas):
            blocks = list(_get_blocks(extrema))
            random.shuffle(blocks)

            width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
            height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize
            w, n = mercantile.xy(
                *mercantile.ul(extrema["x"]["min"], extrema["y"]["min"], base_zoom)
            )

            params = dict(
                driver="GTiff",
                dtype=info[1],
                count=len(info[6]),
                width=width,
                height=height,
                crs="epsg:3857",
                transform=Affine(res, 0, w, 0, -res, n),
                nodata=info[4],
                tiled=True,
                blockxsize=256,
                blockysize=256,
            )

            config = config or {}
            with rasterio.Env(**config):
                with MemoryFile() as memfile:
                    with memfile.open(**params) as mem:

                        def _get_tile(wind):
                            idx, window = wind
                            x = extrema["x"]["min"] + idx[1]
                            y = extrema["y"]["min"] + idx[0]
                            t = mercantile.Tile(x, y, base_zoom)
                            kds = set(find_quadkeys(t, mosaic_quadkey_zoom))
                            if not mosaic_quadkeys.intersection(kds):
                                return window, None, None

                            assets = mosaic.tile(*t)
                            if not assets:
                                raise Exception(
                                    f"No asset for tile {x}-{y}-{base_zoom}"
                                )

                            if assets:
                                tile, mask = mosaic_tiler(
                                    assets,
                                    x,
                                    y,
                                    base_zoom,
                                    cogeo.tile,
                                    indexes=info[6],
                                    tilesize=tilesize,
                                    pixel_selection=defaults.FirstMethod(),
                                )

                            return window, tile, mask

                        with futures.ThreadPoolExecutor(
                            max_workers=threads
                        ) as executor:
                            future_work = [
                                executor.submit(_get_tile, item) for item in blocks
                            ]
                            with click.progressbar(
                                futures.as_completed(future_work),
                                length=len(future_work),
                                show_percent=True,
                            ) as future:
                                for _ in future:
                                    pass

                        for f in _filter_futures(future_work):
                            window, tile, mask = f
                            if tile is None:
                                continue

                            mem.write(tile, window=window)
                            if info[5]:
                                mem.write_mask(mask.astype("uint8"), window=window)

                        cog_translate(
                            mem,
                            f"{prefix}_{ix}.tif",
                            output_profile,
                            config=config,
                            in_memory=True,
                        )
def create_low_level_cogs(
    mosaic_definition: Dict,
    output_profile: Dict,
    prefix: str = "mosaic_ovr",
    max_overview_level: int = 6,
    config: Dict = None,
    threads=1,
) -> None:
    """
    Create WebOptimized Overview COG from a mosaic definition file.

    Attributes
    ----------
    mosaic_definition : dict, required
        Mosaic definition.
    prefix : str
    max_overview_level : int
    config : dict
        Rasterio Env options.

    """
    tilesize = 256
    base_zoom = mosaic_definition["minzoom"] - 1
    bounds = mosaic_definition["bounds"]

    asset = _get_asset_example(mosaic_definition)
    info = _get_info(asset)

    extrema = tile_extrema(bounds, base_zoom)
    res = _meters_per_pixel(base_zoom, 0, tilesize=tilesize)

    # Create multiple files if the coverage is too big
    extremas = _split_extrema(extrema, max_ovr=max_overview_level, tilesize=tilesize)
    for ix, extrema in enumerate(extremas):
        blocks = list(_get_blocks(extrema))
        random.shuffle(blocks)

        width = (extrema["x"]["max"] - extrema["x"]["min"]) * tilesize
        height = (extrema["y"]["max"] - extrema["y"]["min"]) * tilesize
        w, n = mercantile.xy(
            *mercantile.ul(extrema["x"]["min"], extrema["y"]["min"], base_zoom)
        )

        params = dict(
            driver="GTiff",
            dtype=info[1],
            count=info[0],
            width=width,
            height=height,
            crs="epsg:3857",
            transform=Affine(res, 0, w, 0, -res, n),
            nodata=info[4],
            tiled=True,
            blockxsize=256,
            blockysize=256,
        )

        config = config or {}
        with rasterio.Env(**config):
            with MemoryFile() as memfile:
                with memfile.open(**params) as mem:

                    def _get_tile(wind):
                        idx, window = wind
                        x = extrema["x"]["min"] + idx[1]
                        y = extrema["y"]["min"] + idx[0]
                        assets = list(
                            set(get_assets(mosaic_definition, x, y, base_zoom))
                        )
                        if assets:
                            tile, mask = mosaic_tiler(
                                assets,
                                x,
                                y,
                                base_zoom,
                                cogeoTiler,
                                tilesize=tilesize,
                                pixel_selection=defaults.FirstMethod(),
                            )
                            if tile is None:
                                raise Exception("Empty")

                        return window, tile, mask

                    with futures.ThreadPoolExecutor(max_workers=threads) as executor:
                        future_work = [
                            executor.submit(_get_tile, item) for item in blocks
                        ]
                        with click.progressbar(
                            futures.as_completed(future_work),
                            length=len(future_work),
                            show_percent=True,
                        ) as future:
                            for _ in future:
                                pass

                    for f in _filter_futures(future_work):
                        window, tile, mask = f
                        mem.write(tile, window=window)
                        if info[5]:
                            mem.write_mask(mask.astype("uint8"), window=window)

                    cog_translate(
                        mem,
                        f"{prefix}_{ix}.tif",
                        output_profile,
                        config=config,
                        in_memory=True,
                    )