def parent(ctx, input, depth):
    """Takes [x, y, z] tiles as input and writes parents to stdout
    in the same form.

    Input may be a compact newline-delimited sequence of JSON or a
    pretty-printed ASCII RS-delimited sequence of JSON (like
    https://tools.ietf.org/html/rfc8142 and
    https://tools.ietf.org/html/rfc7159).

    $ echo "[486, 332, 10]" | mercantile parent

    Output:

    [243, 166, 9]
    """
    src = normalize_input(input)
    for line in iter_lines(src):
        tile = json.loads(line)[:3]
        if tile[2] - depth < 0:
            raise click.UsageError(
                "Invalid parent level: {0}".format(tile[2] - depth))
        for i in range(depth):
            tile = mercantile.parent(tile)
        output = json.dumps(tile)
        click.echo(output)
def gdf_from_tile(self, tile: Tile) -> gpd.GeoDataFrame:
    x, y, z = tile
    if z != int(z):
        raise ValueError("Fractional zooms not allowed!")
    if x == int(x) and y == int(y):
        x, y, z = int(x), int(y), int(z)
        tile_parents = [parent((x, y, z), zoom=i) for i in range(z + 1)]
        for tile_parent in tile_parents:
            if tile_parent in self.cached_tiles:
                return self.__get_from_cache(tile)
        if self.use_overpass:
            print(
                f"[WARNING] Fetching {tile} from OpenStreetMap! This is slower than using regional shapefiles."
            )
            return self.__get_from_osmnx(tile)
        else:
            return self.__get_from_cache(tile)
    # Fractional x/y: merge the four surrounding integer tiles.
    source_tiles = [
        Tile(np.trunc(x), np.trunc(y), z),
        Tile(np.trunc(x), np.ceil(y), z),
        Tile(np.ceil(x), np.trunc(y), z),
        Tile(np.ceil(x), np.ceil(y), z),
    ]
    returned_gdfs = [self.gdf_from_tile(tile) for tile in source_tiles]
    return pd.concat(returned_gdfs, ignore_index=True)
def parent(ctx, input, depth):
    """Takes a [x, y, z] tile as input and writes its parent to stdout
    in the same form.

    $ echo "[486, 332, 10]" | mercantile parent

    Output:

    [243, 166, 9]
    """
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('mercantile')
    try:
        src = click.open_file(input).readlines()
    except IOError:
        src = [input]
    stdout = click.get_text_stream('stdout')
    try:
        for line in src:
            line = line.strip()
            tile = json.loads(line)[:3]
            if tile[2] - depth < 0:
                raise ValueError("Maximum depth exceeded.")
            for i in range(depth):
                tile = mercantile.parent(tile)
            output = json.dumps(tile)
            stdout.write(output)
            stdout.write('\n')
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
def prefetch(self, tile: Tile, host_dir: str = None, verbose: bool = False):
    x, y, z = tile
    if z != int(z):
        raise ValueError("Fractional zooms not allowed!")
    if x == int(x) and y == int(y):
        x, y = int(x), int(y)
        tile_parents = [parent(tile, zoom=i) for i in range(z + 1)]
        for tile_parent in tile_parents:
            if tile_parent in self.cached_tiles:
                return
        if z < 14:
            blob = children(tile, zoom=14)
            if verbose:
                from tqdm import tqdm
                blob = tqdm(blob, desc="OpenStreetMap Prefetch")
            for child in blob:
                self.prefetch(child)
        else:
            self.__get_from_osmnx(tile)
        if host_dir is not None:
            self.save(host_dir)
        return
    # Fractional x/y: prefetch the four surrounding integer tiles instead.
    source_tiles = [
        Tile(np.trunc(x), np.trunc(y), z),
        Tile(np.trunc(x), np.ceil(y), z),
        Tile(np.ceil(x), np.trunc(y), z),
        Tile(np.ceil(x), np.ceil(y), z),
    ]
    for tile in source_tiles:
        self.prefetch(tile)
    return
def parent(ctx, input, depth):
    """Takes [x, y, z] tiles as input and writes parents to stdout
    in the same form.

    Input may be a compact newline-delimited sequence of JSON or a
    pretty-printed ASCII RS-delimited sequence of JSON (like
    https://tools.ietf.org/html/rfc8142 and
    https://tools.ietf.org/html/rfc7159).

    Example:

    \b
    echo "[486, 332, 10]" | mercantile parent
    [243, 166, 9]

    """
    src = normalize_input(input)
    for line in iter_lines(src):
        tile = json.loads(line)[:3]
        if tile[2] - depth < 0:
            raise click.UsageError("Invalid parent level: {0}".format(tile[2] - depth))
        for i in range(depth):
            tile = mercantile.parent(tile)
        output = json.dumps(tile)
        click.echo(output)
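# A minimal library-level sketch of what the CLI commands above do, using only
# the public mercantile API; the helper name parent_at_depth is hypothetical,
# not taken from any of the snippets here.
import mercantile


def parent_at_depth(x, y, z, depth=1):
    """Walk `depth` levels up the tile pyramid, mirroring the depth argument above."""
    if z - depth < 0:
        raise ValueError("Invalid parent level: {0}".format(z - depth))
    tile = (x, y, z)
    for _ in range(depth):
        tile = mercantile.parent(tile)
    return tile


# parent_at_depth(486, 332, 10) returns Tile(x=243, y=166, z=9),
# matching the docstring example above.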
def siblings(self) -> List["TileID"]:
    """
    Returns a list of this tile's siblings.
    """
    return [
        TileID(mt)
        for mt in mercantile.children(mercantile.parent(self.asmrcantile))
    ]
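# Standalone sketch of the sibling relation the TileID helper above relies on:
# a tile's siblings are the four children of its parent, the tile itself
# included. Uses mercantile directly because the TileID class is not shown here.
import mercantile

tile = mercantile.Tile(x=486, y=332, z=10)
siblings = mercantile.children(mercantile.parent(tile))
assert tile in siblings and len(siblings) == 4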
def increment(z, y, x):
    # print("Incrementing", (x, y, z))
    while z > 1:
        key = (z, y, x)
        Heatmap.count_dict[key] += 1
        tile = mercantile.Tile(x, y, z)
        tile = mercantile.parent(tile)
        x, y, z = tile.x, tile.y, tile.z
def test_site_tile():
    shapes_dir = testing_utils.original_shapes_dir
    assert site_tile('negev', 10) == site_tile('negev', 10, shapes_dir)
    for site in site_names(shapes_dir):
        if site == 'rrvalley':
            # railroad valley on tile intersection - not supported yet
            with pytest.raises(AssertionError):
                tile = site_tile(site, shapes_dir=shapes_dir)
        else:
            tile = site_tile(site, shapes_dir=shapes_dir)
            assert parent(tile) == site_tile(site, 12, shapes_dir)
def get_up_tile(x, y, z, target_zoom):
    assert z >= target_zoom, 'target zoom more than zoom %s >= %s' % (z, target_zoom)
    k = (x, y, z, target_zoom)
    if k not in cache_up:
        if z == target_zoom:
            result = (x, y, z)
        else:
            t = mercantile.parent(x, y, z)
            result = get_up_tile(t.x, t.y, t.z, target_zoom)
        cache_up[k] = result
        return result
    return cache_up[k]
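# Illustrative check for get_up_tile above; it assumes the module-level
# cache_up dict the function memoizes into is defined. Walking (486, 332, 10)
# up to zoom 8 should match mercantile.parent called with the zoom keyword.
assert get_up_tile(486, 332, 10, 8) == tuple(mercantile.parent(486, 332, 10, zoom=8))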
def add_parent_tiles(child_x, child_y, child_z, date_dict):
    output_list = []
    for i in range(child_z, 4, -1):
        parent = mercantile.parent(child_x, child_y, child_z)
        output_list.append(((parent.x, parent.y, parent.z), date_dict))
        child_x, child_y, child_z = parent.x, parent.y, parent.z
    return output_list
def get_server(z, x, y):
    x1 = x
    y1 = y
    for l in range(z, 0, -1):
        # print(z, l, x1, y1)
        server = Server.objects.filter(active=True, z=l, x=x1, y=y1).first()
        if server:
            # todo: return random one
            return server
        x1, y1, _ = mercantile.parent((x1, y1, l))
    return Server.objects.get(z=0, x=0, y=0)
def _generate_tile(self, tile: mercantile.Tile):
    """
    Generate a single target tile from its parent by running osmconvert.
    If the tile is smaller than the intended target size it is considered done
    and moved to the out_dir. If not, additional jobs are scheduled to further
    break it down.

    :param tile: Target tile which should be generated
    """
    box = mercantile.bounds(tile)
    parent = mercantile.parent(tile)
    parent_file = self.working_dir / f'{parent.z}_{parent.x}_{parent.y}.pbf'
    target_file = self.working_dir / f'{tile.z}_{tile.x}_{tile.y}.pbf'

    # these cases should not be hit but we check them regardless
    if not parent_file.exists():
        print_error(f'Not generating {tile} because parent does not exist')
        return
    if parent_file.stat().st_size < self.target_size:
        print_error(
            f'Not generating {tile} because parent has reached target size')
        return

    if not target_file.exists() or parent_file.stat().st_mtime > target_file.stat().st_mtime:
        # only build file if it does not exist
        # or parent file has been modified since target was last generated
        print(f'Generating {tile}')
        cmd = [
            'osmconvert',
            f'-b={box.west},{box.south},{box.east},{box.north}',
            f'-o={target_file.absolute()}',
            '--complete-ways',
            '--complex-ways',
            '--out-pbf',
            str(parent_file.absolute())
        ]
        subprocess.run(cmd, cwd=str(parent_file.parent), check=True)
    else:
        print(f'{tile} already exists and is current. skipping')

    if target_file.stat().st_size < self.target_size:
        print(f'{Colors.OKGREEN}{tile} has reached target size{Colors.ENDC}')
        subprocess.run(
            ['rsync', str(target_file.absolute()), str(self.out_dir)],
            check=True)
    else:
        self.extract(tile)
def get_assets(url: str, x: int, y: int, z: int) -> Tuple[str]:
    """Get assets."""
    mosaic_def = fetch_mosaic_definition(url)

    min_zoom = mosaic_def["minzoom"]
    max_zoom = mosaic_def["maxzoom"]
    if z > max_zoom or z < min_zoom:
        return []  # return empty asset

    mercator_tile = mercantile.Tile(x=x, y=y, z=z)
    quadkey_zoom = mosaic_def.get("quadkey_zoom", min_zoom)  # 0.0.2

    # get parent
    if mercator_tile.z > quadkey_zoom:
        depth = mercator_tile.z - quadkey_zoom
        for i in range(depth):
            mercator_tile = mercantile.parent(mercator_tile)
        quadkey = [mercantile.quadkey(*mercator_tile)]

    # get child
    elif mercator_tile.z < quadkey_zoom:
        depth = quadkey_zoom - mercator_tile.z
        mercator_tiles = [mercator_tile]
        for i in range(depth):
            mercator_tiles = sum([mercantile.children(t) for t in mercator_tiles], [])

        mercator_tiles = list(filter(lambda t: t.z == quadkey_zoom, mercator_tiles))
        quadkey = [mercantile.quadkey(*tile) for tile in mercator_tiles]

    else:
        quadkey = [mercantile.quadkey(*mercator_tile)]

    assets = list(
        itertools.chain.from_iterable(
            [mosaic_def["tiles"].get(qk, []) for qk in quadkey]
        )
    )

    # check if we have a mosaic in the url (.json/.gz)
    return list(
        itertools.chain.from_iterable(
            [
                get_assets(asset, x, y, z)
                if os.path.splitext(asset)[1] in [".json", ".gz"]
                else [asset]
                for asset in assets
            ]
        )
    )
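# Side note (illustrative values, not taken from the mosaic code above): the
# depth loop over mercantile.parent is equivalent to a single call with the
# zoom keyword argument.
import mercantile

t = mercantile.Tile(x=486, y=332, z=10)
stepped = t
for _ in range(t.z - 8):
    stepped = mercantile.parent(stepped)
assert stepped == mercantile.parent(t, zoom=8)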
def parent(ctx, input, depth):
    """Takes a [x, y, z] tile as input and writes its parent to stdout
    in the same form.

    $ echo "[486, 332, 10]" | mercantile parent

    Output:

    [243, 166, 9]
    """
    src = normalize_input(input)
    for line in iter_lines(src):
        tile = json.loads(line)[:3]
        if tile[2] - depth < 0:
            raise click.UsageError("Invalid parent level: {0}".format(tile[2] - depth))
        for i in range(depth):
            tile = mercantile.parent(tile)
        output = json.dumps(tile)
        click.echo(output)
def downsample((tile, data)):  # note: Python 2 tuple-unpacking signature
    if data is None:
        return

    print("Downsampling", tile)

    # Get the bounds of the tile.
    ulx, uly = mercantile.xy(
        *mercantile.ul(tile.x, tile.y, tile.z))
    lrx, lry = mercantile.xy(
        *mercantile.ul(tile.x + 1, tile.y + 1, tile.z))

    # TODO constantize
    tmp_path = "/vsimem/tile"

    # create GeoTIFF
    meta = {
        "driver": "GTiff",
        "crs": "EPSG:3857",
        "nodata": data.fill_value,
        "count": 1,
        "dtype": data.dtype,
        "width": CHUNK_SIZE,
        "height": CHUNK_SIZE,
        "transform": from_bounds(ulx, lry, lrx, uly, CHUNK_SIZE, CHUNK_SIZE),
    }

    with rasterio.drivers():
        with rasterio.open(tmp_path, "w", **meta) as tmp:
            # use GDAL to resample by writing an ndarray and immediately reading
            # it out into a smaller array
            tmp.write(data, 1)
            resampled = tmp.read(
                indexes=1,
                masked=True,
                out=ma.array(np.empty((CHUNK_SIZE / 2, CHUNK_SIZE / 2), data.dtype)),
            )

            if resampled.mask.all():
                return

            corner = CORNERS[(tile.x % 2, tile.y % 2)]
            return (mercantile.parent(tile), (corner, resampled))
def get_image(self, *tile: Tile, source_zoom: int = 18, black_fail: bool = False):
    if len(tile) == 1:
        tile = tile[0]
    x, y, z = tile
    if not self.max_res:
        source_zoom = z
    parents = [
        parent(Tile(int(fn(x)), int(fn(y)), z), zoom=i)
        for i, fn in product(range(z + 1), [np.ceil, np.floor])
    ]
    for supertile in parents:
        if supertile in self.stacks:
            return self.__make_image(self.stacks[supertile], tile, source_zoom)
    raise FileNotFoundError(
        f"Could not find a parent tilestack for requested subtile {tile}!")
def find_quadkeys(mercator_tile: mercantile.Tile, quadkey_zoom: int) -> List[str]:
    """
    Find quadkeys at desired zoom for tile

    Attributes
    ----------
    mercator_tile: mercantile.Tile
        Input tile to use when searching for quadkeys
    quadkey_zoom: int
        Zoom level

    Returns
    -------
    list
        List[str] of quadkeys

    """
    # get parent
    if mercator_tile.z > quadkey_zoom:
        depth = mercator_tile.z - quadkey_zoom
        for i in range(depth):
            mercator_tile = mercantile.parent(mercator_tile)
        return [mercantile.quadkey(*mercator_tile)]

    # get child
    elif mercator_tile.z < quadkey_zoom:
        depth = quadkey_zoom - mercator_tile.z
        mercator_tiles = [mercator_tile]
        for i in range(depth):
            mercator_tiles = sum(
                [mercantile.children(t) for t in mercator_tiles], [])

        mercator_tiles = list(
            filter(lambda t: t.z == quadkey_zoom, mercator_tiles))
        return [mercantile.quadkey(*tile) for tile in mercator_tiles]

    else:
        return [mercantile.quadkey(*mercator_tile)]
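# Brief usage sketch for find_quadkeys above; the tile and zoom values are
# illustrative assumptions, not taken from any particular mosaic definition.
import mercantile

tile = mercantile.Tile(x=486, y=332, z=10)

# quadkey_zoom below the tile zoom: the tile is walked up to its ancestor,
# so a single, shorter quadkey comes back
coarser = find_quadkeys(tile, quadkey_zoom=9)
assert len(coarser) == 1 and len(coarser[0]) == 9

# quadkey_zoom above the tile zoom: the tile is expanded to its descendants,
# one quadkey per child tile at that zoom (4 ** depth of them)
finer = find_quadkeys(tile, quadkey_zoom=12)
assert len(finer) == 4 ** 2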
def generate_overview_for_zoom(existing_zoom, tile_dir, max_coords):
    """Generate overview tiles for a given zoom level

    Args:
        - existing_zoom: the zoom level for which tiles already exist
        - tile_dir: the root of the directory with tiles
    """
    zoom_dir = tile_dir / str(existing_zoom)

    tile_coords = []
    for path in zoom_dir.glob('*/*.geojson'):
        y = int(path.stem)
        x = int(path.parents[0].name)
        tile_coords.append((x, y))

    tiles = [mercantile.Tile(x, y, existing_zoom) for x, y in tile_coords]
    parents = {mercantile.parent(t) for t in tiles}

    for parent in parents:
        # Which of its children exist?
        children = [c for c in mercantile.children(parent) if c in tiles]

        # If the parent only has one child, then you can assume the child was
        # already small enough, and just write
        if len(children) == 1:
            # Load the child's features
            features = load_features(tile=children[0], tile_dir=tile_dir)
            # And write to the parent's tile
            write_geojson(features=features, tile=parent, tile_dir=tile_dir)
            continue

        # Otherwise, we have more than one child.
        # Load all the features, then determine if there are too many
        features = []
        for child in children:
            features.extend(load_features(tile=child, tile_dir=tile_dir))

        features = simplify_features(features, max_coords)
        write_geojson(features=features, tile=parent, tile_dir=tile_dir)
def test_parent():
    parent = mercantile.parent(486, 332, 10)
    assert parent == (243, 166, 9)
    assert parent.z == 9
def test_root_parent():
    assert mercantile.parent(0, 0, 0) is None
def test_parent_invalid_args(args):
    """tile arg must have length 1 or 3"""
    with pytest.raises(mercantile.TileArgParsingError):
        mercantile.parent(*args)
def test_parent_bad_tile_zoom():
    with pytest.raises(mercantile.InvalidZoomError) as e:
        mercantile.parent((243.3, 166.2, 9), zoom=10)

    assert "zoom must be an integer and less than" in str(e.value)
def parent(self) -> "TileID":
    """
    Return the TileID for the parent.
    """
    return TileID(mercantile.parent(self.asmrcantile))
def test_parent_fractional_tile():
    with pytest.raises(mercantile.ParentTileError) as e:
        mercantile.parent((243.3, 166.2, 9), zoom=1)

    assert "the parent of a non-integer tile is undefined" in str(e.value)
def pyramid(sc, zoom, dtype, nodata, tiles, prefix, resampling="average"):
    # NOTE: Python 2 style code (itertools.ifilter/imap, implicit integer division).
    meta = dict(
        driver="GTiff",
        crs="EPSG:3857",
        tiled=True,
        compress="deflate",
        predictor=2,
        sparse_ok=True,
        nodata=nodata,
        dtype=dtype,
        blockxsize=512,
        blockysize=512,
    )

    if np.dtype(dtype).kind == "f":
        meta["predictor"] = 3

    empty = ma.masked_array(np.full((CHUNK_SIZE, CHUNK_SIZE), nodata, dtype),
                            fill_value=nodata)

    tile_count = tiles.count()
    print("%d tiles to process" % (tile_count))

    # TODO deal with multiple bands (probably with flatMapValues)

    min_zoom = 0
    for z in range(zoom - 1, min_zoom - 1, -1):
        print("Processing zoom %d" % (z))

        tile_count = max(1, tile_count / 4)
        print("Tile count: %d" % (tile_count))

        # generate a list of tiles at the current zoom (from available children)
        tiles = tiles.map(
            lambda child: mercantile.parent(child)
        ).distinct()

        tiles.keyBy(z_key).partitionBy(tiles.count()).values().mapPartitions(
            lambda partition: map(
                # for each parent tile:
                # 5. write it to disk
                lambda parent: write(meta, prefix)(
                    reduce(
                        # 3. merge
                        merge,
                        # 2. downsample them
                        itertools.ifilter(
                            None,
                            itertools.imap(
                                downsample,
                                # 1. fetch children
                                itertools.ifilter(
                                    None,
                                    itertools.imap(
                                        lambda tile: read_chunk(tile, prefix),
                                        mercantile.children(parent)
                                    )
                                )
                            )
                        ),
                        (None, empty.copy())
                    )
                ),
                partition
            )
        ).collect()
def test_parent_fractional_zoom():
    with pytest.raises(mercantile.InvalidZoomError) as e:
        mercantile.parent((243, 166, 9), zoom=1.2)

    assert "zoom must be an integer and less than" in str(e.value)
def roll_up_affected_tiles(self, tiles):
    affected = set()
    for tile in tiles:
        affected.add(mercantile.parent(tile))
    return affected
def test_parent_multi():
    parent = mercantile.parent(486, 332, 10, zoom=8)
    assert parent == (121, 83, 8)
    assert parent.z == 8
def render_png_from_stac_catalog(z, x, y, scale=1):
    with Timer("rendering png from stac catalog"):
        stac_url = request.args.get("url", None)
        jq_filter = request.args.get("jq", None)
        stac_expr = request.args.get("expr", None)
        stac_datetime = request.args.get("datetime", None)

        # size of the tile, usually (256, 256)
        shape = tuple(map(int, Affine.scale(scale) * TILE_SHAPE))

        if stac_expr:
            # captures asset-band combos
            # like B5[0] in (B5[0] - B4[0])/(B5[0] + B4[0])
            # or like NIR in (NIR - RED) / (NIR + RED)
            asset_band_regex = "(?P<asset>[A-Za-z][A-Za-z0-9]+)(?:\[(?P<band>\d+)\])?"
            matches = list(set(re.findall(asset_band_regex, stac_expr)))

            # sorted list of assets and bands
            # like [('B4', 0), ('B5', 0)]
            # or like [('NIR', 0), ('RED', 0)]
            asset_bands = sorted(
                list(set([(asset, int(band) if band else 0)
                          for asset, band in matches])))

            # sorted list of asset names
            # like ['B4', 'B5']
            # or like ['NIR', 'RED']
            asset_names = sorted(
                list(set([asset for asset, band in asset_bands])))

            # convert expr from a format for running band math across multiple assets
            # into a format for running band math for a single file for the combined assets
            # from: (NIR - RED) / (NIR + RED)
            # to: (b1 - b2) / (b1 + b2)
            def repl(m):
                asset, band = m.groups()
                band = int(band) if band else 0
                # add one to index number because single-file band math expression
                # requires that band indexes start at 1, i.e. b1, b2, b3...
                return 'b' + str(asset_bands.index((asset, band)) + 1)

            expr = re.sub(asset_band_regex, repl, stac_expr)
        else:
            asset_names = None
            expr = None

        tile = mercantile.Tile(x, y, z)

        tile_bounds = mercantile.bounds(tile)
        tile_bbox = [
            tile_bounds.west, tile_bounds.south, tile_bounds.east,
            tile_bounds.north
        ]

        # we use the parent tile for searching because sometimes
        # a search engine might not return results
        # when the tile is really small
        parent_tile = mercantile.parent(tile)
        search_bounds = mercantile.bounds(parent_tile)
        search_bbox = [
            search_bounds.west, search_bounds.south, search_bounds.east,
            search_bounds.north
        ]

        tile_polygon = box(*tile_bbox)

        params = {
            'bbox': str(search_bbox).replace(' ', ''),
            'limit': 500,
        }
        if stac_datetime:
            params['datetime'] = stac_datetime

        with Timer("querying stac"):
            response = requests.get(stac_url, params=params)
        features = response.json()['features']
        LOG.info(f'number of features: {len(features)}')

        # filter features to those that overlap tile
        features = [
            feature for feature in features
            if box(*feature['bbox']).intersects(tile_polygon)
        ]
        feature_count = len(features)
        LOG.info(
            f'number of features after filtering by feature extent: {feature_count}')

        if jq_filter:
            features = jq.compile(jq_filter).input(features).first()
            LOG.info(
                f'number of features after filtering by jq expression: {len(features)}')

        canvas_bounds = Bounds(bounds=mercantile.xy_bounds(tile),
                               crs=WEB_MERCATOR_CRS)
        LOG.info(f'canvas bounds: {canvas_bounds}')

        assets = []
        for fid, feature in enumerate(features):
            images = {}
            if asset_names:
                for asset_name in asset_names:
                    images[asset_name] = feature['assets'][asset_name]['href']
            elif 'visual' in feature['assets']:
                images['visual'] = feature['assets']['visual']['href']
            else:
                raise Exception("Not sure what assets to use to create the image")

            for asset_name, href in images.items():
                assets.append({"fid": fid, "name": asset_name, "url": href})

        if expr and len(asset_names) > 0:

            def add_pixels_to_asset(asset):
                try:
                    url = asset['url']
                    with Timer(f'reading pixels for {url}'):
                        source = Source(url=url, name=url, resolution=None)
                        with get_source(url) as src:
                            with Timer(f'reading window for {url}'):
                                asset['pixels'] = read_window(
                                    src, canvas_bounds, shape, source)
                except Exception as e:
                    LOG.error(e)
                    raise e

            with Timer(f'reading all the pixels'):
                with futures.ThreadPoolExecutor(
                        max_workers=MAX_WORKERS) as executor:
                    executor.map(add_pixels_to_asset, assets)

            sources = []
            for fid, assets in groupby(assets, lambda asset: asset['fid']):
                assets = list(assets)
                name_to_pixels = dict([(asset['name'], asset['pixels'])
                                       for asset in assets])
                windows = tuple([
                    name_to_pixels[name].data[band]
                    for name, band in asset_bands
                ])
                stacked = np.ma.stack(windows)
                names = [asset['name'] for asset in assets]
                source = Source(url=None,
                                name=str(fid) + '{' + ','.join(names) + '}',
                                resolution=None,
                                expr=expr,
                                pixels=PixelCollection(stacked, canvas_bounds),
                                recipes={
                                    "expr": expr,
                                    "imagery": True
                                } if expr else {})
                sources.append(source)
        else:
            sources = [
                Source(url=asset['url'],
                       name=asset['name'],
                       resolution=None,
                       recipes={
                           "expr": expr,
                           "imagery": True
                       } if expr else {}) for asset in assets
            ]

        headers, data = tiling.render_tile_from_sources(
            tile,
            sources,
            format=IMAGE_FORMAT,
            transformation=IMAGE_TRANSFORMATION,
            scale=scale,
        )

        return data, 200, headers