def get_sources(self, bounds, resolution):
    recipes = {"imagery": True}

    if self._rgb is not None:
        # materialize the iterator; a bare map() can't be serialized and
        # can only be consumed once under Python 3
        recipes["rgb_bands"] = list(map(int, self._rgb.split(",")))

    if self._nodata is not None:
        recipes["nodata"] = self._nodata

    if self._linear_stretch is not None:
        recipes["linear_stretch"] = "per_band"

    if self._resample is not None:
        recipes["resample"] = self._resample

    if self._expr is not None:
        recipes["expr"] = self._expr

    yield Source(
        url=self._uri,
        name=self._name,
        resolution=self._resolution,
        band_info={},
        meta=self._meta,
        recipes=recipes,
    )
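# Hedged illustration (the values are hypothetical): for a catalog constructed
# with rgb="3,2,1", nodata=0, and resample="bilinear", the method above would
# yield a Source whose recipes dict looks like:
#   {"imagery": True, "rgb_bands": [3, 2, 1], "nodata": 0,
#    "resample": "bilinear"}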
def _candidates(self, bounds, resolution):
    cursor = self.conn.cursor()
    zoom = get_zoom(max(resolution))

    if bounds.crs == WGS84_CRS:
        left, bottom, right, top = bounds.bounds
    else:
        left, bottom, right, top = warp.transform_bounds(
            bounds.crs, WGS84_CRS, *bounds.bounds)

    left = left if left != Infinity else -180
    bottom = bottom if bottom != Infinity else -90
    right = right if right != Infinity else 180
    top = top if top != Infinity else 90

    try:
        cursor.execute(
            """
            WITH bbox AS (
              -- WKT has no BOX type, so GeomFromText('BOX(...)') returns
              -- NULL in SpatiaLite; build the envelope with BuildMbr instead
              SELECT BuildMbr({minx}, {miny}, {maxx}, {maxy}, 4326) geom
            ),
            sources AS (
              SELECT
                url, source, resolution,
                coalesce(band_info, '{{}}') band_info,
                coalesce(meta, '{{}}') meta,
                coalesce(recipes, '{{}}') recipes,
                acquired_at, priority,
                ST_Multi(footprints.geom) geom,
                min_zoom, max_zoom
              FROM footprints
              JOIN bbox ON ST_Intersects(footprints.geom, bbox.geom)
              WHERE ? BETWEEN min_zoom AND max_zoom
            )
            SELECT
              url, source, resolution, band_info, meta, recipes,
              acquired_at, null band, priority
            FROM sources
            """.format(minx=left, miny=bottom, maxx=right, maxy=top),
            (zoom, ))

        for record in cursor:
            yield Source(*record)
    except Exception as e:
        LOG.warning(e)
    finally:
        cursor.close()
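# A minimal sketch of what get_zoom (used above) is assumed to compute: the
# web mercator zoom whose nominal resolution (meters/pixel at the equator)
# first meets the requested resolution. This is an assumption about the
# helper, not its actual implementation; get_zoom_sketch is a hypothetical name.
import math

def get_zoom_sketch(resolution, tile_size=256):
    circumference = 2 * math.pi * 6378137  # WGS84 equatorial circumference, ~40075017 m
    return int(math.ceil(math.log2(circumference / (resolution * tile_size))))

# e.g. get_zoom_sketch(156543) == 0 (zoom 0 covers the world in one tile);
# get_zoom_sketch(10) == 14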
def get_sources(self, bounds, resolution):
    bounds, bounds_crs = bounds
    zoom = get_zoom(max(resolution))

    left, bottom, right, top = warp.transform_bounds(
        bounds_crs, WGS84_CRS, *bounds)

    # standard bbox intersection test; checking only whether the query's
    # edges fall inside self._bounds would miss the case where the query
    # bbox fully contains the source bbox
    intersects = (left <= self._bounds[2] and right >= self._bounds[0]
                  and bottom <= self._bounds[3] and top >= self._bounds[1])

    if intersects and self._minzoom <= zoom <= self._maxzoom:
        yield Source(self._source, self._name, self._resolution, {}, {},
                     {"imagery": True})
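# Standalone demo of the bbox intersection test used above, covering the case
# the original edge-containment check missed: a query bbox that fully contains
# the source bbox. Purely illustrative; bboxes_intersect is a hypothetical helper.
def bboxes_intersect(a, b):
    """a and b are (left, bottom, right, top) tuples."""
    return (a[0] <= b[2] and a[2] >= b[0]
            and a[1] <= b[3] and a[3] >= b[1])

assert bboxes_intersect((-10, -10, 10, 10), (-1, -1, 1, 1))  # a contains b
assert bboxes_intersect((-1, -1, 1, 1), (-10, -10, 10, 10))  # b contains a
assert not bboxes_intersect((0, 0, 1, 1), (2, 2, 3, 3))      # disjoint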
def get_sources(self, bounds, resolution):
    recipes = {"imagery": True}

    if self._rgb is not None:
        recipes["rgb_bands"] = list(map(int, self._rgb.split(",")))

    if self._nodata is not None:
        recipes["nodata"] = self._nodata

    if self._linear_stretch is not None:
        valid_values = ["per_band", "global", "if_needed"]
        if self._linear_stretch not in valid_values:
            self._linear_stretch = valid_values[0]
            LOG.debug(
                "Unrecognized linear_stretch value; falling back to: {0}".format(
                    self._linear_stretch))
        recipes["linear_stretch"] = self._linear_stretch

    if self._resample is not None:
        recipes["resample"] = self._resample

    if self._to_vis is not None:
        recipes["dst_min"] = 0
        recipes["dst_max"] = 255
        recipes["force_cast"] = "uint8"

    if self._dst_min is not None:
        recipes["dst_min"] = self._dst_min

    if self._dst_max is not None:
        recipes["dst_max"] = self._dst_max

    if self._force_cast is not None:
        recipes["force_cast"] = self._force_cast

    yield Source(
        url=self._uri,
        name=self._name,
        resolution=self._resolution,
        band_info={},
        meta=self._meta,
        recipes=recipes,
    )
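# Hedged sketch of what a "per_band" linear_stretch recipe typically means
# downstream: rescale each band independently to [0, 1] using its own
# min/max. This illustrates the concept only; it is not marblecutter's
# actual transformation code.
import numpy as np

def linear_stretch_per_band(pixels):
    """pixels: (bands, rows, cols) array; returns a stretched float32 copy."""
    out = np.empty_like(pixels, dtype=np.float32)
    for i, band in enumerate(pixels):
        lo, hi = np.nanmin(band), np.nanmax(band)
        # guard against constant bands to avoid division by zero
        out[i] = (band - lo) / (hi - lo) if hi > lo else 0.0
    return out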
def get_sources(self, bounds, resolution):
    cursor = self.conn.cursor()

    # TODO this is becoming relatively standard catalog boilerplate
    zoom = get_zoom(max(resolution))

    if bounds.crs == WGS84_CRS:
        left, bottom, right, top = bounds.bounds
    else:
        left, bottom, right, top = warp.transform_bounds(
            bounds.crs, WGS84_CRS, *bounds.bounds)

    left = left if left != Infinity else -180
    bottom = bottom if bottom != Infinity else -90
    right = right if right != Infinity else 180
    top = top if top != Infinity else 90

    try:
        query = """
            WITH bbox AS (
              SELECT SetSRID(GeomFromGeoJSON(?), 4326) geom
            ),
            uncovered AS (
              SELECT SetSRID(GeomFromGeoJSON(?), 4326) geom
            ),
            date_range AS (
              SELECT
                COALESCE(min(acquired_at), date('1970-01-01')) min,
                COALESCE(max(acquired_at), date('1970-01-01')) max
              FROM footprints
            )
            SELECT
              url,
              source,
              resolution,
              coalesce(band_info, '{{}}') band_info,
              coalesce(meta, '{{}}') meta,
              coalesce(recipes, '{{}}') recipes,
              acquired_at,
              null band, -- for Source constructor compatibility
              priority,
              ST_Area(ST_Intersection(uncovered.geom,
                ST_Difference(footprints.geom, footprints.mask))) /
                ST_Area(bbox.geom) coverage,
              AsGeoJSON(ST_Intersection(bbox.geom, footprints.geom)) geom,
              AsGeoJSON(ST_Intersection(footprints.mask, bbox.geom)) mask,
              AsGeoJSON(ST_Difference(uncovered.geom,
                ST_Difference(footprints.geom, footprints.mask))) uncovered
            FROM bbox, date_range, footprints
            JOIN uncovered ON ST_Intersects(footprints.geom, uncovered.geom)
            WHERE footprints.source || ' - ' || footprints.url NOT IN ({id_placeholders})
              AND ? BETWEEN min_zoom AND max_zoom
            ORDER BY
              10 * coalesce(footprints.priority, 0.5) *
              .1 * (1 - (strftime('%s') -
                strftime('%s', COALESCE(acquired_at, date('2000-01-01')))) /
                (strftime('%s') - strftime('%s', date_range.min))) *
              50 *
              -- de-prioritize over-zoomed sources
              CASE WHEN ? / footprints.resolution >= 1
                   THEN 1
                   ELSE 1 / footprints.resolution
              END *
              ST_Area(ST_Intersection(bbox.geom, footprints.geom)) /
                ST_Area(bbox.geom) DESC
            LIMIT 1
        """

        bbox = json.dumps({
            "type": "Polygon",
            "coordinates": [[
                [left, bottom],
                [left, top],
                [right, top],
                [right, bottom],
                [left, bottom],
            ]],
        })

        uncovered = bbox
        ids = set()

        while True:
            id_placeholders = ", ".join("?" * len(ids))

            cursor.execute(
                query.format(id_placeholders=id_placeholders),
                (bbox, uncovered) + tuple(ids) + (zoom, min(resolution)),
            )

            count = 0

            for record in cursor:
                count += 1

                (
                    url,
                    source,
                    res,
                    band_info,
                    meta,
                    recipes,
                    acquired_at,
                    band,
                    priority,
                    coverage,
                    _,
                    mask,
                    uncovered,
                ) = record

                if mask is not None:
                    mask = json.loads(mask)

                yield Source(
                    url,
                    source,
                    res,
                    json.loads(band_info),
                    json.loads(meta),
                    json.loads(recipes),
                    acquired_at,
                    band,
                    priority,
                    coverage,
                    mask=mask,
                )

                ids.add(source + " - " + url)

            if count == 0 or uncovered is None:
                break
    except Exception as e:
        LOG.exception(e)
    finally:
        cursor.close()
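# A minimal pure-Python sketch of the query-subtract-repeat strategy the SQL
# loop above implements, using shapely in place of SpatiaLite. The names
# (greedy_cover, footprints) are hypothetical, and scoring is reduced to
# intersection area, whereas the real query also weighs priority, recency,
# and resolution.
from shapely.geometry import box

def greedy_cover(bbox, footprints):
    """Pick footprints until bbox is covered or no candidate still helps."""
    uncovered = box(*bbox)
    chosen = []
    candidates = list(footprints)
    while candidates and not uncovered.is_empty:
        # best candidate for what's *still* uncovered, like the LIMIT 1 query
        best = max(candidates, key=lambda fp: fp.intersection(uncovered).area)
        if best.intersection(uncovered).area == 0:
            break
        chosen.append(best)
        candidates.remove(best)
        # shrink the uncovered region, like the `uncovered` GeoJSON parameter
        uncovered = uncovered.difference(best)
    return chosen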
        except Exception as e:
            LOG.warning(e)
            # format_exc() takes no exception argument; it formats the
            # exception currently being handled
            LOG.warning(traceback.format_exc())
        finally:
            cursor.close()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    # curl http://localhost:8000/imagery/13-15/4393/2357.geojson > data.json
    with open("data.json") as f:
        features = json.load(f)

    cat = SpatialiteCatalog()

    for feature in features["features"]:
        cat.add_source(Source(geom=feature["geometry"], **feature["properties"]))

    # each assignment below overrides the previous; keep the one to test
    # z14
    bounds = Bounds(
        (13.0517578125, 60.46805012087461, 13.07373046875, 60.4788788301667),
        WGS84_CRS)
    # z13
    bounds = Bounds(
        (13.0517578125, 60.45721779774396, 13.095703125, 60.4788788301667),
        WGS84_CRS)
    # 8/136/72
    bounds = Bounds((11.25, 60.930432202923335, 12.65625, 61.60639637138628),
                    WGS84_CRS)
def render_png_from_stac_catalog(z, x, y, scale=1):
    with Timer("rendering png from stac catalog"):
        stac_url = request.args.get("url", None)
        jq_filter = request.args.get("jq", None)
        stac_expr = request.args.get("expr", None)
        stac_datetime = request.args.get("datetime", None)

        # size of the tile, usually (256, 256)
        shape = tuple(map(int, Affine.scale(scale) * TILE_SHAPE))

        if stac_expr:
            # captures asset-band combos
            # like B5[0] in (B5[0] - B4[0])/(B5[0] + B4[0])
            # or like NIR in (NIR - RED) / (NIR + RED)
            asset_band_regex = r"(?P<asset>[A-Za-z][A-Za-z0-9]+)(?:\[(?P<band>\d+)\])?"

            matches = list(set(re.findall(asset_band_regex, stac_expr)))

            # sorted list of assets and bands
            # like [('B4', 0), ('B5', 0)]
            # or like [('NIR', 0), ('RED', 0)]
            asset_bands = sorted(
                set((asset, int(band) if band else 0)
                    for asset, band in matches))

            # sorted list of asset names
            # like ['B4', 'B5']
            # or like ['NIR', 'RED']
            asset_names = sorted(set(asset for asset, band in asset_bands))

            # convert expr from a format for running band math across multiple assets
            # into a format for running band math on a single file for the combined assets
            # from: (NIR - RED) / (NIR + RED)
            # to: (b1 - b2) / (b1 + b2)
            def repl(m):
                asset, band = m.groups()
                band = int(band) if band else 0
                # add one to the index because single-file band math
                # requires that band indexes start at 1, i.e. b1, b2, b3...
                return "b" + str(asset_bands.index((asset, band)) + 1)

            expr = re.sub(asset_band_regex, repl, stac_expr)
        else:
            asset_names = None
            expr = None

        tile = mercantile.Tile(x, y, z)
        tile_bounds = mercantile.bounds(tile)
        tile_bbox = [
            tile_bounds.west, tile_bounds.south, tile_bounds.east,
            tile_bounds.north
        ]

        # search with the parent tile's bbox because some search engines
        # return no results when the query bbox is very small
        parent_tile = mercantile.parent(tile)
        search_bounds = mercantile.bounds(parent_tile)
        search_bbox = [
            search_bounds.west, search_bounds.south, search_bounds.east,
            search_bounds.north
        ]

        tile_polygon = box(*tile_bbox)

        params = {
            'bbox': str(search_bbox).replace(' ', ''),
            'limit': 500,
        }
        if stac_datetime:
            params['datetime'] = stac_datetime

        with Timer("querying stac"):
            response = requests.get(stac_url, params=params)
        features = response.json()['features']
        LOG.info(f'number of features: {len(features)}')

        # filter features to those that overlap the tile
        features = [
            feature for feature in features
            if box(*feature['bbox']).intersects(tile_polygon)
        ]
        feature_count = len(features)
        LOG.info(
            f'number of features after filtering by feature extent: {feature_count}'
        )

        if jq_filter:
            features = jq.compile(jq_filter).input(features).first()
            LOG.info(
                f'number of features after filtering by jq expression: {len(features)}'
            )

        canvas_bounds = Bounds(bounds=mercantile.xy_bounds(tile),
                               crs=WEB_MERCATOR_CRS)
        LOG.info(f'canvas bounds: {canvas_bounds}')

        assets = []
        for fid, feature in enumerate(features):
            images = {}
            if asset_names:
                for asset_name in asset_names:
                    images[asset_name] = feature['assets'][asset_name]['href']
            elif 'visual' in feature['assets']:
                images['visual'] = feature['assets']['visual']['href']
            else:
                # raising a bare string is a TypeError in Python 3
                raise ValueError(
                    "Not sure what assets to use to create the image")

            for asset_name, href in images.items():
                assets.append({"fid": fid, "name": asset_name, "url": href})

        if expr and len(asset_names) > 0:

            def add_pixels_to_asset(asset):
                try:
                    url = asset['url']
                    with Timer(f'reading pixels for {url}'):
                        source = Source(url=url, name=url, resolution=None)
                        with get_source(url) as src:
                            with Timer(f'reading window for {url}'):
                                asset['pixels'] = read_window(
                                    src, canvas_bounds, shape, source)
                except Exception as e:
                    LOG.error(e)
                    raise

            with Timer('reading all the pixels'):
                with futures.ThreadPoolExecutor(
                        max_workers=MAX_WORKERS) as executor:
                    # consume the iterator so worker exceptions propagate
                    # instead of being silently dropped
                    list(executor.map(add_pixels_to_asset, assets))

            sources = []

            # groupby requires input sorted by key; assets were appended in
            # fid order, so adjacent grouping is safe here
            for fid, group in groupby(assets, lambda asset: asset['fid']):
                group = list(group)
                name_to_pixels = {
                    asset['name']: asset['pixels']
                    for asset in group
                }
                windows = tuple(name_to_pixels[name].data[band]
                                for name, band in asset_bands)
                stacked = np.ma.stack(windows)
                names = [asset['name'] for asset in group]
                source = Source(url=None,
                                name=str(fid) + '{' + ','.join(names) + '}',
                                resolution=None,
                                expr=expr,
                                pixels=PixelCollection(stacked, canvas_bounds),
                                recipes={
                                    "expr": expr,
                                    "imagery": True
                                } if expr else {})
                sources.append(source)
        else:
            sources = [
                Source(url=asset['url'],
                       name=asset['name'],
                       resolution=None,
                       recipes={
                           "expr": expr,
                           "imagery": True
                       } if expr else {}) for asset in assets
            ]

        headers, data = tiling.render_tile_from_sources(
            tile,
            sources,
            format=IMAGE_FORMAT,
            transformation=IMAGE_TRANSFORMATION,
            scale=scale,
        )

        return data, 200, headers
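# Standalone demo of the expression rewriting performed in
# render_png_from_stac_catalog: asset-qualified band math is rewritten to
# single-file band indexes (b1, b2, ...) in sorted asset order.
import re

asset_band_regex = r"(?P<asset>[A-Za-z][A-Za-z0-9]+)(?:\[(?P<band>\d+)\])?"
stac_expr = "(NIR - RED) / (NIR + RED)"

asset_bands = sorted(
    set((asset, int(band) if band else 0)
        for asset, band in re.findall(asset_band_regex, stac_expr)))

expr = re.sub(
    asset_band_regex,
    lambda m: "b" + str(
        asset_bands.index(
            (m.group("asset"),
             int(m.group("band")) if m.group("band") else 0)) + 1),
    stac_expr,
)
assert expr == "(b1 - b2) / (b1 + b2)"  # NIR -> b1, RED -> b2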