コード例 #1
0
    def worker(idx):
        """Download one random map tile and paste it into the shared mosaic.

        Picks a random feature and zoom level, burns the feature to tiles,
        fetches one tile image from the Mapbox raster API and pastes it into
        ``new_im`` at the pixel offset given by ``idx``. Loops until a usable
        tile has been pasted.

        Args:
            idx: mapping with pixel offsets 'c' (column) and 'r' (row).

        Returns:
            True once a tile has been pasted.
        """
        nb_features = len(features)
        while True:
            zoom = randint(min_zoom, max_zoom)
            feature = features[randint(0, nb_features - 1)]

            tiles = burn([feature], zoom)
            nb_mctiles = len(tiles)
            tile = tiles[randint(0, nb_mctiles - 1)]
            tile_x = tile[0]
            tile_y = tile[1]
            tile_z = tile[2]

            url = f'https://api.mapbox.com/v4/{mapid}/{tile_z}/{tile_x}/{tile_y}@2x.jpg?access_token={access_token}'
            response = requests.get(url)
            if response.status_code != 200:
                continue

            # Content-Length may be missing (e.g. chunked responses); the old
            # int(None) would raise TypeError. Skip small/blank tiles too.
            size = response.headers.get('Content-Length')
            if size is None or int(size) < 10000:
                continue

            new_im.paste(Image.open(BytesIO(response.content)), (idx['c'], idx['r']))
            return True
コード例 #2
0
    def geojson_parse_polygon(zoom, srid, feature_map, polygon):
        """Index a GeoJSON Polygon by the mercator tiles it covers at ``zoom``.

        Args:
            zoom: tile zoom level to burn the polygon at.
            srid: EPSG code of the polygon coordinates; reprojected to 4326 if needed.
            feature_map: defaultdict(list) mapping mercantile.Tile -> feature dicts.
            polygon: GeoJSON Polygon geometry (coordinates truncated to 2D in place).

        Returns:
            The updated ``feature_map``.
        """

        # GeoJSON coordinates could be N dimensional; keep only x, y.
        # (Equivalent to the old zip([point[0]], [point[1]]) construction.)
        for i, ring in enumerate(polygon["coordinates"]):
            polygon["coordinates"][i] = [[point[0], point[1]] for point in ring]

        if srid != 4326:
            polygon = transform_geom(CRS.from_epsg(srid), CRS.from_epsg(4326),
                                     polygon)

        try:
            for tile in burntiles.burn([{
                    "type": "feature",
                    "geometry": polygon
            }],
                                       zoom=zoom):
                feature_map[mercantile.Tile(*tile)].append({
                    "type": "feature",
                    "geometry": polygon
                })
        except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
            pass

        return feature_map
コード例 #3
0
    def geojson_parse_polygon(zoom, srid, feature_map, polygon):
        """Map the mercator tiles covered by ``polygon`` at ``zoom`` into ``feature_map``."""

        if srid != 4326:
            reprojected = geojson_reproject(
                {"type": "feature", "geometry": polygon}, srid, 4326)
            polygon = list(reprojected)[0]

        # GeoJSON coordinates could be N dimensional; truncate every point to x, y.
        for index in range(len(polygon["coordinates"])):
            ring = polygon["coordinates"][index]
            polygon["coordinates"][index] = [
                [pt[0], pt[1]] for pt in ring for pt_x, pt_y in zip([pt[0]], [pt[1]])
            ]

        if polygon["coordinates"]:
            wrapped = {"type": "feature", "geometry": polygon}
            for tile in burntiles.burn([wrapped], zoom=zoom):
                feature_map[mercantile.Tile(*tile)].append(
                    {"type": "feature", "geometry": polygon})

        return feature_map
コード例 #4
0
    def geojson_parse_polygon(zoom, srid, feature_map, polygon):
        """Index a GeoJSON Polygon by the mercator tiles it covers at ``zoom``.

        Reprojects from ``srid`` to EPSG:4326 when needed, truncating the
        coordinates to 2D in place. Returns the updated ``feature_map``
        (mercantile.Tile -> list of feature dicts).
        """

        if isinstance(polygon["coordinates"],
                      list):  # https://github.com/Toblerity/Shapely/issues/245
            # GeoJSON coordinates could be N dimensional; keep only x, y.
            for i, ring in enumerate(polygon["coordinates"]):
                polygon["coordinates"][i] = [[point[0], point[1]]
                                             for point in ring]

        if srid != 4326:
            try:
                polygon = transform_geom(CRS.from_epsg(srid),
                                         CRS.from_epsg(4326), polygon)
            except Exception:  # negative buffer could lead to empty/invalid geom
                return feature_map

        try:
            for tile in burntiles.burn([{
                    "type": "feature",
                    "geometry": polygon
            }],
                                       zoom=zoom):
                feature_map[mercantile.Tile(*tile)].append({
                    "type": "feature",
                    "geometry": polygon
                })
        except Exception:  # burn raises on degenerate geometries; skip them
            pass

        return feature_map
コード例 #5
0
def main(args):
    """Write the tile cover of a GeoJSON file, bbox or slippy-map dir to CSV."""

    if args.type in ("geojson", "bbox") and not args.zoom:
        sys.exit("Zoom parameter is required")

    cover = []

    if args.type == "geojson":
        with open(args.input) as f:
            features = json.load(f)

        for feature in tqdm(features["features"], ascii=True, unit="feature"):
            burned = burntiles.burn([feature], args.zoom).tolist()
            cover.extend(map(tuple, burned))

        # tiles can overlap for multiple features; keep unique tile ids
        cover = list(set(cover))

    elif args.type == "bbox":
        west, south, east, north = (float(v) for v in args.input.split(","))
        cover = tiles(west, south, east, north, args.zoom)

    elif args.type == "dir":
        cover = [tile for tile, _ in tiles_from_slippy_map(args.input)]

    out_dir = os.path.dirname(args.out)
    if out_dir and not os.path.isdir(out_dir):
        os.makedirs(out_dir, exist_ok=True)

    with open(args.out, "w") as fp:
        csv.writer(fp).writerows(cover)
コード例 #6
0
ファイル: utils.py プロジェクト: avinash-chouhan/deepres
def get_list_of_mercator_tiles(path):
    """Return the mercator tiles covering the raster at ``path``.

    The zoom level is derived from the raster's pixel size.
    """
    bounds = get_bounds_from_raster(path)
    with rasterio.open(path) as src:
        zoom = get_zoom_level_for_pixel_size(src.res[0])
        tile_set = burntiles.burn(bounds, zoom)

    return tile_set
コード例 #7
0
def main(args):
    """Rasterize GeoJSON polygon features into per-tile mask PNGs.

    Reads the dataset config for a binary (background/foreground) palette,
    maps every Polygon feature to the tiles it covers at ``args.zoom``, then
    writes one paletted PNG per tile from ``args.tiles`` into a slippy-map
    directory under ``args.out``, merging with any previously written mask.
    """
    dataset = load_config(args.dataset)

    classes = dataset["common"]["classes"]
    colors = dataset["common"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"

    assert len(colors) == 2, "only binary models supported right now"
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc["features"], ascii=True, unit="feature")):

        if feature["geometry"]["type"] != "Polygon":
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError:  # burn raises on degenerate geometries
            print("Warning: invalid feature {}, skipping".format(i),
                  file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        # Merge with a previously written mask so repeated runs accumulate.
        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out.save(out_path, optimize=True)
コード例 #8
0
def burn(features, sequence, zoom):
    """
    Burn a stream of GeoJSONs into a output stream of the tiles they intersect for a given zoom.
    """
    polygons = list(super_utils.filter_polygons(features))
    for tile in burntiles.burn(polygons, zoom):
        click.echo(tile.tolist())
コード例 #9
0
ファイル: cli.py プロジェクト: mapbox/supermercado
def burn(features, sequence, zoom):
    """
    Burn a stream of GeoJSONs into a output stream of the tiles they intersect for a given zoom.
    """
    filtered = [feature for feature in super_utils.filter_polygons(features)]
    burned = burntiles.burn(filtered, zoom)
    for row in burned:
        click.echo(row.tolist())
コード例 #10
0
def tile_cover(geometry, z):
    """Return the quadtree keys of the google tiles covering ``geometry`` at zoom ``z``."""
    import geopandas as gpd
    from supermercado import burntiles, super_utils
    from pygeotile.tile import Tile

    feature = gpd.GeoSeries([geometry]).__geo_interface__['features'][0]
    polygons = [f for f in super_utils.filter_polygons([feature])]
    quadkeys = []
    for burned in burntiles.burn(polygons, z):
        quadkeys.append(Tile.from_google(*burned).quad_tree)
    return quadkeys
コード例 #11
0
ファイル: cover.py プロジェクト: shepherdmeng/robosat
def main(args):
    """Write the tile cover of a GeoJSON feature collection to a CSV file."""
    with open(args.features) as f:
        features = json.load(f)

    tiles = []
    for feature in tqdm(features['features'], ascii=True, unit='feature'):
        burned = burntiles.burn([feature], args.zoom)
        tiles.extend(burned)

    with open(args.out, 'w') as fp:
        csv.writer(fp).writerows(tiles)
コード例 #12
0
def main(args):
    """Rasterize GeoJSON polygon features into per-tile paletted PNG masks."""
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'

    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc['features'], ascii=True, unit='feature')):

        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print('Warning: invalid feature {}, skipping'.format(i),
                  file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit='tile'):
        if tile in feature_map:
            # NOTE(review): putpalette() below assumes burn() returns a PIL
            # Image; sibling variants in this project convert a numpy array
            # with Image.fromarray first -- confirm against the local burn().
            out = burn(tile, feature_map[tile], args.size)
        else:
            # NOTE(review): Image.fromarray of an int64 array with mode='P'
            # looks fragile; other variants use dtype=np.uint8 -- verify.
            out = Image.fromarray(np.zeros(shape=(args.size,
                                                  args.size)).astype(int),
                                  mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)),
                 optimize=True)
コード例 #13
0
    def geojson_parse_polygon(zoom, feature_map, polygon, i):
        """Index a GeoJSON Polygon by the mercator tiles it covers at ``zoom``.

        Args:
            zoom: tile zoom level.
            feature_map: defaultdict(list) mapping mercantile.Tile -> feature dicts.
            polygon: GeoJSON Polygon geometry (coordinates truncated to 2D in place).
            i: index of the feature, reported in the warning on failure.

        Returns:
            The updated ``feature_map``.
        """

        try:
            # GeoJSON coordinates could be N dimensionals; keep only x, y.
            # The ring index must not reuse ``i``: it previously shadowed the
            # feature index, so the warning below logged the wrong number.
            for ring_idx, ring in enumerate(polygon["coordinates"]):
                polygon["coordinates"][ring_idx] = [[point[0], point[1]] for point in ring]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=zoom):
                feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map
def main(args):
    """Write the unique tile cover of a GeoJSON feature collection to CSV."""
    with open(args.features) as f:
        collection = json.load(f)

    cover = []
    for feature in tqdm(collection['features'], ascii=True, unit='feature'):
        burned = burntiles.burn([feature], args.zoom).tolist()
        cover.extend(tuple(row) for row in burned)

    # tiles can overlap for multiple features; unique tile ids
    cover = list(set(cover))

    with open(args.out, 'w') as fp:
        csv.writer(fp).writerows(cover)
コード例 #15
0
def main(args):
    """Write the unique tile cover of a GeoJSON feature collection to CSV."""
    with open(args.features) as f:
        features = json.load(f)

    tiles = []

    for feature in tqdm(features['features'], ascii=True, unit='feature'):
        # burn() returns a numpy array of [x, y, z] rows; numpy arrays are not
        # hashable, so convert rows to tuples or set(tiles) below raises
        # TypeError (the sibling variants of this function already do this).
        tiles.extend(map(tuple, burntiles.burn([feature], args.zoom).tolist()))

    # tiles can overlap for multiple features; unique tile ids
    tiles = list(set(tiles))

    with open(args.out, 'w') as fp:
        writer = csv.writer(fp)
        writer.writerows(tiles)
コード例 #16
0
ファイル: cover.py プロジェクト: npauzenga/robosat_labeling
def main(args):
    """Write the unique tile cover of a GeoJSON feature collection to CSV."""
    with open(args.features) as f:
        feature_collection = json.load(f)

    unique = set()
    for feature in tqdm(feature_collection["features"], ascii=True, unit="feature"):
        # tiles can overlap for multiple features; the set keeps tile ids unique
        for row in burntiles.burn([feature], args.zoom).tolist():
            unique.add(tuple(row))

    tiles = list(unique)

    with open(args.out, "w") as fp:
        writer = csv.writer(fp)
        writer.writerows(tiles)
コード例 #17
0
    def tiles_cover(self):
        """Generate the unique tiles covering the GeoJSON file and write them to CSV.

        Reads ``self.gis_path`` and writes one x/y/z row per covered tile to
        ``self.tiles_cover_path``.
        """
        # Use a context manager: json.load(open(...)) leaked the file handle.
        with open(self.gis_path) as f:
            features = json.load(f)

        tiles = []
        for feature in features["features"]:
            tiles.extend(
                map(tuple,
                    burntiles.burn([feature], self.zoom).tolist()))

        # tiles can overlap for multiple features; keep unique tile ids
        tiles = list(set(tiles))

        with open(self.tiles_cover_path, "w") as fp:
            writer = csv.writer(fp)
            writer.writerows(tiles)
def main(args):
    """Rasterize GeoJSON polygon features into per-tile paletted PNG masks."""
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'

    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(tqdm(fc['features'], ascii=True, unit='feature')):

        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print('Warning: invalid feature {}, skipping'.format(i), file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit='tile'):
        if tile in feature_map:
            # NOTE(review): putpalette() below assumes burn() returns a PIL
            # Image; sibling variants in this project convert the numpy array
            # with Image.fromarray first -- confirm against the local burn().
            out = burn(tile, feature_map[tile], args.size)
        else:
            # NOTE(review): Image.fromarray of an int64 array with mode='P'
            # looks fragile; other variants use dtype=np.uint8 -- verify.
            out = Image.fromarray(np.zeros(shape=(args.size, args.size)).astype(int), mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)), optimize=True)
コード例 #19
0
    def map_masks(self, colors):
        """Rasterize GeoJSON polygon features into per-tile mask images.

        Tiles are read from ``self.tiles_cover_path``; masks are written to
        ``self.masks_path`` as ``z_x_y.<mask_format>`` paletted images, with
        colors[0] as background and colors[1] as foreground.
        """
        os.makedirs(self.masks_path, exist_ok=True)
        assert all(tile.z == self.zoom
                   for tile in tiles_from_csv(self.tiles_cover_path))

        with open(self.gis_path) as f:
            fc = json.load(f)

        # Map each covered tile to the features intersecting it, for quick lookup.
        feature_map = collections.defaultdict(list)
        for index, feature in enumerate(fc["features"]):
            if feature["geometry"]["type"] != "Polygon":
                continue
            try:
                for tile in burntiles.burn([feature], zoom=self.zoom):
                    feature_map[mercantile.Tile(*tile)].append(feature)
            except ValueError:  # as e:
                print("Warning: invalid feature {}, skipping".format(index),
                      file=sys.stderr)
                continue

        # Burn features to tiles and write each mask to an image file.
        for tile in list(tiles_from_csv(self.tiles_cover_path)):
            if tile in feature_map:
                mask = burn(tile, feature_map[tile], self.size)
            else:
                mask = np.zeros(shape=(self.size, self.size), dtype=np.uint8)

            x, y, z = map(str, [tile.x, tile.y, tile.z])
            out_path = os.path.join(
                self.masks_path,
                "{}_{}_{}.{}".format(z, x, y, self.mask_format))

            image = Image.fromarray(mask, mode="P")
            image.putpalette(make_palette(colors[0], colors[1]))
            image.save(out_path, optimize=True)
コード例 #20
0
def vector_tile_to_geojson(uri, zoom, map_extent):
    """Get GeoJSON features that overlap with an extent from a vector tile endpoint."""
    log.info('Downloading and converting vector tiles to GeoJSON...')

    # Burn the extent polygon to find which z/x/y tiles cover it.
    extent_feature = {
        'type': 'Feature',
        'properties': {},
        'geometry': {
            'type': 'Polygon',
            'coordinates': [map_extent.geojson_coordinates()]
        }
    }
    xyzs = burn([extent_feature], zoom)

    # Retrieve the features of every covering tile.
    features = []
    for x, y, z in xyzs:
        # If this isn't a zxy schema, this is a no-op.
        tile_uri = uri.format(x=x, y=y, z=z)
        features.extend(get_tile_features(tile_uri, z, x, y))

    # Crop features to the extent, dropping anything left empty.
    extent_geom = map_extent.to_shapely()
    cropped_features = []
    for feature in features:
        geom = shape(feature['geometry'])
        if feature['geometry']['type'].lower() in ('polygon', 'multipolygon'):
            geom = geom.buffer(0)  # repair self-intersections before clipping
        geom = geom.intersection(extent_geom)
        if geom.is_empty:
            continue
        cropped = dict(feature)
        cropped['geometry'] = mapping(geom)
        cropped_features.append(cropped)

    return {'type': 'FeatureCollection', 'features': cropped_features}
コード例 #21
0
def main(args):
    """Build the tile -> features index for a binary dataset, then rasterize.

    Loads the dataset config (exactly two colors: background, foreground),
    indexes every Polygon feature by the tiles it covers at ``args.zoom``,
    and delegates the actual rasterization to ``single_burning``.
    """
    dataset = load_config(args.dataset)

    classes = dataset["common"]["classes"]
    colors = dataset["common"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"

    assert len(colors) == 2, "only binary models supported right now"
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc["features"], ascii=True, unit="feature")):

        if feature["geometry"]["type"] != "Polygon":
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError:  # burn raises on degenerate geometries
            print("Warning: invalid feature {}, skipping".format(i),
                  file=sys.stderr)
            continue

    single_burning(args, feature_map, bg, fg)
コード例 #22
0
ファイル: cover.py プロジェクト: MinnDevelopment/robosat.pink
def main(args):
    """Compute a tile cover from one input source and write it to CSV file(s).

    Exactly one of --bbox, --geojson, --dir, --xyz, --raster or --cover must
    be provided. The cover is re-tiled to args.zoom where needed, optionally
    shuffled and split into several output CSVs per args.splits percentages.
    """

    if (
        int(args.bbox is not None)
        + int(args.geojson is not None)
        + int(args.dir is not None)
        + int(args.xyz is not None)
        + int(args.raster is not None)
        + int(args.cover is not None)
        != 1
    ):
        sys.exit("ERROR: One, and only one, input type must be provided, among: --dir, --bbox, --cover or --geojson.")

    if args.bbox:
        try:
            # Either "w,s,e,n,crs" or plain "w,s,e,n" (assumed EPSG:4326).
            w, s, e, n, crs = args.bbox.split(",")
            w, s, e, n = map(float, (w, s, e, n))
        except Exception:
            try:
                crs = None
                w, s, e, n = map(float, args.bbox.split(","))
            except Exception:
                sys.exit("ERROR: invalid bbox parameter.")

    if args.splits:

        try:
            splits = [int(split) for split in args.splits.split("/")]
            assert len(splits) == len(args.out)
            assert sum(splits) == 100
        except Exception:
            sys.exit("ERROR: Invalid split value or incoherent with provided out paths.")

    if not args.zoom and (args.geojson or args.bbox or args.raster):
        sys.exit("ERROR: Zoom parameter is required.")

    args.out = [os.path.expanduser(out) for out in args.out]

    cover = []

    if args.raster:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.raster, args.zoom))
        with rasterio_open(os.path.expanduser(args.raster)) as r:
            try:
                w, s, e, n = transform_bounds(r.crs, "EPSG:4326", *r.bounds)
            except Exception:
                sys.exit("ERROR: unable to deal with raster projection")

            cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.geojson:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.geojson, args.zoom))
        with open(os.path.expanduser(args.geojson)) as f:
            features = json.load(f)

        try:
            for feature in tqdm(features["features"], ascii=True, unit="feature"):
                cover.extend(map(tuple, burntiles.burn([feature], args.zoom).tolist()))
        except Exception:
            sys.exit("ERROR: invalid or unsupported GeoJSON.")

        cover = list(set(cover))  # tiles can overlap for multiple features; unique tile ids

    if args.bbox:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.bbox, args.zoom))
        if crs:
            try:
                w, s, e, n = transform_bounds(crs, "EPSG:4326", w, s, e, n)
            except Exception:
                sys.exit("ERROR: unable to deal with raster projection")

        cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.cover:
        print("RoboSat.pink - cover from {}".format(args.cover))
        cover = [tile for tile in tiles_from_csv(args.cover)]

    if args.dir:
        print("RoboSat.pink - cover from {}".format(args.dir))
        cover = [tile for tile in tiles_from_dir(args.dir, xyz=False)]

    if args.xyz:
        print("RoboSat.pink - cover from {}".format(args.xyz))
        cover = [tile for tile in tiles_from_dir(args.xyz, xyz=True)]

    # Re-tile any tile whose zoom differs from the requested one, keeping the
    # resulting cover unique. A seen-set replaces the previous O(n^2)
    # membership scan over _cover (tiles are hashable tuples/namedtuples).
    _cover = []
    seen = set()
    for tile in tqdm(cover, ascii=True, unit="tile"):
        if args.zoom and tile.z != args.zoom:
            # transform_bounds returns (west, south, east, north); the old
            # code unpacked these into the misleading names w, s, n, e.
            w, s, e, n = transform_bounds("EPSG:3857", "EPSG:4326", *xy_bounds(tile))
            for subtile in tiles(w, s, e, n, args.zoom):
                if subtile not in seen:
                    seen.add(subtile)
                    _cover.append(subtile)
        else:
            seen.add(tile)
            _cover.append(tile)
    cover = _cover

    if args.splits:
        shuffle(cover)  # in-place
        splits = [math.floor(len(cover) * split / 100) for split in splits]
        s = 0
        covers = []
        for e in splits:
            # The slice end was previously `s + e - 1`, which silently dropped
            # one tile from every split; take the full chunk.
            covers.append(cover[s : s + e])
            s += e
    else:
        covers = [cover]

    for i, cover in enumerate(covers):

        if os.path.dirname(args.out[i]) and not os.path.isdir(os.path.dirname(args.out[i])):
            os.makedirs(os.path.dirname(args.out[i]), exist_ok=True)

        with open(args.out[i], "w") as fp:
            csv.writer(fp).writerows(cover)
コード例 #23
0
ファイル: utils.py プロジェクト: bekerov/cogeo-mosaic
def create_mosaic(
    dataset_list: Tuple,
    minzoom: int = None,
    maxzoom: int = None,
    max_threads: int = 20,
    minimum_tile_cover: float = None,
    tile_cover_sort: bool = False,
    version: str = "0.0.2",
    quiet: bool = True,
) -> Dict:
    """
    Create mosaic definition content.

    Attributes
    ----------
        dataset_list : tuple or list, required
            Dataset urls.
        minzoom: int, optional
            Force mosaic min-zoom.
        maxzoom: int, optional
            Force mosaic max-zoom.
        minimum_tile_cover: float, optional (default: 0)
            Filter files with low tile intersection coverage.
        tile_cover_sort: bool, optional (default: None)
            Sort intersecting files by coverage.
        max_threads : int
            Max threads to use (default: 20).
        version: str, optional
            mosaicJSON definition version
        quiet: bool, optional (default: True)
            Suppress processing-step output.

    Returns
    -------
        mosaic_definition : dict
            Mosaic definition.

    """
    if version not in ["0.0.1", "0.0.2"]:
        raise Exception(f"Invalid mosaicJSON's version: {version}")

    if not quiet:
        click.echo("Get files footprint", err=True)

    results = get_footprints(dataset_list,
                             max_threads=max_threads,
                             quiet=quiet)

    # Derive min/max zoom from the dataset footprints when not forced,
    # warning if the assets disagree among themselves.
    if minzoom is None:
        minzoom = list(set([feat["properties"]["minzoom"]
                            for feat in results]))
        if len(minzoom) > 1:
            warnings.warn("Multiple MinZoom, Assets different minzoom values",
                          UserWarning)

        minzoom = max(minzoom)

    if maxzoom is None:
        maxzoom = list(set([feat["properties"]["maxzoom"]
                            for feat in results]))
        if len(maxzoom) > 1:
            warnings.warn(
                "Multiple MaxZoom, Assets have multiple resolution values",
                UserWarning)

        maxzoom = max(maxzoom)

    # The quadkey index is always built at the mosaic's minzoom.
    quadkey_zoom = minzoom

    datatype = list(set([feat["properties"]["datatype"] for feat in results]))
    if len(datatype) > 1:
        raise Exception("Dataset should have the same data type")

    if not quiet:
        click.echo(f"Get quadkey list for zoom: {quadkey_zoom}", err=True)

    # Encode each burned tile as a "z-x-y" string (burn yields [x, y, z]).
    tiles = burntiles.burn(results, quadkey_zoom)
    tiles = ["{2}-{0}-{1}".format(*tile.tolist()) for tile in tiles]

    bounds = burntiles.find_extrema(results)
    mosaic_definition = dict(
        mosaicjson=version,
        minzoom=minzoom,
        maxzoom=maxzoom,
        bounds=bounds,
        # Center is the bbox midpoint at minzoom.
        center=[(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2,
                minzoom],
        tiles={},
        version="1.0.0",
    )

    if version == "0.0.2":
        mosaic_definition.update(dict(quadkey_zoom=quadkey_zoom))

    if not quiet:
        click.echo(f"Feed Quadkey index", err=True)

    # NOTE(review): this takes only the first ring of each footprint, i.e.
    # assumes footprints are single-ring polygons -- confirm upstream.
    dataset_geoms = polygons(
        [feat["geometry"]["coordinates"][0] for feat in results])
    dataset = [{
        "path": f["properties"]["path"],
        "geometry": geom
    } for (f, geom) in zip(results, dataset_geoms)]

    # For every quadkey tile, record the datasets whose footprint intersects it.
    for parent in tiles:
        z, x, y = list(map(int, parent.split("-")))
        parent = mercantile.Tile(x=x, y=y, z=z)
        quad = mercantile.quadkey(*parent)
        tile_geometry = polygons(
            mercantile.feature(parent)["geometry"]["coordinates"][0])
        fdataset = [
            dataset[idx] for idx in numpy.nonzero(
                intersects(tile_geometry, dataset_geoms))[0]
        ]
        if minimum_tile_cover is not None or tile_cover_sort:
            fdataset = _filter_and_sort(
                tile_geometry,
                fdataset,
                minimum_cover=minimum_tile_cover,
                sort_cover=tile_cover_sort,
            )
        if len(fdataset):
            mosaic_definition["tiles"][quad] = [f["path"] for f in fdataset]

    return mosaic_definition
コード例 #24
0
ファイル: utils.py プロジェクト: bekerov/cogeo-mosaic
def update_mosaic(
    dataset_list: Tuple,
    mosaic_def: dict,
    max_threads: int = 20,
    minimum_tile_cover: float = None,
) -> Dict:
    """
    Update an existing mosaic definition with new datasets.

    Attributes
    ----------
        dataset_list : tuple or list, required
            Dataset urls.
        mosaic_def : dict
            Mosaic definition to update.
        max_threads : int
            Max threads to use (default: 20).
        minimum_tile_cover: float, optional (default: 0)
            Filter files with low tile intersection coverage.

    Returns
    -------
        mosaic_definition : dict
            Updated mosaic definition.

    """
    # Bump the patch component of the semantic version, or start at 1.0.0.
    version = mosaic_def.get("version")
    if version:
        version = list(map(int, version.split(".")))
        version[-1] += 1
        version = ".".join(map(str, version))
    else:
        version = "1.0.0"
    mosaic_def["version"] = version

    results = get_footprints(dataset_list, max_threads=max_threads)
    min_zoom = mosaic_def["minzoom"]
    quadkey_zoom = mosaic_def.get("quadkey_zoom", min_zoom)

    # NOTE(review): this takes only the first ring of each footprint, i.e.
    # assumes footprints are single-ring polygons -- confirm upstream.
    dataset_geoms = polygons(
        [feat["geometry"]["coordinates"][0] for feat in results])
    for idx, r in enumerate(results):
        # Encode each burned tile as a "z-x-y" string (burn yields [x, y, z]).
        tiles = burntiles.burn([r], quadkey_zoom)
        tiles = ["{2}-{0}-{1}".format(*tile.tolist()) for tile in tiles]

        dataset = [{
            "path": r["properties"]["path"],
            "geometry": dataset_geoms[idx]
        }]
        # Append the dataset's path to every quadkey it covers.
        for parent in tiles:
            z, x, y = list(map(int, parent.split("-")))
            parent = mercantile.Tile(x=x, y=y, z=z)
            quad = mercantile.quadkey(*parent)
            tile_geometry = polygons(
                mercantile.feature(parent)["geometry"]["coordinates"][0])

            fdataset = dataset
            if minimum_tile_cover is not None:
                fdataset = _filter_and_sort(tile_geometry,
                                            fdataset,
                                            minimum_cover=minimum_tile_cover)

            if len(fdataset):
                dst_quad = mosaic_def["tiles"].get(quad, [])
                for f in fdataset:
                    dst_quad.append(f["path"])

                mosaic_def["tiles"][quad] = dst_quad

    # Recompute bounds/center from the full set of indexed quadkeys.
    tiles = [
        mercantile.quadkey_to_tile(qk) for qk in mosaic_def["tiles"].keys()
    ]
    bounds = tiles_to_bounds(tiles)

    mosaic_def["bounds"] = bounds
    mosaic_def["center"] = [
        (bounds[0] + bounds[2]) / 2,
        (bounds[1] + bounds[3]) / 2,
        mosaic_def["minzoom"],
    ]

    return mosaic_def
コード例 #25
0
ファイル: tile.py プロジェクト: ajijohn/planet-snowcover
def tile_image(imageFile, output_dir, zoom, cover=None, indexes = None, quant = None, aws_profile = None, skip_blanks = True, max_nodata_pct = 0.0):
    """
    Tile a raster into an OSM-style Z/X/Y slippy-map directory tree.

    Produces either (A) all tiles covering `imageFile` at `zoom`, or (B) only
    the tiles listed in `cover` (a CSV of z, x, y columns) that also cover the
    image. Tiles land in <output_dir>/Z/X/Y.png; `output_dir` may be an
    s3:// destination when `aws_profile` is given. If `quant` is set, bands
    are divided by it first (handled by the module-level `_write_tile`).
    `indexes` selects bands; `skip_blanks` / `max_nodata_pct` control which
    tiles are skipped — all forwarded to `_write_tile`.

    Returns the list of (tile, status) responses from the tile workers.
    """
    from shapely.geometry import box
    from json import loads
    from supermercado import burntiles

    def __load_cover_tiles(coverfile):
        # Cover file is a CSV with exactly three columns interpreted as z, x, y.
        # NOTE(review): columns are passed positionally as Tile(z, x, y) —
        # confirm `Tile`'s positional order matches the cover file's.
        coverTiles = pd.read_csv(coverfile)
        if len(coverTiles.columns) != 3:
            raise Exception("cover file needs to have 3 columns (z, x, y)")

        return [Tile(z, x, y) for _, (z, x, y) in list(coverTiles.iterrows())]


    # Open the raster; s3:// paths get AWS credentials via a rasterio Env.
    # NOTE(review): the dataset handle is used after the Env context exits —
    # appears to work for reads, but confirm against rasterio's Env docs.
    f = None
    if (imageFile.startswith("s3://")):
        with rio.Env(profile_name = aws_profile):
            f = rio.open(imageFile)
    else:
        f = rio.open(imageFile)

    # check crs: tiling assumes EPSG:4326 — reproject into an in-memory
    # raster otherwise and reopen from there.
    if int(f.crs.to_dict()['init'].split(":")[1]) != 4326:
        print(f"invalid crs ({f.crs.to_dict()['init']}), reprojecting raster....")
        f.close()
        mf = rio.io.MemoryFile()
        reproject_raster(imageFile, 4326, mf)
        mf.seek(0)

        f = mf.open()

        print(f"reproject successful {f.crs.to_dict()}")

    # Build the raster footprint as a GeoJSON feature list, the shape
    # supermercado's burn() expects.
    bbox = box(f.bounds.left, f.bounds.bottom, f.bounds.right, f.bounds.top)
    bbox = loads(gpd.GeoSeries(bbox).to_json())['features'] # need geojson dict

    # burn() yields [x, y, z]; unpacking as (z, x, y) then Tile(z, x, y)
    # positionally cancels out to mercantile's (x, y, z) ordering.
    tiles = [Tile(z, x, y) for z, x, y in burntiles.burn(bbox, zoom)]


    # Restrict to the requested cover, if any.
    covertiles = set()
    if cover is not None:
        covertiles = set(__load_cover_tiles(cover))
        tiles = set(tiles).intersection(covertiles)


    # Bind the per-tile writer with everything except the tile itself, then
    # fan out over a thread pool (workers share the open dataset handle `f`).
    __TILER = partial(_write_tile, image = f,
                     output_dir = output_dir, bands = indexes,
                     quant = quant, aws_profile = aws_profile,
                     skip_blanks = skip_blanks, nodata_val = f.nodata,
                     max_nodata_pct = max_nodata_pct)

    with futures.ThreadPoolExecutor() as executor:
        responses = list(executor.map(__TILER, tiles))

    # NOTE(review): raises ValueError if `responses` is empty (nothing to
    # unzip), and `f` is never closed on the 4326 path.
    tiles, status = zip(*responses) 

    print("#tiles: {} | written: {}\tfailed:{}".format(len(tiles), sum(status), len(tiles) - sum(status)))

    return(responses)
コード例 #26
0
    def _create_mosaic(
        cls,
        features: Sequence[Dict],
        minzoom: int,
        maxzoom: int,
        quadkey_zoom: Optional[int] = None,
        accessor: Callable[[Dict], str] = default_accessor,
        asset_filter: Callable = default_filter,
        version: str = "0.0.2",
        quiet: bool = True,
        **kwargs,
    ):
        """
        Build mosaic definition content from GeoJSON features.

        Attributes
        ----------
        features : List, required
            List of GeoJSON features.
        minzoom: int, required
            Force mosaic min-zoom.
        maxzoom: int, required
            Force mosaic max-zoom.
        quadkey_zoom: int, optional
            Force mosaic quadkey zoom (defaults to `minzoom`).
        accessor: callable, required
            Function called on each feature to get its identifier (default is feature["properties"]["path"]).
        asset_filter: callable, required
            Function to filter features.
        version: str, optional
            mosaicJSON definition version (default: 0.0.2).
        quiet: bool, optional (default: True)
            Mask processing steps.
        kwargs: any
            Options forwarded to `asset_filter`

        Returns
        -------
        mosaic_definition : MosaicJSON
            Mosaic definition.

        """
        # No forced quadkey zoom -> index at the mosaic min-zoom.
        quadkey_zoom = quadkey_zoom or minzoom

        if not quiet:
            click.echo(f"Get quadkey list for zoom: {quadkey_zoom}", err=True)

        # One polygon per feature, built from its exterior ring.
        dataset_geoms = polygons(
            [feat["geometry"]["coordinates"][0] for feat in features])
        bounds = list(total_bounds(dataset_geoms))

        # Every quadkey-zoom tile touched by at least one feature.
        covered = [
            mercantile.Tile(*t) for t in burntiles.burn(features, quadkey_zoom)
        ]

        mosaic_definition: Dict[str, Any] = {
            "mosaicjson": version,
            "minzoom": minzoom,
            "maxzoom": maxzoom,
            "quadkey_zoom": quadkey_zoom,
            "bounds": bounds,
            "center": ((bounds[0] + bounds[2]) / 2,
                       (bounds[1] + bounds[3]) / 2, minzoom),
            "tiles": {},
            "version": "1.0.0",
        }

        if not quiet:
            click.echo(f"Feed Quadkey index", err=True)

        # Spatial index over dataset footprints for fast per-tile queries.
        tree = STRtree(dataset_geoms)

        for t in covered:
            quadkey = mercantile.quadkey(t)
            tile_geom = polygons(
                mercantile.feature(t)["geometry"]["coordinates"][0])

            # Candidate datasets whose footprint intersects this tile.
            hit_idx = sorted(tree.query(tile_geom, predicate="intersects"))
            if not hit_idx:
                continue

            hit_features = tuple(features[i] for i in hit_idx)
            hit_geoms = tuple(dataset_geoms[i] for i in hit_idx)

            selection = asset_filter(t, hit_features, hit_geoms, **kwargs)
            if selection:
                mosaic_definition["tiles"][quadkey] = [
                    accessor(f) for f in selection
                ]

        return cls(**mosaic_definition)
コード例 #27
0
ファイル: stac.py プロジェクト: robert-werner/awspds-mosaic
def stac_to_mosaicJSON(
    query: Dict,
    minzoom: int = 7,
    maxzoom: int = 12,
    optimized_selection: bool = True,
    maximum_items_per_tile: int = 20,
    stac_collection_limit: int = 500,
    seasons: Tuple = ("spring", "summer", "autumn", "winter"),
    stac_url: str = os.environ.get("SATAPI_URL",
                                   "https://sat-api.developmentseed.org"),
) -> Dict:
    """
    Create a mosaicJSON from a stac request.

    Attributes
    ----------
    query : dict
        sat-api query.
    minzoom : int, optional, (default: 7)
        Mosaic Min Zoom.
    maxzoom : int, optional (default: 12)
        Mosaic Max Zoom.
    optimized_selection : bool, optional (default: true)
        Limit one Path-Row scene per quadkey.
    maximum_items_per_tile : int, optional (default: 20)
        Limit number of scene per quadkey. Use 0 to use all items.
    stac_collection_limit : int, optional (default: 500)
        Limits the number of items returned per page by sat-api.
    seasons : tuple, optional (default: all four seasons)
        Keep only scenes acquired during these seasons.
    stac_url : str, optional (default: from SATAPI_URL env var)
        Root URL of the sat-api endpoint (resolved once at import time).

    Returns
    -------
    out : dict
        MosaicJSON definition.

    """
    # NOTE: mutates the caller's `query` dict (limit/page keys) — kept for
    # backward compatibility with existing callers.
    if stac_collection_limit:
        query.update(limit=stac_collection_limit)

    logger.debug(json.dumps(query))

    def fetch_sat_api(query):
        """Fetch every result page for `query`, following sat-api pagination."""
        headers = {
            "Content-Type": "application/json",
            "Accept-Encoding": "gzip",
            "Accept": "application/geo+json",
        }

        url = f"{stac_url}/stac/search"
        # NOTE(review): no request timeout — a stalled endpoint hangs here.
        data = requests.post(url, headers=headers, json=query).json()
        error = data.get("message", "")
        if error:
            raise Exception(f"SAT-API failed and returned: {error}")

        meta = data.get("meta", {})
        if not meta.get("found"):
            return []

        logger.debug(json.dumps(meta))

        features = data["features"]
        if data["links"]:
            # More pages available: advance `page` and recurse.
            curr_page = int(meta["page"])
            query["page"] = curr_page + 1
            query["limit"] = meta["limit"]

            features = list(itertools.chain(features, fetch_sat_api(query)))

        return features

    features = fetch_sat_api(query)
    if not features:
        raise Exception(f"No asset found for query '{json.dumps(query)}'")

    logger.debug(f"Found: {len(features)} scenes")

    # Keep only scenes acquired in the requested seasons; the season is
    # derived from the acquisition date and the scene's maximum latitude.
    features = [
        feat for feat in features
        if _get_season(feat["properties"]["datetime"],
                       max(feat["bbox"][1], feat["bbox"][3])) in seasons
    ]

    if optimized_selection:
        # Keep a single scene per Landsat Path-Row. A set gives O(1)
        # membership tests instead of the O(n) list scan.
        dataset = []
        seen_path_rows = set()
        for item in features:
            pr = item["properties"]["eo:column"] + "-" + item["properties"][
                "eo:row"]
            if pr not in seen_path_rows:
                seen_path_rows.add(pr)
                dataset.append(item)
    else:
        dataset = features

    # Mosaic bounds: explicit query bbox if given, else the dataset extent.
    if query.get("bbox"):
        bounds = query["bbox"]
    else:
        bounds = burntiles.find_extrema(dataset)

    # Convert each GeoJSON geometry to a shapely geometry, in place.
    for item in dataset:
        item["geometry"] = shape(item["geometry"])

    # All minzoom tiles covering the bounds, as unique "z-x-y" ids.
    tiles = burntiles.burn([bbox_to_geojson(bounds)], minzoom)
    tiles = list({"{2}-{0}-{1}".format(*tile.tolist()) for tile in tiles})

    logger.debug(f"Number tiles: {len(tiles)}")

    mosaic_definition = dict(
        mosaicjson="0.0.1",
        minzoom=minzoom,
        maxzoom=maxzoom,
        bounds=bounds,
        center=[(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2,
                minzoom],
        tiles={},
    )

    # Map each covering tile's quadkey to the product ids that intersect it.
    for tile_id in tiles:
        z, x, y = map(int, tile_id.split("-"))
        tile = mercantile.Tile(x=x, y=y, z=z)
        quadkey = mercantile.quadkey(*tile)
        geometry = box(*mercantile.bounds(tile))
        intersect_dataset = [
            item for item in dataset if geometry.intersects(item["geometry"])
        ]
        if intersect_dataset:
            # Cap the number of assets per quadkey (0 disables the cap).
            if maximum_items_per_tile:
                intersect_dataset = intersect_dataset[0:maximum_items_per_tile]

            mosaic_definition["tiles"][quadkey] = [
                scene["properties"]["landsat:product_id"]
                for scene in intersect_dataset
            ]

    return mosaic_definition
コード例 #28
0
ファイル: mosaic.py プロジェクト: moradology/cogeo-mosaic
    def _create_mosaic(
        cls,
        features: Sequence[Dict],
        minzoom: int,
        maxzoom: int,
        quadkey_zoom: Optional[int] = None,
        accessor: Callable[[Dict], str] = default_accessor,
        asset_filter: Callable = default_filter,
        version: str = "0.0.2",
        quiet: bool = True,
        **kwargs,
    ):
        """Create mosaic definition content.

        Attributes:
            features (list): List of GeoJSON features.
            minzoom (int): Force mosaic min-zoom.
            maxzoom (int): Force mosaic max-zoom.
            quadkey_zoom (int): Force mosaic quadkey zoom (optional, defaults to minzoom).
            accessor (callable): Function called on each feature to get its identifier (default is feature["properties"]["path"]).
            asset_filter (callable):  Function to filter features.
            version (str): mosaicJSON definition version (default: 0.0.2).
            quiet (bool): Mask processing steps (default is True).
            kwargs (any): Options forwarded to `asset_filter`

        Returns:
            mosaic_definition (MosaicJSON): Mosaic definition.

        Examples:
            >>> MosaicJSON._create_mosaic([], 12, 14)

        """
        quadkey_zoom = quadkey_zoom or minzoom

        if not quiet:
            click.echo(f"Get quadkey list for zoom: {quadkey_zoom}", err=True)

        # Vectorized polygon construction; fall back to a per-feature build
        # when Pygeos rejects the ragged input.
        # Ref: https://github.com/developmentseed/cogeo-mosaic/issues/81
        try:
            geoms = polygons(
                [feat["geometry"]["coordinates"][0] for feat in features])
        except TypeError:
            geoms = [
                polygons(feat["geometry"]["coordinates"][0])
                for feat in features
            ]

        bounds = tuple(total_bounds(geoms))

        # Every quadkey-zoom tile touched by at least one feature footprint.
        qk_tiles = [
            mercantile.Tile(*t) for t in burntiles.burn(features, quadkey_zoom)
        ]

        mosaic_definition: Dict[str, Any] = {
            "mosaicjson": version,
            "minzoom": minzoom,
            "maxzoom": maxzoom,
            "quadkey_zoom": quadkey_zoom,
            "bounds": bounds,
            "center": ((bounds[0] + bounds[2]) / 2,
                       (bounds[1] + bounds[3]) / 2, minzoom),
            "tiles": {},
            "version": "1.0.0",
        }

        if not quiet:
            click.echo("Feed Quadkey index", err=True)

        # Spatial index over dataset footprints for fast per-tile queries.
        tree = STRtree(geoms)

        # Progress output is discarded when quiet.
        fout = os.devnull if quiet else sys.stderr
        with click.progressbar(  # type: ignore
            qk_tiles, file=fout, show_percent=True, label="Iterate over quadkeys"
        ) as bar:
            for t in bar:
                qk = mercantile.quadkey(t)
                t_geom = polygons(
                    mercantile.feature(t)["geometry"]["coordinates"][0])

                # Candidate datasets whose footprint intersects this tile.
                hits = sorted(tree.query(t_geom, predicate="intersects"))
                if not hits:
                    continue

                picked_features = tuple(features[i] for i in hits)
                picked_geoms = tuple(geoms[i] for i in hits)

                selection = asset_filter(
                    t, picked_features, picked_geoms, **kwargs)

                if selection:
                    mosaic_definition["tiles"][qk] = [
                        accessor(f) for f in selection
                    ]

        return cls(**mosaic_definition)