# Shared imports for the cover / rasterize entry points below.
import os
import re
import sys
import csv
import json
import math
import collections
from random import shuffle

import numpy as np
import psycopg2
from tqdm import tqdm
from mercantile import tiles, xy_bounds
from rasterio import open as rasterio_open
from rasterio.warp import transform_bounds

# NOTE: the project-level helpers used below (tiles_from_csv, tiles_from_dir, tile_bbox,
# tile_label_to_file, geojson_srid, geojson_parse_feature, geojson_tile_burn, load_config,
# check_classes, make_palette, web_ui, Logs) are provided by the surrounding RoboSat.pink modules.


def main(args):
    assert not (args.extent and args.splits), "--splits and --extent are mutually exclusive options."
    assert not (args.extent and len(args.out) > 1), "--extent option implies a single output."
    assert (
        int(args.bbox is not None)
        + int(args.geojson is not None)
        + int(args.dir is not None)
        + int(args.raster is not None)
        + int(args.cover is not None)
        == 1
    ), "One, and only one, input type must be provided, among: --dir, --bbox, --cover, --raster or --geojson."

    if args.bbox:
        try:  # either w,s,e,n,crs or plain w,s,e,n (assumed EPSG:4326)
            w, s, e, n, crs = args.bbox.split(",")
            w, s, e, n = map(float, (w, s, e, n))
        except ValueError:
            crs = None
            w, s, e, n = map(float, args.bbox.split(","))
        assert isinstance(w, float) and isinstance(s, float), "Invalid bbox parameter."

    if args.splits:
        splits = [int(split) for split in args.splits.split("/")]
        assert len(splits) == len(args.out) and 0 < sum(splits) <= 100, "Invalid split value or inconsistent with out paths."

    assert not (not args.zoom and (args.geojson or args.bbox or args.raster)), "Zoom parameter is required."

    args.out = [os.path.expanduser(out) for out in args.out]

    cover = []

    if args.raster:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.raster, args.zoom), file=sys.stderr, flush=True)
        with rasterio_open(os.path.expanduser(args.raster)) as r:
            w, s, e, n = transform_bounds(r.crs, "EPSG:4326", *r.bounds)
            assert isinstance(w, float) and isinstance(s, float), "Unable to deal with raster projection"
        cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.geojson:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.geojson, args.zoom), file=sys.stderr, flush=True)
        with open(os.path.expanduser(args.geojson)) as f:
            feature_collection = json.load(f)
            srid = geojson_srid(feature_collection)

            feature_map = collections.defaultdict(list)
            for feature in tqdm(feature_collection["features"], ascii=True, unit="feature"):
                feature_map = geojson_parse_feature(args.zoom, srid, feature_map, feature)

        cover = feature_map.keys()

    if args.bbox:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.bbox, args.zoom), file=sys.stderr, flush=True)
        if crs:
            w, s, e, n = transform_bounds(crs, "EPSG:4326", w, s, e, n)
        assert isinstance(w, float) and isinstance(s, float), "Unable to deal with bbox projection"
        cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.cover:
        print("RoboSat.pink - cover from {}".format(args.cover), file=sys.stderr, flush=True)
        cover = [tile for tile in tiles_from_csv(args.cover)]

    if args.dir:
        print("RoboSat.pink - cover from {}".format(args.dir), file=sys.stderr, flush=True)
        cover = [tile for tile in tiles_from_dir(args.dir, xyz=not args.no_xyz)]

    # Normalize the cover to the requested zoom and, if asked, compute its extent.
    _cover = []
    extent_w, extent_s, extent_n, extent_e = (180.0, 90.0, -180.0, -90.0)

    for tile in tqdm(cover, ascii=True, unit="tile"):
        if args.zoom and tile.z != args.zoom:
            w, s, n, e = transform_bounds("EPSG:3857", "EPSG:4326", *xy_bounds(tile))
            for t in tiles(w, s, n, e, args.zoom):
                if t not in _cover:  # avoid duplicate tiles when re-tiling to the target zoom
                    _cover.append(t)
        else:
            if args.extent:
                w, s, n, e = transform_bounds("EPSG:3857", "EPSG:4326", *xy_bounds(tile))
            _cover.append(tile)

        if args.extent:
            extent_w, extent_s, extent_n, extent_e = (min(extent_w, w), min(extent_s, s), max(extent_n, n), max(extent_e, e))

    cover = _cover

    if args.splits:
        shuffle(cover)  # in-place
        cover_splits = [math.floor(len(cover) * split / 100) for split in splits]
        if len(splits) > 1 and sum(map(int, splits)) == 100 and len(cover) > sum(map(int, splits)):
            cover_splits[0] = len(cover) - sum(map(int, cover_splits[1:]))  # no tile waste
        s = 0
        covers = []
        for e in cover_splits:
            covers.append(cover[s : s + e])
            s += e
    else:
        covers = [cover]

    if args.extent:
        if args.out and os.path.dirname(args.out[0]) and not os.path.isdir(os.path.dirname(args.out[0])):
            os.makedirs(os.path.dirname(args.out[0]), exist_ok=True)

        extent = "{:.8f},{:.8f},{:.8f},{:.8f}".format(extent_w, extent_s, extent_n, extent_e)

        if args.out:
            with open(args.out[0], "w") as fp:
                fp.write(extent)
        else:
            print(extent)
    else:
        for i, cover in enumerate(covers):
            if os.path.dirname(args.out[i]) and not os.path.isdir(os.path.dirname(args.out[i])):
                os.makedirs(os.path.dirname(args.out[i]), exist_ok=True)

            with open(args.out[i], "w") as fp:
                csv.writer(fp).writerows(cover)
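
# A minimal usage sketch, assuming this entry point is normally fed an argparse
# Namespace by the rsp CLI wrapper: the field names mirror the flags read above,
# while the bbox value, zoom and output path are purely illustrative.
def _example_cover_from_bbox():
    from argparse import Namespace

    args = Namespace(
        bbox="4.795,45.628,4.935,45.853",  # w,s,e,n in EPSG:4326; a trailing CRS is also accepted
        geojson=None,
        dir=None,
        raster=None,
        cover=None,
        zoom=18,
        extent=False,
        splits=None,
        no_xyz=False,
        out=["out/cover.csv"],
    )
    main(args)  # writes one x,y,z tile per row into out/cover.csv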
# Variant of the cover entry point, extended with a PostGIS (--sql / --pg) input.
def main(args):
    assert not (args.extent and args.splits), "--splits and --extent are mutually exclusive options."
    assert not (args.extent and len(args.out) > 1), "--extent option implies a single output."
    assert not (args.sql and not args.pg), "--sql option implies --pg."
    assert (
        int(args.bbox is not None)
        + int(args.geojson is not None)
        + int(args.sql is not None)
        + int(args.dir is not None)
        + int(args.raster is not None)
        + int(args.cover is not None)
        == 1
    ), "One, and only one, input type must be provided, among: --dir, --bbox, --cover, --raster, --geojson or --sql."

    if args.bbox:
        try:  # either w,s,e,n,crs or plain w,s,e,n (assumed EPSG:4326)
            w, s, e, n, crs = args.bbox.split(",")
            w, s, e, n = map(float, (w, s, e, n))
        except ValueError:
            crs = None
            w, s, e, n = map(float, args.bbox.split(","))
        assert isinstance(w, float) and isinstance(s, float), "Invalid bbox parameter."

    if args.splits:
        splits = [int(split) for split in args.splits.split("/")]
        assert len(splits) == len(args.out) and 0 < sum(splits) <= 100, "Invalid split value or inconsistent with out paths."

    assert not (not args.zoom and (args.geojson or args.bbox or args.raster or args.sql)), "Zoom parameter is required."

    args.out = [os.path.expanduser(out) for out in args.out]

    cover = []

    if args.raster:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.raster, args.zoom), file=sys.stderr, flush=True)
        with rasterio_open(os.path.expanduser(args.raster)) as r:
            w, s, e, n = transform_bounds(r.crs, "EPSG:4326", *r.bounds)
            assert isinstance(w, float) and isinstance(s, float), "Unable to deal with raster projection"
        cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.geojson:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.geojson, args.zoom), file=sys.stderr, flush=True)
        with open(os.path.expanduser(args.geojson)) as f:
            feature_collection = json.load(f)
            srid = geojson_srid(feature_collection)

            feature_map = collections.defaultdict(list)
            for feature in tqdm(feature_collection["features"], ascii=True, unit="feature"):
                feature_map = geojson_parse_feature(args.zoom, srid, feature_map, feature)

        cover = feature_map.keys()

    if args.sql:
        print("RoboSat.pink - cover from {} {} at zoom {}".format(args.sql, args.pg, args.zoom), file=sys.stderr, flush=True)
        conn = psycopg2.connect(args.pg)
        assert conn, "Unable to connect to PostgreSQL database."
        db = conn.cursor()

        query = """
        WITH
          sql  AS ({}),
          geom AS (SELECT "1" AS geom FROM sql AS t("1"))
        SELECT '{{"type": "Feature", "geometry": '
               || ST_AsGeoJSON((ST_Dump(ST_Transform(ST_Force2D(geom.geom), 4326))).geom, 6)
               || '}}' AS features
        FROM geom
        """.format(args.sql)

        db.execute(query)
        assert db.rowcount is not None and db.rowcount != -1, "SQL query returned no result."

        feature_map = collections.defaultdict(list)
        for feature in tqdm(db.fetchall(), ascii=True, unit="feature"):  # FIXME: fetchall will not always fit in memory...
            feature_map = geojson_parse_feature(args.zoom, 4326, feature_map, json.loads(feature[0]))

        cover = feature_map.keys()

    if args.bbox:
        print("RoboSat.pink - cover from {} at zoom {}".format(args.bbox, args.zoom), file=sys.stderr, flush=True)
        if crs:
            w, s, e, n = transform_bounds(crs, "EPSG:4326", w, s, e, n)
        assert isinstance(w, float) and isinstance(s, float), "Unable to deal with bbox projection"
        cover = [tile for tile in tiles(w, s, e, n, args.zoom)]

    if args.cover:
        print("RoboSat.pink - cover from {}".format(args.cover), file=sys.stderr, flush=True)
        cover = [tile for tile in tiles_from_csv(args.cover)]

    if args.dir:
        print("RoboSat.pink - cover from {}".format(args.dir), file=sys.stderr, flush=True)
        cover = [tile for tile in tiles_from_dir(args.dir, xyz=not args.no_xyz)]

    # Normalize the cover to the requested zoom and, if asked, compute its extent.
    _cover = []
    extent_w, extent_s, extent_n, extent_e = (180.0, 90.0, -180.0, -90.0)

    for tile in tqdm(cover, ascii=True, unit="tile"):
        if args.zoom and tile.z != args.zoom:
            w, s, n, e = transform_bounds("EPSG:3857", "EPSG:4326", *xy_bounds(tile))
            for t in tiles(w, s, n, e, args.zoom):
                if t not in _cover:  # avoid duplicate tiles when re-tiling to the target zoom
                    _cover.append(t)
        else:
            if args.extent:
                w, s, n, e = transform_bounds("EPSG:3857", "EPSG:4326", *xy_bounds(tile))
            _cover.append(tile)

        if args.extent:
            extent_w, extent_s, extent_n, extent_e = (min(extent_w, w), min(extent_s, s), max(extent_n, n), max(extent_e, e))

    cover = _cover

    if args.splits:
        shuffle(cover)  # in-place
        cover_splits = [math.floor(len(cover) * split / 100) for split in splits]
        if len(splits) > 1 and sum(map(int, splits)) == 100 and len(cover) > sum(map(int, splits)):
            cover_splits[0] = len(cover) - sum(map(int, cover_splits[1:]))  # no tile waste
        s = 0
        covers = []
        for e in cover_splits:
            covers.append(cover[s : s + e])
            s += e
    else:
        covers = [cover]

    if args.extent:
        if args.out and os.path.dirname(args.out[0]) and not os.path.isdir(os.path.dirname(args.out[0])):
            os.makedirs(os.path.dirname(args.out[0]), exist_ok=True)

        extent = "{:.8f},{:.8f},{:.8f},{:.8f}".format(extent_w, extent_s, extent_n, extent_e)

        if args.out:
            with open(args.out[0], "w") as fp:
                fp.write(extent)
        else:
            print(extent)
    else:
        for i, cover in enumerate(covers):
            if os.path.dirname(args.out[i]) and not os.path.isdir(os.path.dirname(args.out[i])):
                os.makedirs(os.path.dirname(args.out[i]), exist_ok=True)

            with open(args.out[i], "w") as fp:
                csv.writer(fp).writerows(cover)
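
# A minimal usage sketch for the PostGIS path, assuming a reachable database: the
# connection string, table name and zoom are illustrative assumptions only, and
# --sql must select exactly one geometry column.
def _example_cover_from_sql():
    from argparse import Namespace

    args = Namespace(
        sql="SELECT geom FROM buildings",
        pg="host=localhost dbname=gis user=postgres",
        bbox=None,
        geojson=None,
        dir=None,
        raster=None,
        cover=None,
        zoom=18,
        extent=False,
        splits=None,
        no_xyz=False,
        out=["out/cover.csv"],
    )
    main(args)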
# Rasterize entry point: burns vector features (GeoJSON or PostGIS) into label tiles.
def main(args):
    assert not (args.sql and args.geojson), "--sql and --geojson are mutually exclusive options."
    assert not (args.pg and not args.sql), "With PostgreSQL --pg, --sql must also be provided."
    assert len(args.ts.split(",")) == 2, "--ts expects a width,height value (e.g. 512,512)."

    config = load_config(args.config)
    check_classes(config)
    palette = make_palette([classe["color"] for classe in config["classes"]], complementary=True)
    index = [config["classes"].index(classe) for classe in config["classes"] if classe["title"] == args.type]
    assert index, "Requested type is not found among your config file classes."
    burn_value = int(math.pow(2, index[0] - 1))  # 8 bits one-hot encoding
    assert 0 <= burn_value <= 128

    args.out = os.path.expanduser(args.out)
    os.makedirs(args.out, exist_ok=True)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    if args.geojson:
        tiles = [tile for tile in tiles_from_csv(os.path.expanduser(args.cover))]
        assert tiles, "Empty cover"

        zoom = tiles[0].z
        assert not [tile for tile in tiles if tile.z != zoom], "Unsupported zoom mixed cover. Use PostGIS instead"

        feature_map = collections.defaultdict(list)

        log.log("RoboSat.pink - rasterize - Compute spatial index")
        for geojson_file in args.geojson:
            with open(os.path.expanduser(geojson_file)) as geojson:
                feature_collection = json.load(geojson)
                srid = geojson_srid(feature_collection)
                for feature in tqdm(feature_collection["features"], ascii=True, unit="feature"):
                    feature_map = geojson_parse_feature(zoom, srid, feature_map, feature)  # accumulate features per tile

        features = args.geojson

    if args.pg:
        conn = psycopg2.connect(args.pg)
        db = conn.cursor()

        assert "limit" not in args.sql.lower(), "LIMIT is not supported"
        assert "TILE_GEOM" in args.sql, "TILE_GEOM filter not found in your SQL"
        sql = re.sub(r"ST_Intersects( )*\((.*)?TILE_GEOM(.*)?\)", "1=1", args.sql, flags=re.I)
        assert sql and sql != args.sql

        db.execute("""SELECT ST_Srid("1") AS srid FROM ({} LIMIT 1) AS t("1")""".format(sql))
        srid = db.fetchone()[0]
        assert srid and int(srid) > 0, "Unable to retrieve geometry SRID."

        features = args.sql

    log.log("RoboSat.pink - rasterize - rasterizing {} from {} on cover {}".format(args.type, features, args.cover))
    with open(os.path.join(os.path.expanduser(args.out), "instances_" + args.type.lower() + ".cover"), mode="w") as cover:
        for tile in tqdm(list(tiles_from_csv(os.path.expanduser(args.cover))), ascii=True, unit="tile"):

            geojson = None

            if args.pg:
                w, s, e, n = tile_bbox(tile)
                tile_geom = "ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), {})".format(w, s, e, n, srid)

                query = """
                WITH
                  sql  AS ({}),
                  geom AS (SELECT "1" AS geom FROM sql AS t("1")),
                  json AS (SELECT '{{"type": "Feature", "geometry": '
                           || ST_AsGeoJSON((ST_Dump(ST_Transform(ST_Force2D(geom.geom), 4326))).geom, 6)
                           || '}}' AS features FROM geom)
                SELECT '{{"type": "FeatureCollection", "features": [' || Array_To_String(array_agg(features), ',') || ']}}' FROM json
                """.format(args.sql.replace("TILE_GEOM", tile_geom))

                db.execute(query)
                row = db.fetchone()
                try:
                    geojson = json.loads(row[0])["features"] if row and row[0] else None
                except Exception:
                    log.log("Warning: Invalid geometries, skipping {}".format(tile))
                    conn = psycopg2.connect(args.pg)  # reconnect, as the cursor may be left in an aborted state
                    db = conn.cursor()

            if args.geojson:
                geojson = feature_map[tile] if tile in feature_map else None

            if geojson:
                num = len(geojson)
                out = geojson_tile_burn(tile, geojson, 4326, list(map(int, args.ts.split(","))), burn_value)

            if not geojson or out is None:
                num = 0
                out = np.zeros(shape=list(map(int, args.ts.split(","))), dtype=np.uint8)

            tile_label_to_file(args.out, tile, palette, out, append=args.append)
            cover.write("{},{},{} {}{}".format(tile.x, tile.y, tile.z, num, os.linesep))

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "."
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
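
# A minimal usage sketch for the GeoJSON path, assuming a dataset config and an
# existing cover CSV: every path, the class title and the tile size are
# illustrative assumptions, not values prescribed by the code above.
def _example_rasterize_from_geojson():
    from argparse import Namespace

    args = Namespace(
        config="config.toml",           # dataset config listing the classes and their colors
        type="Building",                # must match a class title from the config
        cover="out/cover.csv",          # x,y,z tiles produced by the cover tool above
        geojson=["buildings.geojson"],
        pg=None,
        sql=None,
        ts="512,512",
        out="out/labels",
        append=False,
        no_web_ui=True,
        web_ui_template=None,
        web_ui_base_url=None,
    )
    main(args)  # writes one label tile per cover entry plus an instances_building.cover index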