def main():
    ap = argparse.ArgumentParser(description='run model on tileset')
    ap.add_argument('model', help='path to model checkpoint')
    ap.add_argument('config', help='path to model config')
    ap.add_argument('tiles', help='path to XYZ tile folder')
    ap.add_argument('outputdir', help='name for tile output')
    ap.add_argument('--aws_profile', help='AWS Profile Name', default='default')
    args = ap.parse_args()

    config = load_config(args.config)
    tiles = S3SlippyMapTiles(args.tiles, mode='multibands', aws_profile=args.aws_profile)
    net = model(config, args.model)
    loader = DataLoader(tiles, batch_size=config['model']['batch_size'], shuffle=True, num_workers=1)
    palette = make_palette(config["classes"][0]["color"])

    # use the profile passed on the command line rather than a hardcoded one
    fs = s3fs.S3FileSystem(session=boto3.Session(profile_name=args.aws_profile))

    # strip the "s3://" scheme prefix: s3fs expects bucket/key paths
    outputdir = args.outputdir[5:] + '/' + os.path.basename(args.tiles)
    print("Saving predictions to {}.".format(outputdir))
    predict(net, loader, outputdir, palette, fs)
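# A minimal sketch (stdlib only) of the "s3://" handling above. The slice
# args.outputdir[5:] silently assumes the output path always starts with
# "s3://"; urlparse makes that intent explicit. The example URI is hypothetical.
def _split_s3_uri(uri):
    from urllib.parse import urlparse
    parsed = urlparse(uri)
    assert parsed.scheme == "s3", "expected an s3:// URI"
    return parsed.netloc, parsed.path.lstrip("/")  # (bucket, key prefix)

# _split_s3_uri("s3://my-bucket/tiles/z12") == ("my-bucket", "tiles/z12")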
def write_tile(root, tile, colors, out):
    """Write a single paletted PNG tile under root/z/x/y.png."""
    out_path = os.path.join(root, str(tile.z), str(tile.x))
    os.makedirs(out_path, exist_ok=True)

    out = Image.fromarray(out, mode="P")
    out.putpalette(complementary_palette(make_palette(colors[0], colors[1])))
    out.save(os.path.join(out_path, "{}.png".format(tile.y)), optimize=True)
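# Hypothetical usage sketch for write_tile: writes an all-background 256x256
# mask as masks/8/5/12.png. The color pair is illustrative; any two colors
# accepted by make_palette would do.
def _example_write_tile():
    import mercantile
    import numpy as np
    mask = np.zeros((256, 256), dtype=np.uint8)
    write_tile("masks", mercantile.Tile(x=5, y=12, z=8), ("white", "pink"), mask)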
def main(args):
    config = load_config(args.config)
    num_classes = len(config["classes"])
    batch_size = args.batch_size if args.batch_size else config["model"]["batch_size"]
    tile_size = args.tile_size if args.tile_size else config["model"]["tile_size"]

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        device = torch.device("cpu")

    def map_location(storage, _):
        return storage.cuda() if torch.cuda.is_available() else storage.cpu()

    # https://github.com/pytorch/pytorch/issues/7178
    chkpt = torch.load(args.checkpoint, map_location=map_location)

    models = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.models.__file__)])]
    if config["model"]["name"] not in models:
        sys.exit("Unknown model, those available are {}".format(models))

    std = []
    mean = []
    num_channels = 0
    for channel in config["channels"]:
        std.extend(channel["std"])
        mean.extend(channel["mean"])
        num_channels += len(channel["bands"])

    encoder = config["model"]["encoder"]
    pretrained = config["model"]["pretrained"]
    model_module = import_module("robosat_pink.models.{}".format(config["model"]["name"]))
    net = getattr(model_module, config["model"]["name"].title())(
        num_classes=num_classes, num_channels=num_channels, encoder=encoder, pretrained=pretrained
    ).to(device)
    net = torch.nn.DataParallel(net)
    net.load_state_dict(chkpt["state_dict"])
    net.eval()

    transform = Compose([ImageToTensor(), Normalize(mean=mean, std=std)])
    directory = BufferedSlippyMapTiles(args.tiles, transform=transform, size=tile_size, overlap=args.overlap)
    loader = DataLoader(directory, batch_size=batch_size, num_workers=args.workers)

    palette = make_palette(config["classes"][0]["color"], config["classes"][1]["color"])

    # don't track tensors with autograd during prediction
    with torch.no_grad():
        for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            images = images.to(device)
            outputs = net(images)

            # compute per-pixel segmentation class probabilities
            probs = torch.nn.functional.softmax(outputs, dim=1).data.cpu().numpy()

            for tile, prob in zip(tiles, probs):
                x, y, z = list(map(int, tile))

                # we predicted on buffered tiles; crop back to the original tile extent
                prob = directory.unbuffer(prob)

                assert prob.shape[0] == 2, "single channel requires binary model"
                assert np.allclose(np.sum(prob, axis=0), 1.0), "single channel requires probabilities to sum up to one"

                image = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()

                out = Image.fromarray(image, mode="P")
                out.putpalette(palette)

                os.makedirs(os.path.join(args.probs, str(z), str(x)), exist_ok=True)
                path = os.path.join(args.probs, str(z), str(x), str(y) + ".png")
                out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.tiles)]
        web_ui(args.probs, base_url, tiles, tiles, "png", template)
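# The mask derivation above, in isolation: softmax over the class dimension,
# then np.around on the foreground probability, which is equivalent to
# thresholding at 0.5. A minimal sketch on a random binary-model output.
def _example_probs_to_mask():
    logits = torch.randn(1, 2, 256, 256)  # N, classes, H, W
    probs = torch.nn.functional.softmax(logits, dim=1).numpy()
    prob = probs[0]  # (2, H, W) for a single tile
    assert np.allclose(np.sum(prob, axis=0), 1.0)
    return np.around(prob[1:, :, :]).astype(np.uint8).squeeze()  # (H, W) in {0, 1}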
def main(args):
    config = load_config(args.config)
    num_classes = len(config["classes"])
    batch_size = args.batch_size if args.batch_size else config["model"]["batch_size"]
    tile_size = args.tile_size if args.tile_size else config["model"]["tile_size"]

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        device = torch.device("cpu")

    def map_location(storage, _):
        return storage.cuda() if torch.cuda.is_available() else storage.cpu()

    # https://github.com/pytorch/pytorch/issues/7178
    S3_CHECKPOINT = False
    chkpt = args.checkpoint
    if chkpt.startswith("s3://"):
        S3_CHECKPOINT = True
        chkpt = chkpt[5:]  # strip "s3://" scheme prefix for s3fs

    models = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.models.__file__)])]
    if config["model"]["name"] not in models:
        sys.exit("Unknown model, those available are {}".format(models))

    num_channels = 0
    for channel in config["channels"]:
        num_channels += len(channel["bands"])

    pretrained = config["model"]["pretrained"]
    encoder = config["model"]["encoder"]
    model_module = import_module("robosat_pink.models.{}".format(config["model"]["name"]))
    net = getattr(model_module, config["model"]["name"].title())(
        num_classes=num_classes, num_channels=num_channels, encoder=encoder, pretrained=pretrained
    ).to(device)
    net = torch.nn.DataParallel(net)

    try:
        if S3_CHECKPOINT:
            sess = boto3.Session(profile_name=args.aws_profile)
            fs = s3fs.S3FileSystem(session=sess)
            with s3fs.S3File(fs, chkpt, 'rb') as C:
                state = torch.load(io.BytesIO(C.read()), map_location=map_location)
        else:
            state = torch.load(chkpt, map_location=map_location)
        net.load_state_dict(state['state_dict'])
        net.to(device)
    except FileNotFoundError:
        sys.exit("{} checkpoint not found.".format(chkpt))

    net.eval()

    # NOTE: per-band normalization (mean/std) is currently disabled here;
    # tiles are fed to the network unnormalized.
    if args.tiles.startswith('s3://'):
        directory = S3SlippyMapTiles(args.tiles, mode='multibands', transform=None, aws_profile=args.aws_profile)
    else:
        directory = SlippyMapTiles(args.tiles, mode="multibands", transform=None)
    loader = DataLoader(directory, batch_size=batch_size, num_workers=args.workers)

    palette = make_palette(config["classes"][0]["color"])

    # don't track tensors with autograd during prediction
    with torch.no_grad():
        for tiles, images in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            tiles = list(zip(tiles[0], tiles[1], tiles[2]))
            images = images.to(device)
            outputs = net(images)

            for tile, prob in zip(tiles, outputs):
                x = tile[0].item()
                y = tile[1].item()
                z = tile[2].item()

                # threshold the network output into a binary mask per pixel
                image = (prob > args.threshold).cpu().numpy().astype(np.uint8).squeeze()

                out = Image.fromarray(image, mode="P")
                out.putpalette(palette)

                os.makedirs(os.path.join(args.probs, str(z), str(x)), exist_ok=True)
                path = os.path.join(args.probs, str(z), str(x), str(y) + ".png")
                out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.tiles)]
        web_ui(args.probs, base_url, tiles, tiles, "png", template)
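# The S3 checkpoint load above, in isolation: read the whole object into
# memory and hand torch.load a file-like buffer. The bucket/key and profile
# are hypothetical; assumes this module's boto3/s3fs/io/torch imports.
def _example_load_checkpoint_from_s3(profile="default"):
    sess = boto3.Session(profile_name=profile)
    fs = s3fs.S3FileSystem(session=sess)
    with fs.open("my-bucket/checkpoints/checkpoint.pth", "rb") as f:
        return torch.load(io.BytesIO(f.read()), map_location="cpu")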
def main(args):
    if not args.workers:
        args.workers = max(1, math.floor(os.cpu_count() * 0.5))

    if args.label:
        config = load_config(args.config)
        check_classes(config)
        colors = [classe["color"] for classe in config["classes"]]
        palette = make_palette(*colors)

    splits_path = os.path.join(os.path.expanduser(args.out), ".splits")
    tiles_map = {}

    print("RoboSat.pink - tile on CPU, with {} workers".format(args.workers))

    bands = -1
    for path in args.rasters:
        try:
            raster = rasterio_open(path)
            w, s, e, n = transform_bounds(raster.crs, "EPSG:4326", *raster.bounds)
        except:
            sys.exit("Error: Unable to load raster {} or deal with its projection".format(path))

        if bands != -1:
            assert bands == len(raster.indexes), "Coverage must be bands consistent"
        bands = len(raster.indexes)

        tiles = [mercantile.Tile(x=x, y=y, z=z) for x, y, z in mercantile.tiles(w, s, e, n, args.zoom)]
        for tile in tiles:
            tile_key = (str(tile.x), str(tile.y), str(tile.z))
            if tile_key not in tiles_map.keys():
                tiles_map[tile_key] = []
            tiles_map[tile_key].append(path)

    if args.label:
        ext = "png"
        bands = 1
    if not args.label:
        if bands == 1:
            ext = "png"
        if bands == 3:
            ext = "webp"
        if bands > 3:
            ext = "tiff"

    tiles = []
    progress = tqdm(total=len(tiles_map), ascii=True, unit="tile")

    # Begin to tile plain tiles
    with futures.ThreadPoolExecutor(args.workers) as executor:

        def worker(path):
            raster = rasterio_open(path)
            w, s, e, n = transform_bounds(raster.crs, "EPSG:4326", *raster.bounds)
            transform, _, _ = calculate_default_transform(raster.crs, "EPSG:3857", raster.width, raster.height, w, s, e, n)
            tiles = [mercantile.Tile(x=x, y=y, z=z) for x, y, z in mercantile.tiles(w, s, e, n, args.zoom)]
            tiled = []

            for tile in tiles:
                try:
                    w, s, e, n = mercantile.xy_bounds(tile)

                    # inspired by rio-tiler, cf: https://github.com/mapbox/rio-tiler/pull/45
                    warp_vrt = WarpedVRT(
                        raster,
                        crs="epsg:3857",
                        resampling=Resampling.bilinear,
                        add_alpha=False,
                        transform=from_bounds(w, s, e, n, args.ts, args.ts),
                        width=math.ceil((e - w) / transform.a),
                        height=math.ceil((s - n) / transform.e),
                    )
                    data = warp_vrt.read(out_shape=(len(raster.indexes), args.ts, args.ts), window=warp_vrt.window(w, s, e, n))
                    image = np.moveaxis(data, 0, 2)  # C,H,W -> H,W,C
                except:
                    sys.exit("Error: Unable to tile {} from raster {}.".format(str(tile), raster))

                tile_key = (str(tile.x), str(tile.y), str(tile.z))

                if not args.label and len(tiles_map[tile_key]) == 1 and is_border(image):
                    progress.update()
                    continue

                if len(tiles_map[tile_key]) > 1:
                    out = os.path.join(splits_path, str(tiles_map[tile_key].index(path)))
                else:
                    out = args.out

                x, y, z = map(int, tile)
                if not args.label:
                    ret = tile_image_to_file(out, mercantile.Tile(x=x, y=y, z=z), image)
                if args.label:
                    ret = tile_label_to_file(out, mercantile.Tile(x=x, y=y, z=z), palette, image)
                if not ret:
                    sys.exit("Error: Unable to write tile {} from raster {}.".format(str(tile), raster))

                if len(tiles_map[tile_key]) == 1:
                    progress.update()
                    tiled.append(mercantile.Tile(x=x, y=y, z=z))

            return tiled

        for tiled in executor.map(worker, args.rasters):
            if tiled is not None:
                tiles.extend(tiled)

    # Aggregate remaining tiles splits
    with futures.ThreadPoolExecutor(args.workers) as executor:

        def worker(tile_key):
            if len(tiles_map[tile_key]) == 1:
                return

            image = np.zeros((args.ts, args.ts, bands), np.uint8)

            x, y, z = map(int, tile_key)
            for i in range(len(tiles_map[tile_key])):
                root = os.path.join(splits_path, str(i))
                _, path = tile_from_slippy_map(root, x, y, z)

                if not args.label:
                    split = tile_image_from_file(path)
                if args.label:
                    split = tile_label_from_file(path)
                    split = split.reshape((args.ts, args.ts, 1))  # H,W -> H,W,C

                assert image.shape == split.shape
                image[:, :, :] += split[:, :, :]

            if not args.label and is_border(image):
                progress.update()
                return

            tile = mercantile.Tile(x=x, y=y, z=z)
            if not args.label:
                ret = tile_image_to_file(args.out, tile, image)
            if args.label:
                ret = tile_label_to_file(args.out, tile, palette, image)
            if not ret:
                sys.exit("Error: Unable to write tile {}.".format(str(tile_key)))

            progress.update()
            return tile

        for tiled in executor.map(worker, tiles_map.keys()):
            if tiled is not None:
                tiles.append(tiled)

    if splits_path and os.path.isdir(splits_path):
        shutil.rmtree(splits_path)  # Delete suffixes dir if any

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        web_ui(args.out, base_url, tiles, tiles, ext, template)
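# The tile enumeration above, in isolation: reproject the raster bounds to
# EPSG:4326, then ask mercantile for every tile covering them at one zoom
# level. The raster path is hypothetical.
def _example_tiles_covering_raster(path="scene.tif", zoom=15):
    raster = rasterio_open(path)
    w, s, e, n = transform_bounds(raster.crs, "EPSG:4326", *raster.bounds)
    return [mercantile.Tile(x=x, y=y, z=z) for x, y, z in mercantile.tiles(w, s, e, n, zoom)]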
def main(args):
    config = load_config(args.config)
    num_classes = len(config["classes"])
    batch_size = args.batch_size if args.batch_size else config["model"]["batch_size"]
    tile_size = config["model"]["tile_size"]

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        device = torch.device("cpu")

    def map_location(storage, _):
        return storage.cuda() if torch.cuda.is_available() else storage.cpu()

    # check checkpoint situation + load if necessary
    chkpt = None  # no checkpoint
    if args.checkpoint:  # command line checkpoint
        chkpt = args.checkpoint
    else:
        try:  # config file checkpoint
            chkpt = config["checkpoint"]['path']
        except:  # no checkpoint in config file
            pass
    if chkpt is None:
        sys.exit("No checkpoint provided on the command line or in the config file.")

    S3_CHECKPOINT = False
    if chkpt.startswith("s3://"):
        S3_CHECKPOINT = True
        chkpt = chkpt[5:]  # strip "s3://" scheme prefix for s3fs

    models = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.models.__file__)])]
    if config["model"]["name"] not in models:
        sys.exit("Unknown model, those available are {}".format(models))

    num_channels = 0
    for channel in config["channels"]:
        num_channels += len(channel["bands"])

    pretrained = config["model"]["pretrained"]
    encoder = config["model"]["encoder"]
    model_module = import_module("robosat_pink.models.{}".format(config["model"]["name"]))
    net = getattr(model_module, config["model"]["name"].title())(
        num_classes=num_classes, num_channels=num_channels, encoder=encoder, pretrained=pretrained
    ).to(device)
    net = torch.nn.DataParallel(net)

    try:
        if S3_CHECKPOINT:
            sess = boto3.Session(profile_name=args.aws_profile)
            fs = s3fs.S3FileSystem(session=sess)
            with s3fs.S3File(fs, chkpt, 'rb') as C:
                state = torch.load(io.BytesIO(C.read()), map_location=map_location)
        else:
            state = torch.load(chkpt, map_location=map_location)
        net.load_state_dict(state['state_dict'], strict=False)
        net.to(device)
    except FileNotFoundError:
        sys.exit("{} checkpoint not found.".format(chkpt))

    net.eval()

    tile_ids_filter = None
    if args.tile_ids is not None:
        tile_ids_filter = pd.read_csv(args.tile_ids, names=['ids']).ids.values

    # Construct torch Dataset, either from a single directory (if args.tiles is given) or from config.
    # Use the --tile_ids argument to filter the resulting tiles (e.g. to only run prediction on a test set).
    if args.tiles is not None:
        imagery_locs = [args.tiles]  # use the tile directory provided
        if args.tiles.startswith('s3://'):
            allImageryDatasets = [
                S3SlippyMapTiles(
                    args.tiles,
                    mode='multibands',
                    transform=None,
                    aws_profile=args.aws_profile,
                    ids=tile_ids_filter,
                    buffered=args.buffer,
                    buffered_overlap=args.buffer_overlap,
                    tilesize=tile_size,
                    bands=num_channels,
                )
            ]
        else:
            allImageryDatasets = [SlippyMapTiles(args.tiles, mode="multibands", transform=None)]
    else:  # use config to search for tiles
        fs = s3fs.S3FileSystem(session=boto3.Session(profile_name=config['dataset']['aws_profile']))
        p = pprint.PrettyPrinter()
        imagery_searchpath = config['dataset']['image_bucket'] + '/' + config['dataset']['imagery_directory_regex']
        print("Searching for imagery...({})".format(imagery_searchpath))
        imagery_candidates = fs.ls(config['dataset']['image_bucket'])
        print("candidates:")
        p.pprint(imagery_candidates)
        imagery_locs = [c for c in imagery_candidates if match(imagery_searchpath, c)]
        print("result:")
        p.pprint(imagery_locs)
        allImageryDatasets = [
            S3SlippyMapTiles("s3://" + loc, mode='multibands', transform=None, aws_profile=args.aws_profile, ids=tile_ids_filter)
            for loc in imagery_locs
        ]

    palette = make_palette(config["classes"][0]["color"])

    # don't track tensors with autograd during prediction
    with torch.no_grad():
        for dataset, imageloc in zip(allImageryDatasets, imagery_locs):
            print("Prediction: {}".format(imageloc))
            imageloc_path = imageloc.replace("/", ":")  # don't recreate directory structure when saving
            loader = DataLoader(dataset, batch_size=batch_size, num_workers=args.workers)

            for tiles, images in tqdm(loader, desc="Eval", unit="batch", ascii=True):
                tiles = list(zip(tiles[0], tiles[1], tiles[2]))
                images = images.to(device)
                outputs = net(images)

                for tile, prob in zip(tiles, outputs):
                    tile = Tile(tile[0].item(), tile[1].item(), tile[2].item())
                    savedir = args.preds

                    # threshold the network output into a binary mask per pixel
                    image = (prob > args.threshold).cpu().numpy().astype(np.uint8)

                    if args.buffer:
                        image = allImageryDatasets[0].unbuffer(image)
                    image = image.squeeze()

                    _write_png(tile, image, os.path.join(savedir, imageloc_path), palette)
                    if args.create_tif:
                        _write_tif(tile, image, os.path.join(savedir, imageloc_path))
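# The thresholding step above, in isolation: a single-channel model output is
# compared against a fixed cutoff to yield a {0, 1} uint8 mask. The threshold
# value is illustrative.
def _example_threshold_mask(threshold=0.5):
    prob = torch.rand(1, 256, 256)  # one tile: C, H, W
    return (prob > threshold).cpu().numpy().astype(np.uint8).squeeze()  # (H, W)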
def main(args):
    if (args.geojson and args.postgis) or (not args.geojson and not args.postgis):
        sys.exit("ERROR: Input features to rasterize must be either GeoJSON or PostGIS")

    if args.postgis and not args.pg_dsn:
        sys.exit("ERROR: With PostGIS input features, --pg_dsn must be provided")

    config = load_config(args.config)
    check_classes(config)
    palette = make_palette(*[classe["color"] for classe in config["classes"]], complementary=True)
    burn_value = 1

    args.out = os.path.expanduser(args.out)
    os.makedirs(args.out, exist_ok=True)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    def geojson_parse_polygon(zoom, srid, feature_map, polygon, i):
        try:
            if srid != 4326:
                polygon = [xy for xy in geojson_reproject({"type": "feature", "geometry": polygon}, srid, 4326)][0]

            for i, ring in enumerate(polygon["coordinates"]):  # GeoJSON coordinates could be N dimensionals
                polygon["coordinates"][i] = [[x, y] for point in ring for x, y in zip([point[0]], [point[1]])]

            if polygon["coordinates"]:
                for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=zoom):
                    feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def geojson_parse_geometry(zoom, srid, feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = geojson_parse_polygon(zoom, srid, feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = geojson_parse_polygon(zoom, srid, feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non surfacic geometry type, skipping feature {}".format(geometry["type"], i))

        return feature_map

    if args.geojson:
        try:
            tiles = [tile for tile in tiles_from_csv(os.path.expanduser(args.cover))]
            zoom = tiles[0].z
            assert not [tile for tile in tiles if tile.z != zoom]
        except:
            sys.exit("ERROR: Inconsistent cover {}".format(args.cover))

        feature_map = collections.defaultdict(list)

        log.log("RoboSat.pink - rasterize - Compute spatial index")
        for geojson_file in args.geojson:
            with open(os.path.expanduser(geojson_file)) as geojson:
                try:
                    feature_collection = json.load(geojson)
                except:
                    sys.exit("ERROR: {} is not a valid JSON file.".format(geojson_file))

                try:
                    crs_mapping = {"CRS84": "4326", "900913": "3857"}
                    srid = feature_collection["crs"]["properties"]["name"].split(":")[-1]
                    srid = int(srid) if srid not in crs_mapping else int(crs_mapping[srid])
                except:
                    srid = int(4326)

                for i, feature in enumerate(tqdm(feature_collection["features"], ascii=True, unit="feature")):
                    try:
                        if feature["geometry"]["type"] == "GeometryCollection":
                            for geometry in feature["geometry"]["geometries"]:
                                feature_map = geojson_parse_geometry(zoom, srid, feature_map, geometry, i)
                        else:
                            feature_map = geojson_parse_geometry(zoom, srid, feature_map, feature["geometry"], i)
                    except:
                        sys.exit("ERROR: Unable to parse {} file. Seems not a valid GEOJSON file.".format(geojson_file))

        log.log("RoboSat.pink - rasterize - rasterizing tiles from {} on cover {}".format(args.geojson, args.cover))
        with open(os.path.join(os.path.expanduser(args.out), "instances.cover"), mode="w") as cover:
            for tile in tqdm(list(tiles_from_csv(os.path.expanduser(args.cover))), ascii=True, unit="tile"):
                try:
                    if tile in feature_map:
                        cover.write("{},{},{} {}{}".format(tile.x, tile.y, tile.z, len(feature_map[tile]), os.linesep))
                        out = geojson_tile_burn(tile, feature_map[tile], 4326, args.ts, burn_value)
                    else:
                        cover.write("{},{},{} {}{}".format(tile.x, tile.y, tile.z, 0, os.linesep))
                        out = np.zeros(shape=(args.ts, args.ts), dtype=np.uint8)

                    tile_label_to_file(args.out, tile, palette, out)
                except:
                    log.log("Warning: Unable to rasterize tile. Skipping {}".format(str(tile)))

    if args.postgis:
        try:
            pg_conn = psycopg2.connect(args.pg_dsn)
            pg = pg_conn.cursor()
        except Exception:
            sys.exit("Unable to connect PostgreSQL: {}".format(args.pg_dsn))

        log.log("RoboSat.pink - rasterize - rasterizing tiles from PostGIS on cover {}".format(args.cover))
        log.log(" SQL {}".format(args.postgis))

        try:
            pg.execute("SELECT ST_Srid(geom) AS srid FROM ({} LIMIT 1) AS sub".format(args.postgis))
            srid = pg.fetchone()[0]
        except Exception:
            sys.exit("Unable to retrieve geometry SRID.")

        for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
            w, s, e, n = mercantile.bounds(tile)  # mercantile.bounds returns (west, south, east, north)
            raster = np.zeros((args.ts, args.ts))

            query = """
            WITH
              bbox      AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), {}  ) AS bbox),
              bbox_merc AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), 3857) AS bbox),

              rast_a    AS (SELECT ST_AddBand(
                                     ST_SetSRID(
                                       ST_MakeEmptyRaster({}, {}, ST_Xmin(bbox), ST_Ymax(bbox), (ST_YMax(bbox) - ST_YMin(bbox)) / {}),
                                     3857),
                                   '8BUI'::text, 0) AS rast
                            FROM bbox_merc),

              features  AS (SELECT ST_Union(ST_Transform(ST_Force2D(geom), 3857)) AS geom
                            FROM ({}) AS sub, bbox
                            WHERE ST_Intersects(geom, bbox)),

              rast_b    AS (SELECT ST_AsRaster(geom, rast, '8BUI', {}) AS rast
                            FROM features, rast_a
                            WHERE NOT ST_IsEmpty(geom))

            SELECT ST_AsBinary(ST_MapAlgebra(rast_a.rast, rast_b.rast, '{}', NULL, 'FIRST')) AS wkb FROM rast_a, rast_b
            """.format(w, s, e, n, srid, w, s, e, n, args.ts, args.ts, args.ts, args.postgis, burn_value, burn_value)

            try:
                pg.execute(query)
                row = pg.fetchone()
                if row:
                    raster = np.squeeze(wkb_to_numpy(io.BytesIO(row[0])), axis=2)
            except Exception:
                log.log("Warning: Invalid geometries, skipping {}".format(tile))
                pg_conn = psycopg2.connect(args.pg_dsn)
                pg = pg_conn.cursor()

            try:
                tile_label_to_file(args.out, tile, palette, raster)
            except:
                log.log("Warning: Unable to rasterize tile. Skipping {}".format(str(tile)))

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
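# The spatial index above, in isolation: burntiles maps a polygon to the
# slippy-map tiles it covers, and a defaultdict groups features per tile for
# O(1) lookup while rasterizing. The square ring is a hypothetical lon/lat polygon.
def _example_feature_map(zoom=18):
    ring = [[0.0, 0.0], [0.001, 0.0], [0.001, 0.001], [0.0, 0.001], [0.0, 0.0]]
    feature = {"type": "feature", "geometry": {"type": "Polygon", "coordinates": [ring]}}
    feature_map = collections.defaultdict(list)
    for tile in burntiles.burn([feature], zoom=zoom):
        feature_map[mercantile.Tile(*tile)].append(feature)
    return feature_map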
def main(args):
    config = load_config(args.config)
    colors = [classe["color"] for classe in config["classes"]]
    tile_size = args.tile_size

    try:
        raster = rasterio_open(args.raster)
        w, s, e, n = bounds = transform_bounds(raster.crs, "EPSG:4326", *raster.bounds)
        transform, _, _ = calculate_default_transform(raster.crs, "EPSG:3857", raster.width, raster.height, *bounds)
    except:
        sys.exit("Error: Unable to load raster or deal with its projection")

    tiles = [mercantile.Tile(x=x, y=y, z=z) for x, y, z in mercantile.tiles(w, s, e, n, args.zoom)]
    tiles_nodata = []

    for tile in tqdm(tiles, desc="Tiling", unit="tile", ascii=True):
        w, s, e, n = tile_bounds = mercantile.xy_bounds(tile)

        # Inspired by Rio-Tiler, cf: https://github.com/mapbox/rio-tiler/pull/45
        warp_vrt = WarpedVRT(
            raster,
            crs="EPSG:3857",
            resampling=Resampling.bilinear,
            add_alpha=False,
            transform=from_bounds(*tile_bounds, tile_size, tile_size),
            width=math.ceil((e - w) / transform.a),
            height=math.ceil((s - n) / transform.e),
        )
        data = warp_vrt.read(out_shape=(len(raster.indexes), tile_size, tile_size), window=warp_vrt.window(w, s, e, n))

        # If no_data is set, remove all tiles with at least one whole border filled only with no_data (on all bands)
        if args.no_data is not None and (
            np.all(data[:, 0, :] == args.no_data)
            or np.all(data[:, -1, :] == args.no_data)
            or np.all(data[:, :, 0] == args.no_data)
            or np.all(data[:, :, -1] == args.no_data)
        ):
            tiles_nodata.append(tile)
            continue

        C, W, H = data.shape

        os.makedirs(os.path.join(args.out, str(args.zoom), str(tile.x)), exist_ok=True)
        path = os.path.join(args.out, str(args.zoom), str(tile.x), str(tile.y))

        if args.type == "label":
            assert C == 1, "Error: Label raster input should be 1 band"
            ext = "png"
            img = Image.fromarray(np.squeeze(data, axis=0), mode="P")
            img.putpalette(make_palette(colors[0], colors[1]))
            img.save("{}.{}".format(path, ext), optimize=True)

        elif args.type == "image":
            assert C == 1 or C == 3, "Error: Image raster input should be either 1 or 3 bands"

            # GeoTiff could be 16 or 32bits
            if data.dtype == "uint16":
                data = np.uint8(data / 256)
            elif data.dtype == "uint32":
                data = np.uint8(data / (256 * 256))

            if C == 1:
                ext = "png"
                Image.fromarray(np.squeeze(data, axis=0), mode="L").save("{}.{}".format(path, ext), optimize=True)
            elif C == 3:
                ext = "webp"
                Image.fromarray(np.moveaxis(data, 0, 2), mode="RGB").save("{}.{}".format(path, ext), optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile in tiles if tile not in tiles_nodata]
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        web_ui(args.out, base_url, tiles, tiles, ext, template)
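# The border test above, in isolation: a tile is dropped when any complete
# edge row or column equals the no-data value across all bands, which catches
# tiles hanging over the raster's edge after warping.
def _example_has_nodata_border(data, no_data=0):
    # data: (C, H, W) array, e.g. np.zeros((3, 256, 256), dtype=np.uint8)
    return bool(
        np.all(data[:, 0, :] == no_data) or np.all(data[:, -1, :] == no_data)
        or np.all(data[:, :, 0] == no_data) or np.all(data[:, :, -1] == no_data)
    )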
def main(args):
    config = load_config(args.config)
    tile_size = args.tile_size if args.tile_size else config["model"]["tile_size"]
    colors = [classe["color"] for classe in config["classes"]]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.cover))

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    def parse_polygon(feature_map, polygon, i):
        try:
            for i, ring in enumerate(polygon["coordinates"]):  # GeoJSON coordinates could be N dimensionals
                polygon["coordinates"][i] = [[x, y] for point in ring for x, y in zip([point[0]], [point[1]])]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def parse_geometry(feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = parse_polygon(feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = parse_polygon(feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non surfacic geometry type, skipping feature {}".format(geometry["type"], i))

        return feature_map

    for feature in args.features:
        with open(feature) as f:
            fc = json.load(f)
            for i, feature in enumerate(tqdm(fc["features"], ascii=True, unit="feature")):
                if feature["geometry"]["type"] == "GeometryCollection":
                    for geometry in feature["geometry"]["geometries"]:
                        feature_map = parse_geometry(feature_map, geometry, i)
                else:
                    feature_map = parse_geometry(feature_map, feature["geometry"], i)

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], tile_size)
        else:
            out = np.zeros(shape=(tile_size, tile_size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)
        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        # Merge with any label tile already on disk, keeping the per-pixel maximum.
        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")
        out.putpalette(complementary_palette(make_palette(colors[0], colors[1])))
        out.save(out_path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
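# The merge step above, in isolation: when a label tile already exists on
# disk, the fresh burn is combined with it via a per-pixel maximum, so
# features from several GeoJSON inputs accumulate instead of overwriting.
def _example_merge_masks():
    prev = np.array([[0, 1], [0, 0]], dtype=np.uint8)
    new = np.array([[0, 0], [1, 0]], dtype=np.uint8)
    return np.maximum(new, prev)  # [[0, 1], [1, 0]]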
def main(args):
    config = load_config(args.config)
    check_channels(config)
    check_classes(config)

    args.workers = torch.cuda.device_count() * 2 if torch.cuda.is_available() and not args.workers else args.workers

    log = Logs(os.path.join(args.out, "log"))

    if torch.cuda.is_available():
        log.log("RoboSat.pink - predict on {} GPUs, with {} workers".format(torch.cuda.device_count(), args.workers))
        log.log("(Torch:{} Cuda:{} CudNN:{})".format(torch.__version__, torch.version.cuda, torch.backends.cudnn.version()))
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        log.log("RoboSat.pink - predict on CPU, with {} workers".format(args.workers))
        device = torch.device("cpu")

    try:
        chkpt = torch.load(args.checkpoint, map_location=device)
        assert chkpt["producer_name"] == "RoboSat.pink"
        model_module = import_module("robosat_pink.models.{}".format(chkpt["nn"].lower()))
        nn = getattr(model_module, chkpt["nn"])(chkpt["shape_in"], chkpt["shape_out"]).to(device)
        nn = torch.nn.DataParallel(nn)
        nn.load_state_dict(chkpt["state_dict"])
        nn.eval()
    except:
        sys.exit("ERROR: Unable to load {} checkpoint.".format(args.checkpoint))

    log.log("Model {} - UUID: {}".format(chkpt["nn"], chkpt["uuid"]))

    try:
        loader_module = import_module("robosat_pink.loaders.{}".format(chkpt["loader"].lower()))
        loader_predict = getattr(loader_module, chkpt["loader"])(config, chkpt["shape_in"][1:3], args.tiles, mode="predict")
    except:
        sys.exit("ERROR: Unable to load {} data loader.".format(chkpt["loader"]))

    loader = DataLoader(loader_predict, batch_size=args.bs, num_workers=args.workers)
    palette = make_palette(config["classes"][0]["color"], config["classes"][1]["color"])

    with torch.no_grad():  # don't track tensors with autograd during prediction
        for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            images = images.to(device)

            try:
                outputs = nn(images)
                probs = torch.nn.functional.softmax(outputs, dim=1).data.cpu().numpy()
            except:
                log.log("WARNING: Skipping batch:")
                for tile in tiles:
                    log.log(" - {}".format(str(tile)))
                continue

            for tile, prob in zip(tiles, probs):
                try:
                    x, y, z = list(map(int, tile))
                    mask = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
                    tile_label_to_file(args.out, mercantile.Tile(x, y, z), palette, mask)
                except:
                    log.log("WARNING: Skipping tile {}".format(str(tile)))

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.out)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
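# The checkpoint contract above, in isolation: prediction relies on metadata
# written at training time. A sketch of the keys this main() reads; every
# value below is illustrative, not taken from a real checkpoint.
def _example_checkpoint_metadata():
    return {
        "producer_name": "RoboSat.pink",
        "uuid": "00000000-0000-0000-0000-000000000000",
        "nn": "Albunet",          # class looked up in robosat_pink.models.albunet
        "loader": "SemSegTiles",  # class looked up in robosat_pink.loaders.semsegtiles
        "shape_in": (3, 512, 512),   # C, H, W
        "shape_out": (2, 512, 512),
        "state_dict": {},            # real checkpoints store the torch state dict here
    }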