def main(args):
    config = load_config(args.config)
    config["dataset"]["path"] = args.dataset if args.dataset else config["dataset"]["path"]
    config["model"]["lr"] = args.lr if args.lr else config["model"]["lr"]
    config["model"]["epochs"] = args.epochs if args.epochs else config["model"]["epochs"]
    config["model"]["batch_size"] = args.batch_size if args.batch_size else config["model"]["batch_size"]

    log = Logs(os.path.join(args.out, "log"))

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
        log.log("RoboSat - training on {} GPUs, with {} workers".format(torch.cuda.device_count(), args.workers))
    else:
        device = torch.device("cpu")
        log.log("RoboSat - training on CPU, with {} workers".format(args.workers))

    num_classes = len(config["classes"])
    num_channels = 0
    for channel in config["channels"]:
        num_channels += len(channel["bands"])
    pretrained = config["model"]["pretrained"]
    encoder = config["model"]["encoder"]

    models = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.models.__file__)])]
    if config["model"]["name"] not in models:
        sys.exit("Unknown model, those available are {}".format(models))

    model_module = import_module("robosat_pink.models.{}".format(config["model"]["name"]))
    net = getattr(model_module, config["model"]["name"].title())(
        num_classes=num_classes, num_channels=num_channels, encoder=encoder, pretrained=pretrained
    ).to(device)
    net = torch.nn.DataParallel(net)
    optimizer = Adam(net.parameters(), lr=config["model"]["lr"], weight_decay=config["model"]["decay"])

    resume = 0
    if args.checkpoint:

        def map_location(storage, _):
            return storage.cuda() if torch.cuda.is_available() else storage.cpu()

        # https://github.com/pytorch/pytorch/issues/7178
        chkpt = torch.load(args.checkpoint, map_location=map_location)
        net.load_state_dict(chkpt["state_dict"])
        log.log("Using checkpoint: {}".format(args.checkpoint))

        if args.resume:
            optimizer.load_state_dict(chkpt["optimizer"])
            resume = chkpt["epoch"]

    losses = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.losses.__file__)])]
    if config["model"]["loss"] not in losses:
        sys.exit("Unknown loss, those available are {}".format(losses))

    loss_module = import_module("robosat_pink.losses.{}".format(config["model"]["loss"]))
    criterion = getattr(loss_module, config["model"]["loss"].title())().to(device)

    train_loader, val_loader = get_dataset_loaders(config["dataset"]["path"], config, args.workers)

    if resume >= config["model"]["epochs"]:
        sys.exit("Error: Epoch {} set in {} already reached by the checkpoint provided".format(config["model"]["epochs"], args.config))

    log.log("")
    log.log("--- Input tensor from Dataset: {} ---".format(config["dataset"]["path"]))
    num_channel = 1
    for channel in config["channels"]:
        for band in channel["bands"]:
            log.log("Channel {}:\t\t {}[band: {}]".format(num_channel, channel["sub"], band))
            num_channel += 1

    log.log("")
    log.log("--- Hyper Parameters ---")
    log.log("Model:\t\t\t {}".format(config["model"]["name"]))
    log.log("Encoder model:\t\t {}".format(config["model"]["encoder"]))
    log.log("Loss function:\t\t {}".format(config["model"]["loss"]))
    log.log("ResNet pre-trained:\t {}".format(config["model"]["pretrained"]))
    log.log("Batch Size:\t\t {}".format(config["model"]["batch_size"]))
    log.log("Tile Size:\t\t {}".format(config["model"]["tile_size"]))
    log.log("Data Augmentation:\t {}".format(config["model"]["data_augmentation"]))
    log.log("Learning Rate:\t\t {}".format(config["model"]["lr"]))
    log.log("Weight Decay:\t\t {}".format(config["model"]["decay"]))
    log.log("")

    for epoch in range(resume, config["model"]["epochs"]):
        log.log("---")
        log.log("Epoch: {}/{}".format(epoch + 1, config["model"]["epochs"]))

        train_hist = train(train_loader, num_classes, device, net, optimizer, criterion)
        log.log(
            "Train loss: {:.4f}, mIoU: {:.3f}, {} IoU: {:.3f}, MCC: {:.3f}".format(
                train_hist["loss"], train_hist["miou"], config["classes"][1]["title"], train_hist["fg_iou"], train_hist["mcc"]
            )
        )

        val_hist = validate(val_loader, num_classes, device, net, criterion)
        log.log(
            "Validate loss: {:.4f}, mIoU: {:.3f}, {} IoU: {:.3f}, MCC: {:.3f}".format(
                val_hist["loss"], val_hist["miou"], config["classes"][1]["title"], val_hist["fg_iou"], val_hist["mcc"]
            )
        )

        states = {"epoch": epoch + 1, "state_dict": net.state_dict(), "optimizer": optimizer.state_dict()}
        checkpoint_path = os.path.join(args.out, "checkpoint-{:05d}-of-{:05d}.pth".format(epoch + 1, config["model"]["epochs"]))
        torch.save(states, checkpoint_path)
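# A minimal sketch of the CLI-over-config override pattern used above, factored
# into a helper. The config keys ("lr", "epochs", "batch_size") are the ones
# read in main(); the helper name `override` is hypothetical and not part of
# the code base.
def override(config, section, key, value):
    """Replace config[section][key] with value when a truthy value is given."""
    if value:
        config[section][key] = value
    return config

# Usage sketch:
#   override(config, "model", "lr", args.lr)
#   override(config, "model", "epochs", args.epochs)
#   override(config, "model", "batch_size", args.batch_size)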
def main(args):
    tiles = list(tiles_from_csv(args.tiles))
    already_dl = 0
    dl = 0

    with requests.Session() as session:
        num_workers = args.rate

        os.makedirs(os.path.join(args.out), exist_ok=True)
        log = Logs(os.path.join(args.out, "log"), out=sys.stderr)
        log.log("Begin download from {}".format(args.url))

        progress = tqdm(total=len(tiles), ascii=True, unit="image")

        with futures.ThreadPoolExecutor(num_workers) as executor:

            def worker(tile):
                tick = time.monotonic()

                x, y, z = map(str, [tile.x, tile.y, tile.z])
                os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
                path = os.path.join(args.out, z, x, "{}.{}".format(y, args.ext))

                if os.path.isfile(path):  # already downloaded
                    progress.update()
                    return tile, None, True

                if args.type == "XYZ":
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "TMS":
                    tile.y = (2 ** tile.z) - tile.y - 1  # TMS counts rows from the bottom
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "WMS":
                    xmin, ymin, xmax, ymax = xy_bounds(tile)
                    url = args.url.format(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)

                res = tile_image_from_url(session, url, args.timeout)
                if not res:
                    return tile, url, False

                try:
                    cv2.imwrite(path, cv2.imdecode(np.frombuffer(res.read(), np.uint8), cv2.IMREAD_COLOR))
                    progress.update()
                except OSError:
                    return tile, url, False

                tock = time.monotonic()
                time_for_req = tock - tick
                time_per_worker = num_workers / args.rate
                if time_for_req < time_per_worker:
                    time.sleep(time_per_worker - time_for_req)

                return tile, url, True

            for tile, url, ok in executor.map(worker, tiles):
                if url and ok:
                    dl += 1
                elif not url and ok:
                    already_dl += 1
                else:
                    log.log("Warning:\n {} failed, skipping.\n {}\n".format(tile, url))

    if already_dl:
        log.log("Notice:\n {} tiles were already downloaded previously, and so skipped now.".format(already_dl))
    if already_dl + dl == len(tiles):
        log.log(" Coverage is fully downloaded.")

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        web_ui(args.out, base_url, tiles, tiles, args.ext, template)
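# The TMS branch above flips the y index: XYZ tiles count rows from the top of
# the grid, TMS from the bottom, so row y becomes 2**z - y - 1. A standalone
# check of that formula (tile numbers below are arbitrary examples):
def xyz_to_tms_y(y, z):
    return (2 ** z) - y - 1

assert xyz_to_tms_y(0, 1) == 1        # the top row becomes the bottom row at z=1
assert xyz_to_tms_y(341, 10) == 682   # 2**10 - 341 - 1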
def main(args):
    config = load_config(args.config)
    print(config)
    log = Logs(os.path.join(args.out, "log"))

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
        log.log("RoboSat - training on {} GPUs, with {} workers".format(torch.cuda.device_count(), args.workers))
    else:
        device = torch.device("cpu")
        log.log("RoboSat - training on CPU, with {} workers".format(args.workers))

    num_classes = len(config["classes"])
    num_channels = 0
    for channel in config["channels"]:
        num_channels += len(channel["bands"])
    pretrained = config["model"]["pretrained"]
    encoder = config["model"]["encoder"]

    models = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.models.__file__)])]
    if config["model"]["name"] not in models:
        sys.exit("Unknown model, those available are {}".format(models))

    model_module = import_module("robosat_pink.models.{}".format(config["model"]["name"]))
    net = getattr(model_module, config["model"]["name"].title())(
        num_classes=num_classes, num_channels=num_channels, encoder=encoder, pretrained=pretrained
    ).to(device)
    net = torch.nn.DataParallel(net)
    optimizer = Adam(net.parameters(), lr=config["model"]["lr"], weight_decay=config["model"]["decay"])

    resume = 0

    # Check the checkpoint situation and load it if necessary
    checkpoint = None  # no checkpoint
    if args.checkpoint:  # command line checkpoint
        checkpoint = args.checkpoint
    try:  # config file checkpoint
        checkpoint = config["checkpoint"]["path"]
    except KeyError:  # no checkpoint in config file
        pass

    S3_CHECKPOINT = False
    if checkpoint:
        if checkpoint.startswith("s3://"):  # load from S3
            S3_CHECKPOINT = True
            checkpoint = checkpoint[5:]
            sess = boto3.Session(profile_name=config["dataset"]["aws_profile"])
            fs = s3fs.S3FileSystem(session=sess)

        def map_location(storage, _):
            return storage.cuda() if torch.cuda.is_available() else storage.cpu()

        try:
            if S3_CHECKPOINT:
                with s3fs.S3File(fs, checkpoint, "rb") as C:
                    state = torch.load(io.BytesIO(C.read()), map_location=map_location)
            else:
                state = torch.load(checkpoint, map_location=map_location)
            optimizer.load_state_dict(state["optimizer"])
            net.load_state_dict(state["state_dict"])
            net.to(device)
        except FileNotFoundError:
            print("{} checkpoint not found.".format(checkpoint))

        log.log("Using checkpoint: {}".format(checkpoint))

    losses = [name for _, name, _ in pkgutil.iter_modules([os.path.dirname(robosat_pink.losses.__file__)])]
    if config["model"]["loss"] not in losses:
        sys.exit("Unknown loss, those available are {}".format(losses))

    loss_module = import_module("robosat_pink.losses.{}".format(config["model"]["loss"]))
    criterion = getattr(loss_module, config["model"]["loss"].title())().to(device)

    train_loader, val_loader = get_dataset_loaders(config, args.workers, idDir=args.out)

    if resume >= config["model"]["epochs"]:
        sys.exit("Error: Epoch {} set in {} already reached by the checkpoint provided".format(config["model"]["epochs"], args.config))

    log.log("")
    log.log("--- Input tensor from Dataset: {} ---".format(
        config["dataset"]["image_bucket"] + "/" + config["dataset"]["imagery_directory_regex"]))
    log.log("")
    log.log("--- Hyper Parameters ---")
    log.log("Model:\t\t\t {}".format(config["model"]["name"]))
    log.log("Encoder model:\t\t {}".format(config["model"]["encoder"]))
    log.log("Loss function:\t\t {}".format(config["model"]["loss"]))
    log.log("ResNet pre-trained:\t {}".format(config["model"]["pretrained"]))
    log.log("Batch Size:\t\t {}".format(config["model"]["batch_size"]))
    log.log("Tile Size:\t\t {}".format(config["model"]["tile_size"]))
    log.log("Data Augmentation:\t {}".format(config["model"]["data_augmentation"]))
    log.log("Learning Rate:\t\t {}".format(config["model"]["lr"]))
    log.log("Weight Decay:\t\t {}".format(config["model"]["decay"]))
    log.log("")

    for epoch in range(resume, config["model"]["epochs"]):
        log.log("---")
        log.log("Epoch: {}/{}".format(epoch + 1, config["model"]["epochs"]))

        train_hist = train(train_loader, num_classes, device, net, optimizer, criterion)
        log.log(
            "Train loss: {:.4f}, mIoU: {:.3f}, IoU: {:.3f}, precision: {:.3f}, recall: {:.3f}".format(
                train_hist["loss"], train_hist["miou"], train_hist["fg_iou"], train_hist["precision"], train_hist["recall"]
            )
        )

        val_hist = validate(val_loader, num_classes, device, net, criterion)
        log.log(
            "Validate loss: {:.4f}, mIoU: {:.3f}, IoU: {:.3f}, precision: {:.3f}, recall: {:.3f}".format(
                val_hist["loss"], val_hist["miou"], val_hist["fg_iou"], val_hist["precision"], val_hist["recall"]
            )
        )

        states = {"epoch": epoch + 1, "state_dict": net.state_dict(), "optimizer": optimizer.state_dict()}
        checkpoint_path = os.path.join(args.out, "checkpoint-{:05d}-of-{:05d}.pth".format(epoch + 1, config["model"]["epochs"]))
        torch.save(states, checkpoint_path)
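# For reference, a minimal sketch of the config entries the checkpoint logic
# above reads (dataset.aws_profile, dataset.image_bucket,
# dataset.imagery_directory_regex and checkpoint.path). The values are
# placeholders, not a real configuration:
config_sketch = {
    "dataset": {
        "aws_profile": "default",
        "image_bucket": "my-imagery-bucket",
        "imagery_directory_regex": "tiles/.*",
    },
    "checkpoint": {"path": "s3://my-bucket/checkpoints/checkpoint-00010.pth"},
}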
def main(args):
    args.out = os.path.expanduser(args.out)

    if not args.workers:
        args.workers = max(1, math.floor(os.cpu_count() * 0.5))

    print("RoboSat.pink - compare {} on CPU, with {} workers".format(args.mode, args.workers))

    if not args.masks or not args.labels:
        if args.mode == "list":
            sys.exit("ERROR: Parameters masks and labels are mandatory in list mode.")
        if args.minimum_fg > 0 or args.maximum_fg < 100 or args.minimum_qod > 0 or args.maximum_qod < 100:
            sys.exit("ERROR: Parameters masks and labels are mandatory for QoD filtering.")

    try:
        if args.images:
            tiles = [tile for tile, _ in tiles_from_slippy_map(args.images[0])]
            for image in args.images[1:]:
                assert sorted(tiles) == sorted([tile for tile, _ in tiles_from_slippy_map(image)])

        if args.labels and args.masks:
            tiles_masks = [tile for tile, _ in tiles_from_slippy_map(args.masks)]
            tiles_labels = [tile for tile, _ in tiles_from_slippy_map(args.labels)]
            if args.images:
                assert sorted(tiles) == sorted(tiles_masks) == sorted(tiles_labels)
            else:
                assert sorted(tiles_masks) == sorted(tiles_labels)
                tiles = tiles_masks
    except Exception:
        sys.exit("ERROR: inconsistent input coverage")

    tiles_list = []
    tiles_compare = []
    progress = tqdm(total=len(tiles), ascii=True, unit="tile")
    log = False if args.geojson else Logs(os.path.join(args.out, "log"))

    with futures.ThreadPoolExecutor(args.workers) as executor:

        def worker(tile):
            x, y, z = list(map(str, tile))

            if args.masks and args.labels:
                try:
                    dist, fg_ratio, qod = compare(args.masks, args.labels, tile)
                except Exception:
                    progress.update()
                    return False, tile

                if not args.minimum_fg <= fg_ratio <= args.maximum_fg or not args.minimum_qod <= qod <= args.maximum_qod:
                    progress.update()
                    return True, tile

            tiles_compare.append(tile)

            if args.mode == "side":
                for i, root in enumerate(args.images):
                    img = tile_image_from_file(tile_from_slippy_map(root, x, y, z)[1])

                    if i == 0:
                        side = np.zeros((img.shape[0], img.shape[1] * len(args.images), 3))
                        side = np.swapaxes(side, 0, 1) if args.vertical else side
                        image_shape = img.shape
                    else:
                        assert image_shape[0:2] == img.shape[0:2], "Inconsistent image size to compare"

                    if args.vertical:
                        side[i * image_shape[0] : (i + 1) * image_shape[0], :, :] = img
                    else:
                        side[:, i * image_shape[0] : (i + 1) * image_shape[0], :] = img

                tile_image_to_file(args.out, tile, np.uint8(side))

            elif args.mode == "stack":
                for i, root in enumerate(args.images):
                    tile_image = tile_image_from_file(tile_from_slippy_map(root, x, y, z)[1])

                    if i == 0:
                        image_shape = tile_image.shape[0:2]
                        stack = tile_image / len(args.images)
                    else:
                        assert image_shape == tile_image.shape[0:2], "Inconsistent image size to compare"
                        stack = stack + (tile_image / len(args.images))

                tile_image_to_file(args.out, tile, np.uint8(stack))

            elif args.mode == "list":
                tiles_list.append([tile, fg_ratio, qod])

            progress.update()
            return True, tile

        for ok, tile in executor.map(worker, tiles):
            if not ok and log:
                log.log("Warning: skipping {}".format(str(tile)))

    if args.mode == "list":
        with open(args.out, mode="w") as out:
            if args.geojson:
                out.write('{"type":"FeatureCollection","features":[')

            first = True
            for tile_list in tiles_list:
                tile, fg_ratio, qod = tile_list
                x, y, z = list(map(str, tile))
                if args.geojson:
                    prop = '"properties":{{"x":{},"y":{},"z":{},"fg":{:.1f},"qod":{:.1f}}}'.format(x, y, z, fg_ratio, qod)
                    geom = '"geometry":{}'.format(json.dumps(feature(tile, precision=6)["geometry"]))
                    out.write('{}{{"type":"Feature",{},{}}}'.format("," if not first else "", geom, prop))
                    first = False
                else:
                    out.write("{},{},{}\t{:.1f}\t{:.1f}{}".format(x, y, z, fg_ratio, qod, os.linesep))

            if args.geojson:
                out.write("]}")

    base_url = args.web_ui_base_url if args.web_ui_base_url else "./"

    if args.mode == "side" and not args.no_web_ui:
        template = "compare.html" if not args.web_ui_template else args.web_ui_template
        web_ui(args.out, base_url, None, tiles_compare, args.format, template)

    if args.mode == "stack" and not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.images[0])]
        web_ui(args.out, base_url, tiles, tiles_compare, args.format, template)
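# The "side" mode above lays tiles out next to each other along one axis. A
# minimal numpy sketch of the same idea, with two fake 2x2 RGB tiles
# (values are made up, for illustration only):
import numpy as np

a = np.zeros((2, 2, 3), dtype=np.uint8)       # first image tile
b = np.full((2, 2, 3), 255, dtype=np.uint8)   # second image tile
side = np.zeros((2, 2 * 2, 3), dtype=np.uint8)
side[:, 0:2, :] = a
side[:, 2:4, :] = b
assert side.shape == (2, 4, 3)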
def main(args):
    if (args.geojson and args.postgis) or (not args.geojson and not args.postgis):
        sys.exit("Input features to rasterize must be either GeoJSON or PostGIS")

    config = load_config(args.config)
    tile_size = args.tile_size if args.tile_size else config["model"]["tile_size"]
    colors = [classe["color"] for classe in config["classes"]]
    burn_value = 1

    os.makedirs(args.out, exist_ok=True)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    def geojson_parse_polygon(zoom, feature_map, polygon, i):
        try:
            for j, ring in enumerate(polygon["coordinates"]):  # GeoJSON coordinates could be N dimensional
                polygon["coordinates"][j] = [[x, y] for point in ring for x, y in zip([point[0]], [point[1]])]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=zoom):
                feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def geojson_parse_geometry(zoom, feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = geojson_parse_polygon(zoom, feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = geojson_parse_polygon(zoom, feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non-surfacic geometry type, skipping feature {}".format(geometry["type"], i))

        return feature_map

    if args.geojson:
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        zoom = tiles[0].z
        if [tile for tile in tiles if tile.z != zoom]:
            sys.exit("With GeoJSON input, all tile z values have to be the same in the csv cover file.")

        feature_map = collections.defaultdict(list)

        # Compute a spatial-index-like mapping from tiles to features
        for geojson_file in args.geojson:
            with open(geojson_file) as geojson:
                feature_collection = json.load(geojson)
                for i, feature in enumerate(tqdm(feature_collection["features"], ascii=True, unit="feature")):

                    if feature["geometry"]["type"] == "GeometryCollection":
                        for geometry in feature["geometry"]["geometries"]:
                            feature_map = geojson_parse_geometry(zoom, feature_map, geometry, i)
                    else:
                        feature_map = geojson_parse_geometry(zoom, feature_map, feature["geometry"], i)

        # Rasterize tiles
        for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
            if tile in feature_map:
                out = geojson_tile_burn(tile, feature_map[tile], tile_size, burn_value)
            else:
                out = np.zeros(shape=(tile_size, tile_size), dtype=np.uint8)

            write_tile(args.out, tile, colors, out)

    if args.postgis:
        try:
            pg_conn = psycopg2.connect(config["dataset"]["pg_dsn"])
            pg = pg_conn.cursor()
        except Exception:
            sys.exit("Unable to connect PostgreSQL: {}".format(config["dataset"]["pg_dsn"]))

        try:
            pg.execute("SELECT ST_Srid(geom) AS srid FROM ({} LIMIT 1) AS sub".format(args.postgis))
            srid = pg.fetchone()[0]
        except Exception:
            sys.exit("Unable to retrieve geometry SRID.")

        for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
            w, s, e, n = mercantile.bounds(tile)  # mercantile.bounds returns (west, south, east, north)
            raster = np.zeros((tile_size, tile_size))

            query = """
            WITH
              bbox      AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), {}   ) AS bbox),
              bbox_merc AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), 3857) AS bbox),

              rast_a AS (SELECT ST_AddBand(
                           ST_SetSRID(
                             ST_MakeEmptyRaster({}, {}, ST_Xmin(bbox), ST_Ymax(bbox), (ST_YMax(bbox) - ST_YMin(bbox)) / {}),
                           3857),
                         '8BUI'::text, 0) AS rast
                         FROM bbox_merc),

              features AS (SELECT ST_Union(ST_Transform(ST_Force2D(geom), 3857)) AS geom
                           FROM ({}) AS sub, bbox
                           WHERE ST_Intersects(geom, bbox)),

              rast_b AS (SELECT ST_AsRaster(geom, rast, '8BUI', {}) AS rast
                         FROM features, rast_a
                         WHERE NOT ST_IsEmpty(geom))

            SELECT ST_AsBinary(ST_MapAlgebra(rast_a.rast, rast_b.rast, '{}', NULL, 'FIRST')) AS wkb FROM rast_a, rast_b
            """.format(w, s, e, n, srid, w, s, e, n, tile_size, tile_size, tile_size, args.postgis, burn_value, burn_value)

            try:
                pg.execute(query)
                row = pg.fetchone()
                if row:
                    raster = np.squeeze(wkb_to_numpy(io.BytesIO(row[0])), axis=2)
            except Exception:
                log.log("Warning: Invalid geometries, skipping {}".format(tile))
                pg_conn = psycopg2.connect(config["dataset"]["pg_dsn"])
                pg = pg_conn.cursor()

            write_tile(args.out, tile, colors, raster)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
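# feature_map above acts as a poor man's spatial index: every polygon is burnt
# to the web-mercator tiles it covers, so rasterization later is a plain dict
# lookup per tile. A minimal sketch, assuming the same supermercado burntiles
# helper this module relies on (the polygon coordinates are made up):
import collections
import mercantile
from supermercado import burntiles

polygon = {"type": "Polygon",
           "coordinates": [[[2.0, 48.0], [2.001, 48.0], [2.001, 48.001], [2.0, 48.001], [2.0, 48.0]]]}
feature_map = collections.defaultdict(list)
for t in burntiles.burn([{"type": "Feature", "geometry": polygon}], zoom=18):
    feature_map[mercantile.Tile(*t)].append(polygon)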
def main(args):
    config = load_config(args.config)

    args.out = os.path.expanduser(args.out)
    args.workers = torch.cuda.device_count() * 2 if torch.device("cuda") and not args.workers else args.workers

    config["model"]["loader"] = args.loader if args.loader else config["model"]["loader"]
    config["model"]["bs"] = args.bs if args.bs else config["model"]["bs"]
    config["model"]["lr"] = args.lr if args.lr else config["model"]["lr"]
    config["model"]["ts"] = args.ts if args.ts else config["model"]["ts"]
    config["model"]["nn"] = args.nn if args.nn else config["model"]["nn"]
    config["model"]["loss"] = args.loss if args.loss else config["model"]["loss"]
    config["model"]["da"] = args.da if args.da else config["model"]["da"]
    config["model"]["dap"] = args.dap if args.dap else config["model"]["dap"]

    check_classes(config)
    check_channels(config)
    check_model(config)

    if not os.path.isdir(os.path.expanduser(args.dataset)):
        sys.exit("ERROR: dataset {} is not a directory".format(args.dataset))

    log = Logs(os.path.join(args.out, "log"))

    if torch.cuda.is_available():
        log.log("RoboSat.pink - training on {} GPUs, with {} workers".format(torch.cuda.device_count(), args.workers))
        log.log("(Torch:{} Cuda:{} CudNN:{})".format(torch.__version__, torch.version.cuda, torch.backends.cudnn.version()))
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        log.log("RoboSat.pink - training on CPU, with {} workers - (Torch:{})".format(args.workers, torch.__version__))
        log.log("WARNING: Are you really sure about not training on GPU ?")
        device = torch.device("cpu")

    try:
        loader = import_module("robosat_pink.loaders.{}".format(config["model"]["loader"].lower()))
        loader_train = getattr(loader, config["model"]["loader"])(config, config["model"]["ts"], os.path.join(args.dataset, "training"), "train")
        loader_val = getattr(loader, config["model"]["loader"])(config, config["model"]["ts"], os.path.join(args.dataset, "validation"), "train")
    except Exception:
        sys.exit("ERROR: Unable to load data loaders")

    try:
        model_module = import_module("robosat_pink.models.{}".format(config["model"]["nn"].lower()))
    except Exception:
        sys.exit("ERROR: Unable to load {} model".format(config["model"]["nn"]))

    nn = getattr(model_module, config["model"]["nn"])(loader_train.shape_in, loader_train.shape_out, config["model"]["pretrained"]).to(device)
    nn = torch.nn.DataParallel(nn)
    optimizer = Adam(nn.parameters(), lr=config["model"]["lr"])

    resume = 0
    if args.checkpoint:
        try:
            chkpt = torch.load(os.path.expanduser(args.checkpoint), map_location=device)
            nn.load_state_dict(chkpt["state_dict"])
            log.log("Using checkpoint: {}".format(args.checkpoint))
        except Exception:
            sys.exit("ERROR: Unable to load {} checkpoint".format(args.checkpoint))

        if args.resume:
            optimizer.load_state_dict(chkpt["optimizer"])
            resume = chkpt["epoch"]
            if resume >= args.epochs:
                sys.exit("ERROR: Epoch {} already reached by the given checkpoint".format(args.epochs))

    try:
        loss_module = import_module("robosat_pink.losses.{}".format(config["model"]["loss"].lower()))
        criterion = getattr(loss_module, config["model"]["loss"])().to(device)
    except Exception:
        sys.exit("ERROR: Unable to load {} loss".format(config["model"]["loss"]))

    bs = config["model"]["bs"]
    train_loader = DataLoader(loader_train, batch_size=bs, shuffle=True, drop_last=True, num_workers=args.workers)
    val_loader = DataLoader(loader_val, batch_size=bs, shuffle=False, drop_last=True, num_workers=args.workers)

    log.log("--- Input tensor from Dataset: {} ---".format(args.dataset))
    num_channel = 1  # 1-based numbering
    for channel in config["channels"]:
        for band in channel["bands"]:
            log.log("Channel {}:\t\t {}[band: {}]".format(num_channel, channel["name"], band))
            num_channel += 1

    log.log("--- Hyper Parameters ---")
    for hp in config["model"]:
        log.log("{}{}".format(hp.ljust(25, " "), config["model"][hp]))

    for epoch in range(resume, args.epochs):
        UUID = uuid.uuid1()
        log.log("---{}Epoch: {}/{} -- UUID: {}".format(os.linesep, epoch + 1, args.epochs, UUID))

        train(train_loader, config, log, device, nn, optimizer, criterion)
        validate(val_loader, config, log, device, nn, criterion)

        try:  # https://github.com/pytorch/pytorch/issues/9176
            nn_doc = nn.module.doc
            nn_version = nn.module.version
        except AttributeError:
            nn_doc = nn.doc
            nn_version = nn.version

        states = {
            "uuid": UUID,
            "model_version": nn_version,
            "producer_name": "RoboSat.pink",
            "producer_version": "0.4.0",
            "model_licence": "MIT",
            "domain": "pink.RoboSat",  # reverse-DNS
            "doc_string": nn_doc,
            "shape_in": loader_train.shape_in,
            "shape_out": loader_train.shape_out,
            "state_dict": nn.state_dict(),
            "epoch": epoch + 1,
            "nn": config["model"]["nn"],
            "optimizer": optimizer.state_dict(),
            "loader": config["model"]["loader"],
        }

        checkpoint_path = os.path.join(args.out, "checkpoint-{:05d}.pth".format(epoch + 1))
        try:
            torch.save(states, checkpoint_path)
        except Exception:
            sys.exit("ERROR: Unable to save checkpoint {}".format(checkpoint_path))
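# The try/except around nn.module above follows the workaround discussed in
# https://github.com/pytorch/pytorch/issues/9176: once a model is wrapped in
# torch.nn.DataParallel, its own attributes live under .module. A minimal
# sketch with a throwaway model (Tiny is hypothetical, not part of the code base):
import torch

class Tiny(torch.nn.Module):
    version = "1"
    doc = "tiny model"

    def forward(self, x):
        return x

wrapped = torch.nn.DataParallel(Tiny())
assert wrapped.module.version == "1" and wrapped.module.doc == "tiny model"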
def main(args):
    try:
        tiles = list(tiles_from_csv(args.cover))
    except Exception:
        sys.exit("ERROR: unable to read cover file: {}".format(args.cover))

    if not args.workers:
        args.workers = max(1, math.floor(os.cpu_count() * 0.5))

    try:
        os.makedirs(os.path.expanduser(args.out), exist_ok=True)
    except Exception:
        sys.exit("ERROR: unable to create output dir: {}".format(args.out))

    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)
    log.log("RoboSat.pink - download with {} workers, at max {} req/s, from: {}".format(args.workers, args.rate, args.url))

    already_dl = 0
    dl = 0

    with requests.Session() as session:
        progress = tqdm(total=len(tiles), ascii=True, unit="image")

        with futures.ThreadPoolExecutor(args.workers) as executor:

            def worker(tile):
                tick = time.monotonic()
                progress.update()

                x, y, z = map(str, [tile.x, tile.y, tile.z])

                try:
                    os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
                except Exception:
                    return tile, None, False

                path = os.path.join(args.out, z, x, "{}.{}".format(y, args.format))
                if os.path.isfile(path):  # already downloaded
                    return tile, None, True

                if args.type == "XYZ":
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "TMS":
                    tile.y = (2 ** tile.z) - tile.y - 1  # TMS counts rows from the bottom
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "WMS":
                    xmin, ymin, xmax, ymax = xy_bounds(tile)
                    url = args.url.format(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)

                res = tile_image_from_url(session, url, args.timeout)
                if res is None:  # let's retry once
                    res = tile_image_from_url(session, url, args.timeout)
                    if res is None:
                        return tile, url, False

                try:
                    tile_image_to_file(args.out, tile, res)
                    # cv2.imwrite(path, cv2.imdecode(np.fromstring(res.read(), np.uint8), cv2.IMREAD_COLOR))
                except OSError:
                    return tile, url, False

                tock = time.monotonic()
                time_for_req = tock - tick
                time_per_worker = args.workers / args.rate
                if time_for_req < time_per_worker:
                    time.sleep(time_per_worker - time_for_req)

                return tile, url, True

            for tile, url, ok in executor.map(worker, tiles):
                if url and ok:
                    dl += 1
                elif not url and ok:
                    already_dl += 1
                else:
                    log.log("Warning:\n {} failed, skipping.\n {}\n".format(tile, url))

    if already_dl:
        log.log("Notice: {} tiles were already downloaded previously, and so skipped now.".format(already_dl))
    if already_dl + dl == len(tiles):
        log.log("Notice: Coverage is fully downloaded.")

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        web_ui(args.out, base_url, tiles, tiles, args.format, template)
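# Rate limiting above: each of the N worker threads pads its request so that a
# single request takes at least N / rate seconds, which keeps the whole pool
# near `rate` requests per second. Quick arithmetic check (numbers illustrative):
workers, rate = 8, 4                  # 8 threads, 4 requests per second target
time_per_worker = workers / rate      # each request padded to >= 2.0 seconds
assert workers / time_per_worker == rate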
def main(args):
    if (args.geojson and args.postgis) or (not args.geojson and not args.postgis):
        sys.exit("ERROR: Input features to rasterize must be either GeoJSON or PostGIS")

    if args.postgis and not args.pg_dsn:
        sys.exit("ERROR: With PostGIS input features, --pg_dsn must be provided")

    config = load_config(args.config)
    check_classes(config)
    palette = make_palette(*[classe["color"] for classe in config["classes"]], complementary=True)
    burn_value = 1

    args.out = os.path.expanduser(args.out)
    os.makedirs(args.out, exist_ok=True)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    def geojson_parse_polygon(zoom, srid, feature_map, polygon, i):
        try:
            if srid != 4326:
                polygon = [xy for xy in geojson_reproject({"type": "feature", "geometry": polygon}, srid, 4326)][0]

            for j, ring in enumerate(polygon["coordinates"]):  # GeoJSON coordinates could be N dimensional
                polygon["coordinates"][j] = [[x, y] for point in ring for x, y in zip([point[0]], [point[1]])]

            if polygon["coordinates"]:
                for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=zoom):
                    feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def geojson_parse_geometry(zoom, srid, feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = geojson_parse_polygon(zoom, srid, feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = geojson_parse_polygon(zoom, srid, feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non-surfacic geometry type, skipping feature {}".format(geometry["type"], i))

        return feature_map

    if args.geojson:
        try:
            tiles = [tile for tile in tiles_from_csv(os.path.expanduser(args.cover))]
            zoom = tiles[0].z
            assert not [tile for tile in tiles if tile.z != zoom]
        except Exception:
            sys.exit("ERROR: Inconsistent cover {}".format(args.cover))

        feature_map = collections.defaultdict(list)

        log.log("RoboSat.pink - rasterize - Compute spatial index")
        for geojson_file in args.geojson:
            with open(os.path.expanduser(geojson_file)) as geojson:
                try:
                    feature_collection = json.load(geojson)
                except Exception:
                    sys.exit("ERROR: {} is not a valid JSON file.".format(geojson_file))

                try:
                    crs_mapping = {"CRS84": "4326", "900913": "3857"}
                    srid = feature_collection["crs"]["properties"]["name"].split(":")[-1]
                    srid = int(srid) if srid not in crs_mapping else int(crs_mapping[srid])
                except Exception:
                    srid = 4326

                for i, feature in enumerate(tqdm(feature_collection["features"], ascii=True, unit="feature")):
                    try:
                        if feature["geometry"]["type"] == "GeometryCollection":
                            for geometry in feature["geometry"]["geometries"]:
                                feature_map = geojson_parse_geometry(zoom, srid, feature_map, geometry, i)
                        else:
                            feature_map = geojson_parse_geometry(zoom, srid, feature_map, feature["geometry"], i)
                    except Exception:
                        sys.exit("ERROR: Unable to parse {}. It does not seem to be a valid GeoJSON file.".format(geojson_file))

        log.log("RoboSat.pink - rasterize - rasterizing tiles from {} on cover {}".format(args.geojson, args.cover))
        with open(os.path.join(os.path.expanduser(args.out), "instances.cover"), mode="w") as cover:
            for tile in tqdm(list(tiles_from_csv(os.path.expanduser(args.cover))), ascii=True, unit="tile"):
                try:
                    if tile in feature_map:
                        cover.write("{},{},{} {}{}".format(tile.x, tile.y, tile.z, len(feature_map[tile]), os.linesep))
                        out = geojson_tile_burn(tile, feature_map[tile], 4326, args.ts, burn_value)
                    else:
                        cover.write("{},{},{} {}{}".format(tile.x, tile.y, tile.z, 0, os.linesep))
                        out = np.zeros(shape=(args.ts, args.ts), dtype=np.uint8)

                    tile_label_to_file(args.out, tile, palette, out)
                except Exception:
                    log.log("Warning: Unable to rasterize tile. Skipping {}".format(str(tile)))

    if args.postgis:
        try:
            pg_conn = psycopg2.connect(args.pg_dsn)
            pg = pg_conn.cursor()
        except Exception:
            sys.exit("Unable to connect PostgreSQL: {}".format(args.pg_dsn))

        log.log("RoboSat.pink - rasterize - rasterizing tiles from PostGIS on cover {}".format(args.cover))
        log.log(" SQL {}".format(args.postgis))

        try:
            pg.execute("SELECT ST_Srid(geom) AS srid FROM ({} LIMIT 1) AS sub".format(args.postgis))
            srid = pg.fetchone()[0]
        except Exception:
            sys.exit("Unable to retrieve geometry SRID.")

        for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
            w, s, e, n = mercantile.bounds(tile)  # mercantile.bounds returns (west, south, east, north)
            raster = np.zeros((args.ts, args.ts))

            query = """
            WITH
              bbox      AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), {}   ) AS bbox),
              bbox_merc AS (SELECT ST_Transform(ST_MakeEnvelope({},{},{},{}, 4326), 3857) AS bbox),

              rast_a AS (SELECT ST_AddBand(
                           ST_SetSRID(
                             ST_MakeEmptyRaster({}, {}, ST_Xmin(bbox), ST_Ymax(bbox), (ST_YMax(bbox) - ST_YMin(bbox)) / {}),
                           3857),
                         '8BUI'::text, 0) AS rast
                         FROM bbox_merc),

              features AS (SELECT ST_Union(ST_Transform(ST_Force2D(geom), 3857)) AS geom
                           FROM ({}) AS sub, bbox
                           WHERE ST_Intersects(geom, bbox)),

              rast_b AS (SELECT ST_AsRaster(geom, rast, '8BUI', {}) AS rast
                         FROM features, rast_a
                         WHERE NOT ST_IsEmpty(geom))

            SELECT ST_AsBinary(ST_MapAlgebra(rast_a.rast, rast_b.rast, '{}', NULL, 'FIRST')) AS wkb FROM rast_a, rast_b
            """.format(w, s, e, n, srid, w, s, e, n, args.ts, args.ts, args.ts, args.postgis, burn_value, burn_value)

            try:
                pg.execute(query)
                row = pg.fetchone()
                if row:
                    raster = np.squeeze(wkb_to_numpy(io.BytesIO(row[0])), axis=2)
            except Exception:
                log.log("Warning: Invalid geometries, skipping {}".format(tile))
                pg_conn = psycopg2.connect(args.pg_dsn)
                pg = pg_conn.cursor()

            try:
                tile_label_to_file(args.out, tile, palette, raster)
            except Exception:
                log.log("Warning: Unable to rasterize tile. Skipping {}".format(str(tile)))

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
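# The srid sniffing above maps legacy GeoJSON "crs" names onto EPSG codes and
# falls back to 4326, since plain GeoJSON is WGS84 by definition. Standalone
# sketch of the same lookup (the feature collection is a fabricated example):
crs_mapping = {"CRS84": "4326", "900913": "3857"}
feature_collection = {"crs": {"properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}}
try:
    srid = feature_collection["crs"]["properties"]["name"].split(":")[-1]
    srid = int(srid) if srid not in crs_mapping else int(crs_mapping[srid])
except KeyError:
    srid = 4326
assert srid == 4326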
def main(args):
    config = load_config(args.config)
    tile_size = args.tile_size if args.tile_size else config["model"]["tile_size"]
    colors = [classe["color"] for classe in config["classes"]]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.cover))

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    log = Logs(os.path.join(args.out, "log"), out=sys.stderr)

    def parse_polygon(feature_map, polygon, i):
        try:
            for j, ring in enumerate(polygon["coordinates"]):  # GeoJSON coordinates could be N dimensional
                polygon["coordinates"][j] = [[x, y] for point in ring for x, y in zip([point[0]], [point[1]])]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def parse_geometry(feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = parse_polygon(feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = parse_polygon(feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non-surfacic geometry type, skipping feature {}".format(geometry["type"], i))

        return feature_map

    for feature in args.features:
        with open(feature) as f:
            fc = json.load(f)
            for i, feature in enumerate(tqdm(fc["features"], ascii=True, unit="feature")):
                if feature["geometry"]["type"] == "GeometryCollection":
                    for geometry in feature["geometry"]["geometries"]:
                        feature_map = parse_geometry(feature_map, geometry, i)
                else:
                    feature_map = parse_geometry(feature_map, feature["geometry"], i)

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], tile_size)
        else:
            out = np.zeros(shape=(tile_size, tile_size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)
        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")
        out.putpalette(complementary_palette(make_palette(colors[0], colors[1])))
        out.save(out_path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
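# When several GeoJSON inputs burn into the same tile, the loop above merges
# the fresh raster with the PNG already on disk via np.maximum, so previously
# written features are never erased. Minimal sketch of that merge (arrays made up):
import numpy as np

prev = np.array([[0, 1], [0, 0]], dtype=np.uint8)  # what an earlier pass wrote
out = np.array([[0, 0], [1, 0]], dtype=np.uint8)   # freshly burnt features
assert np.maximum(out, prev).tolist() == [[0, 1], [1, 0]]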
def main(args):
    config = load_config(args.config)
    check_channels(config)
    check_classes(config)
    args.workers = torch.cuda.device_count() * 2 if torch.device("cuda") and not args.workers else args.workers

    log = Logs(os.path.join(args.out, "log"))

    if torch.cuda.is_available():
        log.log("RoboSat.pink - predict on {} GPUs, with {} workers".format(torch.cuda.device_count(), args.workers))
        log.log("(Torch:{} Cuda:{} CudNN:{})".format(torch.__version__, torch.version.cuda, torch.backends.cudnn.version()))
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        log.log("RoboSat.pink - predict on CPU, with {} workers".format(args.workers))
        device = torch.device("cpu")

    try:
        chkpt = torch.load(args.checkpoint, map_location=device)
        assert chkpt["producer_name"] == "RoboSat.pink"
        model_module = import_module("robosat_pink.models.{}".format(chkpt["nn"].lower()))
        nn = getattr(model_module, chkpt["nn"])(chkpt["shape_in"], chkpt["shape_out"]).to(device)
        nn = torch.nn.DataParallel(nn)
        nn.load_state_dict(chkpt["state_dict"])
        nn.eval()
    except Exception:
        sys.exit("ERROR: Unable to load {} checkpoint.".format(args.checkpoint))

    log.log("Model {} - UUID: {}".format(chkpt["nn"], chkpt["uuid"]))

    try:
        loader_module = import_module("robosat_pink.loaders.{}".format(chkpt["loader"].lower()))
        loader_predict = getattr(loader_module, chkpt["loader"])(config, chkpt["shape_in"][1:3], args.tiles, mode="predict")
    except Exception:
        sys.exit("ERROR: Unable to load {} data loader.".format(chkpt["loader"]))

    loader = DataLoader(loader_predict, batch_size=args.bs, num_workers=args.workers)
    palette = make_palette(config["classes"][0]["color"], config["classes"][1]["color"])

    with torch.no_grad():  # don't track tensors with autograd during prediction
        for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            images = images.to(device)

            try:
                outputs = nn(images)
                probs = torch.nn.functional.softmax(outputs, dim=1).data.cpu().numpy()
            except Exception:
                log.log("WARNING: Skipping batch:")
                for tile in tiles:
                    log.log(" - {}".format(str(tile)))
                continue

            for tile, prob in zip(tiles, probs):
                try:
                    x, y, z = list(map(int, tile))
                    mask = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
                    tile_label_to_file(args.out, mercantile.Tile(x, y, z), palette, mask)
                except Exception:
                    log.log("WARNING: Skipping tile {}".format(str(tile)))

    if not args.no_web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        base_url = args.web_ui_base_url if args.web_ui_base_url else "./"
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.out)]
        web_ui(args.out, base_url, tiles, tiles, "png", template)
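# The prediction loop above turns per-class softmax scores into a binary mask
# by rounding the foreground channel. Minimal numpy sketch on a fake 2x2
# probability tensor (values made up):
import numpy as np

prob = np.array([[[0.9, 0.2], [0.4, 0.7]],    # background channel
                 [[0.1, 0.8], [0.6, 0.3]]])   # foreground channel
mask = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
assert mask.tolist() == [[0, 1], [1, 0]]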