def main(args):
    """Extract GeoJSON features of one class from slippy-map mask tiles.

    Resolves a feature handler module by name (``args.type``) either from a
    user-supplied directory (``args.path``) or from the built-in
    ``robosat.features`` package, then applies it to every mask tile and
    serializes the collected features to ``args.out``.
    """
    # Candidate module locations: optional user dir plus the built-in
    # "features" directory next to this file; "core" is infrastructure,
    # not a feature handler.
    module_search_path = [args.path] if args.path else []
    module_search_path.append(os.path.join(Path(__file__).parent.parent, "features"))
    modules = [(path, name) for path, name, _ in pkgutil.iter_modules(module_search_path) if name != "core"]
    if args.type not in [name for _, name in modules]:
        sys.exit("Unknown type, thoses available are {}".format([name for _, name in modules]))

    config = load_config(args.config)
    labels = config["classes"]["titles"]
    if args.type not in labels:
        sys.exit("The type you asked is not consistent with yours classes in the config file provided.")
    # Palette index of the requested class inside the mask PNGs.
    index = labels.index(args.type)

    if args.path:
        # Make the user-supplied directory importable, then import the module.
        sys.path.append(args.path)
        module = import_module(args.type)
    else:
        module = import_module("robosat.features.{}".format(args.type))
    # Convention: the module exposes a `<Type>Handler` class, e.g. ParkingHandler.
    handler = getattr(module, "{}Handler".format(args.type.title()))()

    for tile, path in tqdm(list(tiles_from_slippy_map(args.masks)), ascii=True, unit="mask"):
        # Masks are palette ("P") images; keep only pixels of the wanted class.
        image = np.array(Image.open(path).convert("P"), dtype=np.uint8)
        mask = (image == index).astype(np.uint8)
        handler.apply(tile, mask)

    handler.save(args.out)
def main(args):
    """Soft-vote an ensemble of quantized probability tilesets into masks.

    Each input directory in ``args.probs`` holds single-channel PNGs whose
    palette indices quantize a binary foreground probability; the voted
    result is written as palette PNG masks under ``args.masks``.
    """
    if args.weights and len(args.probs) != len(args.weights):
        sys.exit(
            "Error: number of slippy map directories and weights must be the same"
        )

    # Loop-invariant work hoisted out of the tile loop: the config and the
    # two-color palette are the same for every tile.
    config = load_config(args.config)
    palette = make_palette(config["classes"]["colors"][0], config["classes"]["colors"][1])

    # Un-quantize the probabilities in [0,255] to floating point values in [0,1]
    anchors = np.linspace(0, 1, 256)

    def load(path):
        # Note: assumes binary case and probability sums up to one.
        # Needs to be in sync with how we store them in prediction.
        quantized = np.array(Image.open(path).convert("P"))

        # (512, 512, 1) -> (1, 512, 512)
        foreground = np.rollaxis(np.expand_dims(anchors[quantized], axis=0), axis=0)
        background = np.rollaxis(1. - foreground, axis=0)

        # (1, 512, 512) + (1, 512, 512) -> (2, 512, 512)
        return np.concatenate((background, foreground), axis=0)

    tilesets = map(tiles_from_slippy_map, args.probs)

    for tileset in tqdm(list(zip(*tilesets)), desc="Masks", unit="tile", ascii=True):
        tiles = [tile for tile, _ in tileset]
        paths = [path for _, path in tileset]

        # Bug fix: `len(set(tiles))` was truthy for any non-empty tileset and
        # never enforced synchronization; in-sync tilesets must contribute
        # exactly one distinct tile at each position.
        assert len(set(tiles)) == 1, "tilesets in sync"
        x, y, z = tiles[0]

        probs = [load(path) for path in paths]
        mask = softvote(probs, axis=0, weights=args.weights)
        mask = mask.astype(np.uint8)

        out = Image.fromarray(mask, mode="P")
        out.putpalette(palette)

        os.makedirs(os.path.join(args.masks, str(z), str(x)), exist_ok=True)
        path = os.path.join(args.masks, str(z), str(x), str(y) + ".png")
        out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in list(tiles_from_slippy_map(args.probs[0]))]
        web_ui(args.masks, args.web_ui, tiles, tiles, "png", template)
def test_slippy_map_directory(self):
    """Reading a slippy-map directory yields (mercantile.Tile, path) pairs."""
    fixture_root = 'tests/fixtures/images'
    entries = list(tiles_from_slippy_map(fixture_root))
    self.assertEqual(len(entries), 3)

    first_tile, first_path = entries[0]
    self.assertEqual(type(first_tile), mercantile.Tile)
    self.assertEqual(first_path, 'tests/fixtures/images/18/69105/105093.jpg')
def __init__(self, root, transform=None):
    """Build the dataset from a slippy-map directory, ordered by tile."""
    super().__init__()

    self.transform = transform
    # Collect every (tile, path) pair and sort them by tile coordinates.
    self.tiles = sorted(tiles_from_slippy_map(root), key=lambda pair: pair[0])
def main(args):
    """Flatten image tiles from several slippy-map dirs into one output dir.

    When ``args.mask_dir`` is given, only tiles that also have a mask tile
    are kept; duplicates of the same tile from different input directories
    get distinct sequential ids in the output filename.
    """
    imageTiles = []
    imageTileIds = set()
    discardedTiles = 0

    for dir in args.directories:
        print(path.exists(dir))
        _theseTiles = list(tiles.tiles_from_slippy_map(dir))

        # Bug fix: test for an empty tileset *before* unpacking it —
        # `zip(*[])` raised ValueError, so the error message below was
        # unreachable for an empty/unreadable directory.
        if len(_theseTiles) == 0:
            print(f"Error reading tiles from: {dir}")
        else:
            _theseTileIds, _ = zip(*_theseTiles)
            imageTiles.extend(_theseTiles)
            imageTileIds = imageTileIds.union(set(_theseTileIds))

    maskTiles = []
    maskTileIds = set()
    if args.mask_dir is not None:
        num_original_tiles = len(imageTiles)
        maskTiles = list(tiles.tiles_from_slippy_map(args.mask_dir))
        maskTileIds, _ = zip(*maskTiles)
        # Keep only image tiles that have a corresponding mask tile.
        imageTileIds = imageTileIds.intersection(set(maskTileIds))
        discardedTiles = num_original_tiles - len(imageTileIds)
        print(f"Discarded tiles: {discardedTiles}")

    makedirs(args.output_dir, exist_ok=True)

    # Per-tile counter so repeated tiles from different inputs stay unique.
    ids = {t: 0 for t in imageTileIds}
    for tile, tile_file in imageTiles:
        # O(1) dict membership; was `tile not in list(ids.keys())`, which
        # rebuilt a list and scanned it linearly on every iteration.
        if tile not in ids:
            print("skipping tile {}".format(tile))
            continue  # ignore tile that's been discarded
        new_filename = FILENAME_TEMPLATE.format(z=tile.z, x=tile.x, y=tile.y, id=ids[tile])
        ids[tile] = ids[tile] + 1
        print("{} => {}".format(tile_file, path.join(args.output_dir, new_filename)))
        copy2(tile_file, path.join(args.output_dir, new_filename))
def main(args):
    """Run the registered handler for ``args.type`` over every mask tile."""
    handler = handlers[args.type]()

    mask_tiles = list(tiles_from_slippy_map(args.masks))
    for tile, mask_path in tqdm(mask_tiles, ascii=True, unit="mask"):
        # Grayscale ("L") mask: one byte per pixel.
        grayscale = np.array(Image.open(mask_path).convert("L"), dtype=np.uint8)
        handler.apply(tile, grayscale)

    handler.save(args.out)
def features_splitted(masks, out, type, dataset):
    """Apply the handler for `type` to each mask tile, passing its path too."""
    print('Splitted')
    handler = handlers[type]()

    for tile, mask_path in tqdm(list(tiles_from_slippy_map(masks)), ascii=True, unit="mask"):
        pixels = np.array(Image.open(mask_path).convert("L"), dtype=np.uint8)
        handler.apply(tile, pixels, mask_path)

    handler.save(out)
def main(args):
    """Copy into args.out only the images whose tiles appear in the CSV."""
    wanted = set(tiles_from_csv(args.tiles))

    for tile, src in tqdm(list(tiles_from_slippy_map(args.images)), desc="Subset", unit="image", ascii=True):
        if tile not in wanted:
            continue

        # splitext keeps the leading period, e.g. ".png".
        ext = os.path.splitext(src)[1]

        target_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(target_dir, exist_ok=True)
        shutil.copyfile(src, os.path.join(target_dir, "{}{}".format(tile.y, ext)))
def main(args):
    """Build side-by-side comparison sheets: image | label | each mask.

    A tile is kept only if at least one mask's foreground ratio falls
    within [args.minimum, args.maximum].
    """
    images = tiles_from_slippy_map(args.images)

    for tile, path in tqdm(list(images), desc='Compare', unit='image', ascii=True):
        x, y, z = list(map(str, tile))

        image = Image.open(path).convert('RGB')
        label = Image.open(os.path.join(args.labels, z, x, '{}.png'.format(y))).convert('P')
        assert image.size == label.size

        keep = False
        masks = []
        # NOTE(review): this loop variable shadows the outer `path`; safe only
        # because `path` is re-assigned below before its next use.
        for path in args.masks:
            mask = Image.open(os.path.join(path, z, x, '{}.png'.format(y))).convert('P')
            assert image.size == mask.size
            masks.append(mask)

            # TODO: The calculation below does not work for multi-class.
            # Fraction of non-background pixels in this mask.
            percentage = np.sum(np.array(mask) != 0) / np.prod(image.size)

            # Keep this image when percentage is within required threshold.
            if percentage >= args.minimum and percentage <= args.maximum:
                keep = True

        if not keep:
            continue

        width, height = image.size

        # Columns for image, label and all the masks.
        columns = 2 + len(masks)
        combined = Image.new(mode='RGB', size=(columns * width, height))
        combined.paste(image, box=(0 * width, 0))
        combined.paste(label, box=(1 * width, 0))
        for i, mask in enumerate(masks):
            combined.paste(mask, box=((2 + i) * width, 0))

        os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
        path = os.path.join(args.out, z, x, '{}.png'.format(y))
        combined.save(path, optimize=True)
def main(args):
    """Vectorize one class out of the dataset's masks via its handler."""
    dataset = load_config(args.dataset)

    labels = dataset['common']['classes']
    assert set(labels).issuperset(set(handlers.keys())), 'handlers have a class label'
    index = labels.index(args.type)

    handler = handlers[args.type]()

    for tile, mask_path in tqdm(list(tiles_from_slippy_map(args.masks)), ascii=True, unit='mask'):
        # Palette image -> binary mask for the selected class index.
        palette_img = np.array(Image.open(mask_path).convert('P'), dtype=np.uint8)
        binary_mask = (palette_img == index).astype(np.uint8)
        handler.apply(tile, binary_mask)

    handler.save(args.out)
def main(args):
    """Extract features of a single class from mask tiles into args.out."""
    dataset = load_config(args.dataset)
    class_names = dataset["common"]["classes"]
    assert set(class_names).issuperset(set(handlers.keys())), "handlers have a class label"

    class_index = class_names.index(args.type)
    handler = handlers[args.type]()

    all_tiles = list(tiles_from_slippy_map(args.masks))
    for tile, file_path in tqdm(all_tiles, ascii=True, unit="mask"):
        raw = np.array(Image.open(file_path).convert("P"), dtype=np.uint8)
        handler.apply(tile, (raw == class_index).astype(np.uint8))

    handler.save(args.out)
def __init__(self, root, transform=None, overlap=32):
    '''
    Args:
        root: the slippy map directory root with a `z/x/y.png` sub-structure.
        transform: the transformation to run on the buffered tile.
        overlap: the tile border to add on every side; in pixel.

    Note:
        The overlap must not span multiple tiles.
        Use `unbuffer` to get back the original tile.
    '''
    super().__init__()

    # NOTE(review): `self.width` / `self.height` are not assigned anywhere in
    # this initializer — presumably they are class attributes defined
    # elsewhere; verify, otherwise this raises AttributeError at construction.
    assert self.width == self.height, 'tiles are quadratic'

    self.transform = transform
    self.overlap = overlap
    self.tiles = list(tiles_from_slippy_map(root))
def __init__(self, root, transform=None, size=512, overlap=32):
    '''Create a buffered slippy-map dataset.

    Args:
        root: slippy map directory root with a `z/x/y.png` layout.
        transform: transformation applied to each buffered tile.
        size: Slippy Map tile size in pixels.
        overlap: border, in pixels, added on every side of a tile.

    Note:
        The overlap must not span multiple tiles; use `unbuffer` to
        recover the original tile.
    '''
    super().__init__()

    assert overlap >= 0
    assert size >= 256

    self.tiles = list(tiles_from_slippy_map(root))
    self.transform = transform
    self.size = size
    self.overlap = overlap
def main(args):
    """Predict per-pixel class probabilities for every tile with a U-Net
    checkpoint and save them as single-channel palette PNGs.

    With ``args.masks_output`` the foreground probability is rounded into a
    hard binary mask; otherwise it is quantized to [0, 255] heatmap values.
    """
    config = load_config(args.config)
    num_classes = len(config["classes"]["titles"])

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        device = torch.device("cpu")

    def map_location(storage, _):
        # Remap checkpoint tensors onto whichever device is available.
        return storage.cuda() if torch.cuda.is_available() else storage.cpu()

    # https://github.com/pytorch/pytorch/issues/7178
    chkpt = torch.load(args.checkpoint, map_location=map_location)

    net = UNet(num_classes).to(device)
    net = nn.DataParallel(net)
    net.load_state_dict(chkpt["state_dict"])
    net.eval()

    # ImageNet normalization statistics — presumably matching training; verify.
    mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]
    transform = Compose([ImageToTensor(), Normalize(mean=mean, std=std)])

    directory = BufferedSlippyMapDirectory(args.tiles, transform=transform, size=args.tile_size, overlap=args.overlap)
    loader = DataLoader(directory, batch_size=args.batch_size, num_workers=args.workers)

    if args.masks_output:
        palette = make_palette(config["classes"]["colors"][0], config["classes"]["colors"][1])
    else:
        palette = continuous_palette_for_color("pink", 256)

    # don't track tensors with autograd during prediction
    with torch.no_grad():
        for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            images = images.to(device)
            outputs = net(images)

            # manually compute segmentation mask class probabilities per pixel
            probs = nn.functional.softmax(outputs, dim=1).data.cpu().numpy()

            for tile, prob in zip(tiles, probs):
                x, y, z = list(map(int, tile))

                # we predicted on buffered tiles; now get back probs for original image
                prob = directory.unbuffer(prob)

                assert prob.shape[0] == 2, "single channel requires binary model"
                assert np.allclose(np.sum(prob, axis=0), 1.0), "single channel requires probabilities to sum up to one"

                if args.masks_output:
                    # Hard mask: round the foreground channel to {0, 1}.
                    image = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
                else:
                    # Quantize the foreground probability to [0, 255].
                    image = (prob[1:, :, :] * 255).astype(np.uint8).squeeze()

                out = Image.fromarray(image, mode="P")
                out.putpalette(palette)

                os.makedirs(os.path.join(args.probs, str(z), str(x)), exist_ok=True)
                path = os.path.join(args.probs, str(z), str(x), str(y) + ".png")
                out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.tiles)]
        web_ui(args.probs, args.web_ui, tiles, tiles, "png", template)
def run(self):
    """Vectorize displaced tiles found under the source images directory."""
    source_tiles = [tile for tile, _ in tiles_from_slippy_map(self.source_images_directory)]
    vectorize_displaced_tiles(source_tiles, self.biased_tiles_directory)
def main(args):
    """Compare tilesets in three modes: "side" (images pasted side by side),
    "stack" (averaged overlay), or "list" (CSV/GeoJSON report of QoD metrics).

    When masks, labels and a config are given, tiles are filtered by
    foreground ratio and quality-of-data (QoD) thresholds first.
    """
    # list mode and QoD filtering both require masks + labels + config.
    if not args.masks or not args.labels or not args.config:
        if args.mode == "list":
            sys.exit(
                "Parameters masks, labels and config, are all mandatories in list mode."
            )
        if args.minimum_fg > 0 or args.maximum_fg < 100 or args.minimum_qod > 0 or args.maximum_qod < 100:
            sys.exit(
                "Parameters masks, labels and config, are all mandatories in QoD filtering."
            )

    # All image tilesets must cover exactly the same tiles.
    if args.images:
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.images[0])]
        for image in args.images[1:]:
            assert sorted(tiles) == sorted([
                tile for tile, _ in tiles_from_slippy_map(image)
            ]), "inconsistent coverages"

    if args.labels and args.masks:
        tiles_masks = [tile for tile, _ in tiles_from_slippy_map(args.masks)]
        tiles_labels = [tile for tile, _ in tiles_from_slippy_map(args.labels)]
        if args.images:
            assert sorted(tiles) == sorted(tiles_masks) == sorted(
                tiles_labels), "inconsistent coverages"
        else:
            assert sorted(tiles_masks) == sorted(
                tiles_labels), "inconsistent coverages"
            tiles = tiles_masks

    if args.mode == "list":
        out = open(args.out, mode="w")
        if args.geojson:
            # Stream a FeatureCollection; `first` tracks comma placement.
            out.write('{"type":"FeatureCollection","features":[')
            first = True

    tiles_compare = []
    for tile in tqdm(list(tiles), desc="Compare", unit="tile", ascii=True):
        x, y, z = list(map(str, tile))

        if args.masks and args.labels and args.config:
            # NOTE(review): load_config is called once per tile — candidate
            # for hoisting out of the loop; left as-is here.
            classes = load_config(args.config)["classes"]["classes"]
            dist, fg_ratio, qod = compare(args.masks, args.labels, tile, classes)
            # Skip tiles outside the requested fg-ratio / QoD windows.
            if not args.minimum_fg <= fg_ratio <= args.maximum_fg or not args.minimum_qod <= qod <= args.maximum_qod:
                continue

        tiles_compare.append(tile)

        if args.mode == "side":
            for i, image in enumerate(args.images):
                img = tile_image(image, x, y, z)

                if i == 0:
                    # Allocate the canvas once, wide enough for all columns;
                    # swap axes for a vertical strip layout.
                    side = np.zeros(
                        (img.shape[0], img.shape[1] * len(args.images), 3))
                    side = np.swapaxes(side, 0, 1) if args.vertical else side
                    image_shape = img.shape
                else:
                    assert image_shape == img.shape, "Unconsistent image size to compare"

                # NOTE(review): both branches index with image_shape[0] —
                # correct only for square tiles; verify for non-square input.
                if args.vertical:
                    side[i * image_shape[0]:(i + 1) * image_shape[0], :, :] = img
                else:
                    side[:, i * image_shape[0]:(i + 1) * image_shape[0], :] = img

            os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
            side = Image.fromarray(np.uint8(side))
            side.save(os.path.join(args.out, z, x, "{}.{}".format(y, args.ext)), optimize=True)

        elif args.mode == "stack":
            # Average all images into one blended tile.
            for i, image in enumerate(args.images):
                img = tile_image(image, x, y, z)

                if i == 0:
                    image_shape = img.shape[0:2]
                    stack = img / len(args.images)
                else:
                    assert image_shape == img.shape[
                        0:2], "Unconsistent image size to compare"
                    stack = stack + (img / len(args.images))

            os.makedirs(os.path.join(args.out, str(z), str(x)), exist_ok=True)
            stack = Image.fromarray(np.uint8(stack))
            stack.save(os.path.join(args.out, str(z), str(x), "{}.{}".format(y, args.ext)), optimize=True)

        elif args.mode == "list":
            if args.geojson:
                prop = '"properties":{{"x":{},"y":{},"z":{},"fg":{:.1f},"qod":{:.1f}}}'.format(
                    x, y, z, fg_ratio, qod)
                geom = '"geometry":{}'.format(
                    json.dumps(feature(tile, precision=6)["geometry"]))
                out.write('{}{{"type":"Feature",{},{}}}'.format(
                    "," if not first else "", geom, prop))
                first = False
            else:
                out.write("{},{},{}\t\t{:.1f}\t\t{:.1f}{}".format(
                    x, y, z, fg_ratio, qod, os.linesep))

    # Finalize the report / generate the optional web UI.
    if args.mode == "list":
        if args.geojson:
            out.write("]}")
        out.close()
    elif args.mode == "side" and args.web_ui:
        template = "compare.html" if not args.web_ui_template else args.web_ui_template
        web_ui(args.out, args.web_ui, None, tiles_compare, args.ext, template)
    elif args.mode == "stack" and args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.images[0])]
        web_ui(args.out, args.web_ui, tiles, tiles_compare, args.ext, template)
from robosat.config import load_config
from robosat.features.parking import ParkingHandler

# Register post-processing handlers here; they need to support a `apply(tile, mask)` function
# for handling one mask and a `save(path)` function for GeoJSON serialization to a file.
handlers = {"parking": ParkingHandler}

# NOTE(review): hard-coded local paths — parameterize before sharing this script.
args_dataset = r'/Users/zhangqi/Documents/GitHub/robosat/data/dataset-building-predict.toml'
args_masks = r'/Users/zhangqi/Documents/GitHub/robosat/data/predict_segmentation-masks'
args_type = 'parking'
args_out = r'/Users/zhangqi/Documents/GitHub/robosat/data/predict_geojson_features'

dataset = load_config(args_dataset)
labels = dataset["common"]["classes"]
assert set(labels).issuperset(set(
    handlers.keys())), "handlers have a class label"
# Palette index of the requested class inside the mask PNGs.
index = labels.index(args_type)

handler = handlers[args_type]()

# NOTE(review): `tiles_from_slippy_map`, `tqdm`, `np` and `Image` are used but
# not imported in this visible chunk — presumably imported elsewhere; verify.
tiles = list(tiles_from_slippy_map(args_masks))
for tile, path in tqdm(tiles, ascii=True, unit="mask"):
    # Palette image -> binary mask for the selected class, then hand off.
    image = np.array(Image.open(path).convert("P"), dtype=np.uint8)
    mask = (image == index).astype(np.uint8)
    handler.apply(tile, mask)

handler.save(args_out)