def segment(self, image):
    """Segment an image into a color-indexed class mask.

    Args:
        image: input image; converted to RGB before inference.

    Returns:
        PIL image in palette ("P") mode holding per-pixel class indices,
        with the palette taken from the dataset's configured colors.
    """
    # Don't track tensors with autograd during prediction.
    with torch.no_grad():
        mean = self.dataset['stats']['mean']
        std = self.dataset['stats']['std']

        transform = Compose([
            ConvertImageMode(mode='RGB'),
            ImageToTensor(),
            Normalize(mean=mean, std=std),
        ])

        image = transform(image)
        batch = image.unsqueeze(0).to(self.device)

        output = self.net(batch)

        # Inside no_grad the output does not require grad, so .numpy() is
        # safe directly; avoids the deprecated Tensor.data attribute.
        output = output.cpu().numpy()
        output = output.squeeze(0)

        # Per-pixel argmax over the class dimension gives the mask.
        mask = output.argmax(axis=0).astype(np.uint8)

        mask = Image.fromarray(mask, mode='P')

        palette = make_palette(*self.dataset['common']['colors'])
        mask.putpalette(palette)

        return mask
def segment(self, image):
    """Segment an image into a color-indexed class mask.

    Args:
        image: input image tensor-convertible by the transform pipeline.

    Returns:
        PIL image in palette ("P") mode holding per-pixel class indices,
        with the palette taken from the configured colors.
    """
    # Don't track tensors with autograd during prediction.
    with torch.no_grad():
        # ImageNet statistics — must match the normalization used during
        # training of the checkpoint loaded into self.net.
        mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]

        transform = Compose([ImageToTensor(), Normalize(mean=mean, std=std)])
        image = transform(image)

        batch = image.unsqueeze(0).to(self.device)
        output = self.net(batch)

        # Inside no_grad the output does not require grad, so .numpy() is
        # safe directly; avoids the deprecated Tensor.data attribute.
        output = output.cpu().numpy()
        output = output.squeeze(0)

        # Per-pixel argmax over the class dimension gives the mask.
        mask = output.argmax(axis=0).astype(np.uint8)

        mask = Image.fromarray(mask, mode="P")

        palette = make_palette(*self.config["common"]["colors"])
        mask.putpalette(palette)

        return mask
def multi_burning(args, feature_map, bg, fg):
    """Burn features to tiles and write them to a slippy map directory.

    Args:
        args: CLI namespace providing tiles, out, size and multicolors.
        feature_map: mapping from mercantile.Tile to the features covering it.
        bg: background color name for the palette.
        fg: foreground color name for the palette.
    """
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size, args.multicolors)
        else:
            # No features cover this tile: emit an all-background mask.
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        # Merge with any previously written tile so repeated runs accumulate.
        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        if args.multicolors:
            # list(...) replaces the manual append loop over the generator.
            palette = make_palette_with_random(bg, random_colors=list(randomgrayscale()))
        else:
            palette = make_palette(bg, fg)

        out.putpalette(palette)
        out.save(out_path, optimize=True)
def segment(self, image):
    """Segment an image into a color-indexed class mask.

    Args:
        image: input image; converted to RGB before inference.

    Returns:
        PIL image in palette ("P") mode holding per-pixel class indices,
        with the palette taken from the dataset's configured colors.
    """
    # Don't track tensors with autograd during prediction.
    with torch.no_grad():
        mean = self.dataset['stats']['mean']
        std = self.dataset['stats']['std']

        transform = Compose([
            ConvertImageMode(mode='RGB'),
            ImageToTensor(),
            Normalize(mean=mean, std=std),
        ])

        image = transform(image)
        batch = image.unsqueeze(0).to(self.device)

        output = self.net(batch)

        # Inside no_grad the output does not require grad, so .numpy() is
        # safe directly; avoids the deprecated Tensor.data attribute.
        output = output.cpu().numpy()
        output = output.squeeze(0)

        # Per-pixel argmax over the class dimension gives the mask.
        mask = output.argmax(axis=0).astype(np.uint8)

        mask = Image.fromarray(mask, mode='P')

        palette = make_palette(*self.dataset['common']['colors'])
        mask.putpalette(palette)

        return mask
def main(args):
    """Soft-vote per-tile probability maps from several slippy map
    directories into final class masks.

    Args:
        args: CLI namespace with probs, weights, config, masks, and the
            optional web_ui/web_ui_template options.
    """
    if args.weights and len(args.probs) != len(args.weights):
        sys.exit(
            "Error: number of slippy map directories and weights must be the same"
        )

    # Config and palette are loop-invariant: build them once, not per tile.
    config = load_config(args.config)
    palette = make_palette(config["classes"]["colors"][0],
                           config["classes"]["colors"][1])

    # Un-quantize the probabilities in [0,255] to floating point values in [0,1]
    anchors = np.linspace(0, 1, 256)

    def load(path):
        # Note: assumes binary case and probability sums up to one.
        # Needs to be in sync with how we store them in prediction.
        quantized = np.array(Image.open(path).convert("P"))

        # (512, 512, 1) -> (1, 512, 512)
        foreground = np.rollaxis(np.expand_dims(anchors[quantized], axis=0), axis=0)
        background = np.rollaxis(1. - foreground, axis=0)

        # (1, 512, 512) + (1, 512, 512) -> (2, 512, 512)
        return np.concatenate((background, foreground), axis=0)

    tilesets = map(tiles_from_slippy_map, args.probs)

    for tileset in tqdm(list(zip(*tilesets)), desc="Masks", unit="tile", ascii=True):
        tiles = [tile for tile, _ in tileset]
        paths = [path for _, path in tileset]

        # Bug fix: `len(set(tiles))` is truthy for any non-empty tileset and
        # could never fail; the intent is that every directory yields the
        # very same tile at this position.
        assert len(set(tiles)) == 1, "tilesets in sync"
        x, y, z = tiles[0]

        probs = [load(path) for path in paths]
        mask = softvote(probs, axis=0, weights=args.weights)
        mask = mask.astype(np.uint8)

        out = Image.fromarray(mask, mode="P")
        out.putpalette(palette)

        os.makedirs(os.path.join(args.masks, str(z), str(x)), exist_ok=True)
        path = os.path.join(args.masks, str(z), str(x), str(y) + ".png")
        out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.probs[0])]
        web_ui(args.masks, args.web_ui, tiles, tiles, "png", template)
def main(args):
    """Rasterize GeoJSON polygon features into binary mask tiles laid out
    as a slippy map directory.

    Args:
        args: CLI namespace with dataset, out, tiles, features, zoom, size.
    """
    dataset = load_config(args.dataset)

    classes = dataset["common"]["classes"]
    colors = dataset["common"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"
    assert len(colors) == 2, "only binary models supported right now"
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(tqdm(fc["features"], ascii=True, unit="feature")):
        if feature["geometry"]["type"] != "Polygon":
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError:
            # Exception value unused: drop the dead `as e` binding.
            print("Warning: invalid feature {}, skipping".format(i), file=sys.stderr)
            continue

    # Palette is loop-invariant: build it once.
    palette = make_palette(bg, fg)

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            # No features cover this tile: emit an all-background mask.
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        # Merge with any previously written tile so repeated runs accumulate.
        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")
        out.putpalette(palette)
        out.save(out_path, optimize=True)
def main(args):
    """Rasterize GeoJSON polygon features into binary mask tiles laid out
    as a slippy map directory.

    Args:
        args: CLI namespace with dataset, out, tiles, features, zoom, size.
    """
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'
    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(tqdm(fc['features'], ascii=True, unit='feature')):
        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError:
            # Exception value unused: drop the dead `as e` binding.
            print('Warning: invalid feature {}, skipping'.format(i), file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit='tile'):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            # Bug fix: .astype(int) yields int64, which Image.fromarray
            # rejects for mode 'P'; palette images must be uint8.
            out = Image.fromarray(np.zeros(shape=(args.size, args.size), dtype=np.uint8), mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)), optimize=True)
def main(args):
    """Rasterize GeoJSON polygon features into binary mask tiles laid out
    as a slippy map directory.

    Args:
        args: CLI namespace with dataset, out, tiles, features, zoom, size.
    """
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'
    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(tqdm(fc['features'], ascii=True, unit='feature')):
        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError:
            # Exception value unused: drop the dead `as e` binding.
            print('Warning: invalid feature {}, skipping'.format(i), file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit='tile'):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            # Bug fix: .astype(int) yields int64, which Image.fromarray
            # rejects for mode 'P'; palette images must be uint8.
            out = Image.fromarray(np.zeros(shape=(args.size, args.size), dtype=np.uint8), mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)), optimize=True)
def main(args):
    """Soft-vote per-tile probability maps from several slippy map
    directories into final class masks.

    Args:
        args: CLI namespace with probs, weights and masks options.
    """
    if args.weights and len(args.probs) != len(args.weights):
        sys.exit(
            'Error: number of slippy map directories and weights must be the same'
        )

    # Palette is loop-invariant: build it once, not per tile.
    palette = make_palette('denim', 'orange')

    # Un-quantize the probabilities in [0,255] to floating point values in [0,1]
    anchors = np.linspace(0, 1, 256)

    def load(path):
        # Note: assumes binary case and probability sums up to one.
        # Needs to be in sync with how we store them in prediction.
        quantized = np.array(Image.open(path).convert('P'))

        # (512, 512, 1) -> (1, 512, 512)
        foreground = np.rollaxis(np.expand_dims(anchors[quantized], axis=0), axis=0)
        background = np.rollaxis(1. - foreground, axis=0)

        # (1, 512, 512) + (1, 512, 512) -> (2, 512, 512)
        return np.concatenate((background, foreground), axis=0)

    tilesets = map(tiles_from_slippy_map, args.probs)

    for tileset in zip(*tilesets):
        tiles = [tile for tile, _ in tileset]
        paths = [path for _, path in tileset]

        # Bug fix: `len(set(tiles))` is truthy for any non-empty tileset and
        # could never fail; the intent is that every directory yields the
        # very same tile at this position.
        assert len(set(tiles)) == 1, 'tilesets in sync'
        x, y, z = tiles[0]

        probs = [load(path) for path in paths]
        mask = softvote(probs, axis=0, weights=args.weights)
        mask = mask.astype(np.uint8)

        out = Image.fromarray(mask, mode='P')
        out.putpalette(palette)

        os.makedirs(os.path.join(args.masks, str(z), str(x)), exist_ok=True)
        path = os.path.join(args.masks, str(z), str(x), str(y) + '.png')
        out.save(path, optimize=True)
def main(args):
    """Soft-vote per-tile probability maps from several slippy map
    directories into final class masks.

    Args:
        args: CLI namespace with probs, weights and masks options.
    """
    if args.weights and len(args.probs) != len(args.weights):
        sys.exit('Error: number of slippy map directories and weights must be the same')

    # Palette is loop-invariant: build it once, not per tile.
    palette = make_palette('denim', 'orange')

    # Un-quantize the probabilities in [0,255] to floating point values in [0,1]
    anchors = np.linspace(0, 1, 256)

    def load(path):
        # Note: assumes binary case and probability sums up to one.
        # Needs to be in sync with how we store them in prediction.
        quantized = np.array(Image.open(path).convert('P'))

        # (512, 512, 1) -> (1, 512, 512)
        foreground = np.rollaxis(np.expand_dims(anchors[quantized], axis=0), axis=0)
        background = np.rollaxis(1. - foreground, axis=0)

        # (1, 512, 512) + (1, 512, 512) -> (2, 512, 512)
        return np.concatenate((background, foreground), axis=0)

    tilesets = map(tiles_from_slippy_map, args.probs)

    for tileset in zip(*tilesets):
        tiles = [tile for tile, _ in tileset]
        paths = [path for _, path in tileset]

        # Bug fix: `len(set(tiles))` is truthy for any non-empty tileset and
        # could never fail; the intent is that every directory yields the
        # very same tile at this position.
        assert len(set(tiles)) == 1, 'tilesets in sync'
        x, y, z = tiles[0]

        probs = [load(path) for path in paths]
        mask = softvote(probs, axis=0, weights=args.weights)
        mask = mask.astype(np.uint8)

        out = Image.fromarray(mask, mode='P')
        out.putpalette(palette)

        os.makedirs(os.path.join(args.masks, str(z), str(x)), exist_ok=True)
        path = os.path.join(args.masks, str(z), str(x), str(y) + '.png')
        out.save(path, optimize=True)
def save_out(out, args, tile, bg, fg, index):
    """Write a tile mask into the slippy map directory, merging with any
    previously written mask for the same tile/index pair.

    Args:
        out: uint8 mask array for the tile.
        args: CLI namespace providing out and multicolors.
        tile: mercantile.Tile-like with z/x/y attributes.
        bg: background color name for the palette.
        fg: foreground color name for the palette.
        index: suffix distinguishing multiple masks for the same tile.
    """
    out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
    os.makedirs(out_dir, exist_ok=True)

    out_path = os.path.join(out_dir, "{}_{}.png".format(tile.y, index))

    # Merge with any previously written tile so repeated runs accumulate.
    if os.path.exists(out_path):
        prev = np.array(Image.open(out_path))
        out = np.maximum(out, prev)

    out = Image.fromarray(out, mode="P")

    if args.multicolors:
        # list(...) replaces the manual append loop over the generator.
        palette = make_palette_with_random(bg, random_colors=list(randomgrayscale()))
    else:
        palette = make_palette(bg, fg)

    out.putpalette(palette)
    out.save(out_path, optimize=True)
def main(args):
    """Tile a georeferenced raster into slippy map tiles.

    For ``args.type == "label"`` a 1-band raster is written as paletted PNG
    masks; for ``args.type == "image"`` 1- or 3-band rasters are written as
    grayscale PNG or RGB WebP tiles.

    Args:
        args: CLI namespace with type, config, raster, zoom, size, out,
            no_data and the optional web_ui/web_ui_template options.
    """
    if args.type == "label":
        try:
            config = load_config(args.config)
        except Exception:
            # Narrowed from a bare `except:` which would also swallow
            # KeyboardInterrupt / SystemExit.
            sys.exit("Error: Unable to load DataSet config file")

        classes = config["classes"]["title"]
        colors = config["classes"]["colors"]
        assert len(classes) == len(colors), "classes and colors coincide"
        assert len(colors) == 2, "only binary models supported right now"

    try:
        raster = rasterio_open(args.raster)
        w, s, e, n = bounds = transform_bounds(raster.crs, "EPSG:4326", *raster.bounds)
        transform, _, _ = calculate_default_transform(raster.crs, "EPSG:3857",
                                                      raster.width, raster.height, *bounds)
    except Exception:
        # Narrowed from a bare `except:`; message text kept as-is.
        sys.exit("Error: Unable to load raster or deal with it's projection")

    tiles = [mercantile.Tile(x=x, y=y, z=z) for x, y, z in mercantile.tiles(w, s, e, n, args.zoom)]
    tiles_nodata = []

    for tile in tqdm(tiles, desc="Tiling", unit="tile", ascii=True):
        w, s, e, n = tile_bounds = mercantile.xy_bounds(tile)

        # Inspired by Rio-Tiler, cf: https://github.com/mapbox/rio-tiler/pull/45
        warp_vrt = WarpedVRT(
            raster,
            crs="EPSG:3857",
            resampling=Resampling.bilinear,
            add_alpha=False,
            transform=from_bounds(*tile_bounds, args.size, args.size),
            width=math.ceil((e - w) / transform.a),
            height=math.ceil((s - n) / transform.e),
        )
        data = warp_vrt.read(out_shape=(len(raster.indexes), args.size, args.size),
                             window=warp_vrt.window(w, s, e, n))

        # If no_data is set, remove all tiles with at least one whole border
        # filled only with no_data (on all bands).
        # Bug fix: the original tested `type(args.no_data) is not None`, which
        # is always True since type() never returns None.
        if args.no_data is not None and (
                np.all(data[:, 0, :] == args.no_data)
                or np.all(data[:, -1, :] == args.no_data)
                or np.all(data[:, :, 0] == args.no_data)
                or np.all(data[:, :, -1] == args.no_data)):
            tiles_nodata.append(tile)
            continue

        C, W, H = data.shape

        os.makedirs(os.path.join(args.out, str(args.zoom), str(tile.x)), exist_ok=True)
        path = os.path.join(args.out, str(args.zoom), str(tile.x), str(tile.y))

        if args.type == "label":
            assert C == 1, "Error: Label raster input should be 1 band"

            ext = "png"
            img = Image.fromarray(np.squeeze(data, axis=0), mode="P")
            img.putpalette(make_palette(colors[0], colors[1]))
            img.save("{}.{}".format(path, ext), optimize=True)

        elif args.type == "image":
            assert C == 1 or C == 3, "Error: Image raster input should be either 1 or 3 bands"

            # GeoTiff could be 16 or 32bits
            if data.dtype == "uint16":
                data = np.uint8(data / 256)
            elif data.dtype == "uint32":
                data = np.uint8(data / (256 * 256))

            if C == 1:
                ext = "png"
                Image.fromarray(np.squeeze(data, axis=0), mode="L").save("{}.{}".format(path, ext), optimize=True)
            elif C == 3:
                ext = "webp"
                Image.fromarray(np.moveaxis(data, 0, 2), mode="RGB").save("{}.{}".format(path, ext), optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile in tiles if tile not in tiles_nodata]
        web_ui(args.out, args.web_ui, tiles, tiles, ext, template)
def main(args):
    """Run a trained UNet over a slippy map of tiles and write per-tile
    probability (or mask) PNGs to another slippy map directory.

    Args:
        args: CLI namespace with config, checkpoint, tiles, tile_size,
            overlap, batch_size, workers, masks_output, probs and the
            optional web_ui/web_ui_template options.
    """
    config = load_config(args.config)
    num_classes = len(config["classes"]["titles"])

    if torch.cuda.is_available():
        device = torch.device("cuda")
        torch.backends.cudnn.benchmark = True
    else:
        device = torch.device("cpu")

    def map_location(storage, _):
        return storage.cuda() if torch.cuda.is_available() else storage.cpu()

    # https://github.com/pytorch/pytorch/issues/7178
    chkpt = torch.load(args.checkpoint, map_location=map_location)

    net = UNet(num_classes).to(device)
    net = nn.DataParallel(net)
    net.load_state_dict(chkpt["state_dict"])
    net.eval()

    mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]
    transform = Compose([ImageToTensor(), Normalize(mean=mean, std=std)])

    directory = BufferedSlippyMapDirectory(args.tiles, transform=transform,
                                           size=args.tile_size, overlap=args.overlap)
    loader = DataLoader(directory, batch_size=args.batch_size, num_workers=args.workers)

    if args.masks_output:
        palette = make_palette(config["classes"]["colors"][0], config["classes"]["colors"][1])
    else:
        palette = continuous_palette_for_color("pink", 256)

    # don't track tensors with autograd during prediction
    with torch.no_grad():
        for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):
            images = images.to(device)
            outputs = net(images)

            # manually compute segmentation mask class probabilities per pixel;
            # inside no_grad .cpu().numpy() is safe and avoids deprecated .data
            probs = nn.functional.softmax(outputs, dim=1).cpu().numpy()

            for tile, prob in zip(tiles, probs):
                x, y, z = list(map(int, tile))

                # we predicted on buffered tiles; now get back probs for original image
                prob = directory.unbuffer(prob)

                assert prob.shape[0] == 2, "single channel requires binary model"
                assert np.allclose(
                    np.sum(prob, axis=0), 1.0
                ), "single channel requires probabilities to sum up to one"

                if args.masks_output:
                    # Hard mask: round foreground probability to {0, 1}.
                    image = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
                else:
                    # Quantize foreground probability into [0, 255].
                    image = (prob[1:, :, :] * 255).astype(np.uint8).squeeze()

                out = Image.fromarray(image, mode="P")
                out.putpalette(palette)

                os.makedirs(os.path.join(args.probs, str(z), str(x)), exist_ok=True)
                path = os.path.join(args.probs, str(z), str(x), str(y) + ".png")
                out.save(path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile, _ in tiles_from_slippy_map(args.tiles)]
        web_ui(args.probs, args.web_ui, tiles, tiles, "png", template)
def main(args):
    """Rasterize GeoJSON features (polygons, multipolygons and geometry
    collections) into binary mask tiles on a slippy map layout.

    Args:
        args: CLI namespace with config, out, cover, features, zoom, size
            and the optional web_ui/web_ui_template options.
    """
    config = load_config(args.config)
    classes = config["classes"]["titles"]
    colors = config["classes"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"
    assert len(colors) == 2, "only binary models supported right now"

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.cover))

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    log = Log(os.path.join(args.out, "log"), out=sys.stderr)

    def parse_polygon(feature_map, polygon, i):
        try:
            # GeoJSON coordinates could be N dimensionals: keep x/y only.
            # Bug fix: the loop variable used to be `i`, shadowing the feature
            # index parameter and corrupting the warning message below; the
            # zip([point[0]], [point[1]]) construct simplified to direct access.
            for ring_index, ring in enumerate(polygon["coordinates"]):
                polygon["coordinates"][ring_index] = [[point[0], point[1]] for point in ring]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append({"type": "feature", "geometry": polygon})

        except ValueError:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def parse_geometry(feature_map, geometry, i):
        if geometry["type"] == "Polygon":
            feature_map = parse_polygon(feature_map, geometry, i)
        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = parse_polygon(feature_map, {"type": "Polygon", "coordinates": polygon}, i)
        else:
            log.log("Notice: {} is a non surfacic geometry type, skipping feature {}"
                    .format(geometry["type"], i))

        return feature_map

    for feature in args.features:
        with open(feature) as f:
            fc = json.load(f)

        for i, feature in enumerate(tqdm(fc["features"], ascii=True, unit="feature")):
            if feature["geometry"]["type"] == "GeometryCollection":
                for geometry in feature["geometry"]["geometries"]:
                    feature_map = parse_geometry(feature_map, geometry, i)
            else:
                feature_map = parse_geometry(feature_map, feature["geometry"], i)

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True, unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            # No features cover this tile: emit an all-background mask.
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        # Merge with any previously written tile so repeated runs accumulate.
        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        # Redundant duplicate makedirs/out_path reassignment removed: it
        # rebuilt the exact same directory path already created above.
        out.putpalette(complementary_palette(make_palette(colors[0], colors[1])))
        out.save(out_path, optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, args.web_ui, tiles, tiles, "png", template)