def main(args):
    dataset = load_config(args.dataset)

    classes = dataset["common"]["classes"]
    colors = dataset["common"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"

    assert len(colors) == 2, "only binary models supported right now"
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc["features"], ascii=True, unit="feature")):

        if feature["geometry"]["type"] != "Polygon":
            continue

        try:
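            # burntiles.burn returns the [x, y, z] indices of every tile the feature covers.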
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print("Warning: invalid feature {}, skipping".format(i),
                  file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
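            # No features cover this tile, so write an all-background (zero) mask.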
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        if os.path.exists(out_path):
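            # A mask for this tile already exists (e.g. from an earlier pass); merge by per-pixel maximum.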
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out.save(out_path, optimize=True)
Example #2
def main(args):
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'

    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc['features'], ascii=True, unit='feature')):

        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print('Warning: invalid feature {}, skipping'.format(i),
                  file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit='tile'):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            out = Image.fromarray(np.zeros(shape=(args.size, args.size),
                                           dtype=np.uint8),
                                  mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)),
                 optimize=True)
Example #3
    def test_read_tiles(self):
        filename = "tests/fixtures/tiles.csv"
        tiles = [tile for tile in tiles_from_csv(filename)]
        tiles.sort()

        self.assertEqual(len(tiles), 3)
        self.assertEqual(tiles[1], mercantile.Tile(69623, 104945, 18))
Example #4
def multi_burning(args, feature_map, bg, fg):
    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size, args.multicolors)
        else:
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        if args.multicolors:
            random_colors = list(randomgrayscale())
            palette = make_palette_with_random(bg, random_colors=random_colors)
        else:
            palette = make_palette(bg, fg)

        out.putpalette(palette)

        out.save(out_path, optimize=True)
Example #5
def main(args):
    def difference_set(x, y):
        """
        x包含y(set)
        :param x:
        :param y:
        :return:
        """
        if y is not None:
            return x - y, None
        else:
            return x, None

    path = args.tiles
    label = ['training', 'validation', 'evaluation']
    data_set = {'training': None, 'validation': None, 'evaluation': None}
    data_rate = {
        'training': args.training,
        'validation': args.validation,
        'evaluation': args.evaluation
    }
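    # Fractions of the full cover assigned to each split; they are expected to sum to at most 1.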

    tiles = set(tiles_from_csv(path))
    num = len(tiles)
    for data_label in tqdm(label, desc="split", ascii=True):
        out = os.path.join(args.out, 'csv_' + data_label + '.tiles')
        rate = data_rate[data_label]
        for i in data_set.keys():
            if i != data_label:
                tiles, data_set[i] = difference_set(tiles, data_set[i])
        tiles_list = list(tiles)
        tiles_out = random.sample(tiles_list, int(num * rate))
        # Remember this split so the remaining labels cannot pick the same tiles again.
        data_set[data_label] = set(tiles_out)
        with open(out, "w") as fp:
            writer = csv.writer(fp)
            writer.writerows(tiles_out)
Example #6
def main(args):
    log = Log(os.path.join(args.out, "log"), out=sys.stderr)

    tiles = set(tiles_from_csv(args.cover))
    extension = ""

    for tile in tqdm(tiles, desc="Subset", unit="tiles", ascii=True):

        paths = glob(os.path.join(args.dir, str(tile.z), str(tile.x), "{}.*".format(tile.y)))
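        # Expect exactly one file per tile in the source slippy map; anything else is ambiguous.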
        if len(paths) != 1:
            log.log("Warning: {} skipped.".format(tile))
            continue
        src = paths[0]

        try:
            extension = os.path.splitext(src)[1][1:]
            dst = os.path.join(args.out, str(tile.z), str(tile.x), "{}.{}".format(tile.y, extension))
            if not os.path.isdir(os.path.join(args.out, str(tile.z), str(tile.x))):
                os.makedirs(os.path.join(args.out, str(tile.z), str(tile.x)), exist_ok=True)
            if args.move:
                assert os.path.isfile(src)
                shutil.move(src, dst)
            else:
                shutil.copyfile(src, dst)
        except Exception:
            sys.exit("Error: Unable to process {}".format(tile))

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        web_ui(args.out, args.web_ui, tiles, tiles, extension, template)
Example #7
def main(args):
    dataset = load_config(args.dataset)

    classes = dataset['common']['classes']
    colors = dataset['common']['colors']
    assert len(classes) == len(colors), 'classes and colors coincide'

    assert len(colors) == 2, 'only binary models supported right now'
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(tqdm(fc['features'], ascii=True, unit='feature')):

        if feature['geometry']['type'] != 'Polygon':
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print('Warning: invalid feature {}, skipping'.format(i), file=sys.stderr)
            continue

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True, unit='tile'):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            out = Image.fromarray(np.zeros(shape=(args.size, args.size), dtype=np.uint8), mode='P')

        palette = make_palette(bg, fg)
        out.putpalette(palette)

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.save(os.path.join(out_path, '{}.png'.format(tile.y)), optimize=True)
Example #8
def main(args):
    tiles = list(tiles_from_csv(args.tiles))

    with requests.Session() as session:
        num_workers = args.rate

        # tqdm has problems with concurrent.futures.ThreadPoolExecutor; explicitly call `.update`
        # https://github.com/tqdm/tqdm/issues/97
        progress = tqdm(total=len(tiles), ascii=True, unit="image")

        with futures.ThreadPoolExecutor(num_workers) as executor:

            def worker(tile):
                tick = time.monotonic()

                x, y, z = map(str, [tile.x, tile.y, tile.z])

                os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
                path = os.path.join(args.out, z, x,
                                    "{}.{}".format(y, args.ext))

                if os.path.isfile(path):
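                    # Tile already fetched on a previous run; report success without re-downloading.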
                    return tile, True

                url = args.url.format(x=tile.x, y=tile.y, z=tile.z)

                res = fetch_image(session, url)

                if not res:
                    return tile, False

                try:
                    image = Image.open(res)
                    image.save(path, optimize=True)
                except OSError:
                    return tile, False

                tock = time.monotonic()

                time_for_req = tock - tick
                time_per_worker = num_workers / args.rate
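                # num_workers equals args.rate, so each worker targets one request per second,
                # capping overall throughput at roughly args.rate requests per second.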

                if time_for_req < time_per_worker:
                    time.sleep(time_per_worker - time_for_req)

                progress.update()

                return tile, True

            for tile, ok in executor.map(worker, tiles):
                if not ok:
                    print("Warning: {} failed, skipping".format(tile),
                          file=sys.stderr)
Example #9
def single_burning(args, feature_map, bg, fg):
    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.tiles)), ascii=True,
                     unit="tile"):
        if tile in feature_map:
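            # One mask per feature; the index presumably keeps their output filenames distinct.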
            index = 0
            for feature in feature_map[tile]:
                out = burn_single(tile, feature, args.size, args.multicolors)
                save_out(out, args, tile, bg, fg, index)
                index = index + 1
        else:
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)
            save_out(out, args, tile, bg, fg, 0)
Example #10
def main(args):
    images = tiles_from_slippy_map(args.images)

    tiles = set(tiles_from_csv(args.tiles))

    for tile, src in tqdm(list(images), desc="Subset", unit="image", ascii=True):
        if tile not in tiles:
            continue

        # The extension also includes the period.
        extension = os.path.splitext(src)[1]

        os.makedirs(os.path.join(args.out, str(tile.z), str(tile.x)), exist_ok=True)
        dst = os.path.join(args.out, str(tile.z), str(tile.x), "{}{}".format(tile.y, extension))

        shutil.copyfile(src, dst)
Example #11
def main(args):
    dataset = load_config(args.dataset)

    classes = dataset["common"]["classes"]
    colors = dataset["common"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"

    assert len(colors) == 2, "only binary models supported right now"
    bg = colors[0]
    fg = colors[1]

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.tiles))

    with open(args.features) as f:
        fc = json.load(f)

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    for i, feature in enumerate(
            tqdm(fc["features"], ascii=True, unit="feature")):

        if feature["geometry"]["type"] != "Polygon":
            continue

        try:
            for tile in burntiles.burn([feature], zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(feature)
        except ValueError as e:
            print("Warning: invalid feature {}, skipping".format(i),
                  file=sys.stderr)
            continue

    single_burning(args, feature_map, bg, fg)
Example #12
    def test_read_tiles(self):
        filename = 'tests/fixtures/tiles.csv'
        tiles = [tile for tile in tiles_from_csv(filename)]

        self.assertEqual(len(tiles), 3)
        self.assertEqual(tiles[0], mercantile.Tile(69623, 104945, 18))
Example #13
def main(args):
    tiles = list(tiles_from_csv(args.tiles))
    already_dl = 0
    dl = 0
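    # already_dl counts tiles found on disk and skipped; dl counts fresh downloads.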

    with requests.Session() as session:
        num_workers = args.rate

        os.makedirs(os.path.join(args.out), exist_ok=True)
        log = Log(os.path.join(args.out, "log"), out=sys.stderr)
        log.log("Begin download from {}".format(args.url))

        # tqdm has problems with concurrent.futures.ThreadPoolExecutor; explicitly call `.update`
        # https://github.com/tqdm/tqdm/issues/97
        progress = tqdm(total=len(tiles), ascii=True, unit="image")

        with futures.ThreadPoolExecutor(num_workers) as executor:

            def worker(tile):
                tick = time.monotonic()

                x, y, z = map(str, [tile.x, tile.y, tile.z])

                os.makedirs(os.path.join(args.out, z, x), exist_ok=True)
                path = os.path.join(args.out, z, x, "{}.{}".format(y, args.ext))

                if os.path.isfile(path):
                    return tile, None, True

                if args.type == "XYZ":
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "TMS":
                    tile.y = (2 ** tile.z) - tile.y - 1
                    url = args.url.format(x=tile.x, y=tile.y, z=tile.z)
                elif args.type == "WMS":
                    xmin, ymin, xmax, ymax = xy_bounds(tile)
                    url = args.url.format(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)

                res = fetch_image(session, url, args.timeout)
                if not res:
                    return tile, url, False

                try:
                    image = Image.open(res)
                    image.save(path, optimize=True)
                except OSError:
                    return tile, url, False

                tock = time.monotonic()

                time_for_req = tock - tick
                time_per_worker = num_workers / args.rate

                if time_for_req < time_per_worker:
                    time.sleep(time_per_worker - time_for_req)

                progress.update()

                return tile, url, True

            for tile, url, ok in executor.map(worker, tiles):
                if url and ok:
                    dl += 1
                elif not url and ok:
                    already_dl += 1
                else:
                    log.log("Warning:\n {} failed, skipping.\n {}\n".format(tile, url))

    if already_dl:
        log.log("Notice:\n {} tiles were already downloaded previously, and so skipped now.".format(already_dl))
    if already_dl + dl == len(tiles):
        log.log(" Coverage is fully downloaded.")

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        web_ui(args.out, args.web_ui, tiles, tiles, args.ext, template)
Example #14
def main(args):
    config = load_config(args.config)

    classes = config["classes"]["titles"]
    colors = config["classes"]["colors"]
    assert len(classes) == len(colors), "classes and colors coincide"
    assert len(colors) == 2, "only binary models supported right now"

    os.makedirs(args.out, exist_ok=True)

    # We can only rasterize all tiles at a single zoom.
    assert all(tile.z == args.zoom for tile in tiles_from_csv(args.cover))

    # Find all tiles the features cover and make a map object for quick lookup.
    feature_map = collections.defaultdict(list)
    log = Log(os.path.join(args.out, "log"), out=sys.stderr)

    def parse_polygon(feature_map, polygon, i):

        try:
            # GeoJSON coordinates may be N-dimensional (e.g. carry altitude); keep only x and y.
            # Use a distinct loop variable so the feature index i stays intact for the warning below.
            for j, ring in enumerate(polygon["coordinates"]):
                polygon["coordinates"][j] = [[point[0], point[1]] for point in ring]

            for tile in burntiles.burn([{"type": "feature", "geometry": polygon}],
                                       zoom=args.zoom):
                feature_map[mercantile.Tile(*tile)].append(
                    {"type": "feature", "geometry": polygon})

        except ValueError as e:
            log.log("Warning: invalid feature {}, skipping".format(i))

        return feature_map

    def parse_geometry(feature_map, geometry, i):

        if geometry["type"] == "Polygon":
            feature_map = parse_polygon(feature_map, geometry, i)

        elif geometry["type"] == "MultiPolygon":
            for polygon in geometry["coordinates"]:
                feature_map = parse_polygon(feature_map, {
                    "type": "Polygon",
                    "coordinates": polygon
                }, i)
        else:
            log.log(
                "Notice: {} is not an areal geometry type, skipping feature {}"
                .format(geometry["type"], i))

        return feature_map

    for feature_path in args.features:
        with open(feature_path) as f:
            fc = json.load(f)
            for i, feature in enumerate(
                    tqdm(fc["features"], ascii=True, unit="feature")):

                if feature["geometry"]["type"] == "GeometryCollection":
                    for geometry in feature["geometry"]["geometries"]:
                        feature_map = parse_geometry(feature_map, geometry, i)
                else:
                    feature_map = parse_geometry(feature_map,
                                                 feature["geometry"], i)

    # Burn features to tiles and write to a slippy map directory.
    for tile in tqdm(list(tiles_from_csv(args.cover)), ascii=True,
                     unit="tile"):
        if tile in feature_map:
            out = burn(tile, feature_map[tile], args.size)
        else:
            out = np.zeros(shape=(args.size, args.size), dtype=np.uint8)

        out_dir = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_dir, exist_ok=True)

        out_path = os.path.join(out_dir, "{}.png".format(tile.y))

        if os.path.exists(out_path):
            prev = np.array(Image.open(out_path))
            out = np.maximum(out, prev)

        out = Image.fromarray(out, mode="P")

        out_path = os.path.join(args.out, str(tile.z), str(tile.x))
        os.makedirs(out_path, exist_ok=True)

        out.putpalette(
            complementary_palette(make_palette(colors[0], colors[1])))
        out.save(os.path.join(out_path, "{}.png".format(tile.y)),
                 optimize=True)

    if args.web_ui:
        template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
        tiles = [tile for tile in tiles_from_csv(args.cover)]
        web_ui(args.out, args.web_ui, tiles, tiles, "png", template)