def gen_indexes_and_check():
    """Generate GeoJSON, TXT and VRT indexes and verify each output exists."""
    # build all three index types for the current zoom level
    # (mp and zoom come from the enclosing scope)
    list(
        zoom_index_gen(
            mp=mp,
            zoom=zoom,
            out_dir=mp.config.output.path,
            geojson=True,
            txt=True,
            vrt=True
        )
    )
    # GeoJSON index must exist and contain both tile entries
    geojson_index = os.path.join(mp.config.output.path, "%s.geojson" % zoom)
    with fiona.open(geojson_index) as src:
        assert len(src) == 2
    # TXT index lives in object storage: split the path into bucket and key,
    # fetch the object body and count its non-empty lines
    txt_index = os.path.join(mp.config.output.path, "%s.txt" % zoom)
    path_parts = txt_index.split("/")
    bucket = get_boto3_bucket(path_parts[2])
    key = "/".join(path_parts[3:])
    for obj in bucket.objects.filter(Prefix=key):
        if obj.key == key:
            content = obj.get()['Body'].read().decode()
            assert len([line + '\n' for line in content.split('\n') if line]) == 2
    # VRT index must exist and reference readable raster data
    vrt_index = os.path.join(mp.config.output.path, "%s.vrt" % zoom)
    with rasterio.open(vrt_index) as src:
        assert src.read().any()
def _process_single_tile(
    debug=None, raw_conf_process_pyramid=None, mapchete_config=None,
    tile=None, mode=None, input_file=None, verbose_dst=None, vrt=None,
    idx_out_dir=None, no_pbar=None
):
    """
    Process a single tile of a mapchete configuration and optionally
    write a VRT index for it.

    Parameters
    ----------
    debug : bool
        Disables the spinner and progress bar when set.
    raw_conf_process_pyramid : callable
        Returns the process pyramid for a raw configuration mapping.
    mapchete_config : str or dict
        Mapchete configuration (path or mapping).
    tile : tuple
        Zoom, row, column of the tile to process.
    mode : str
        Mapchete open mode (e.g. "continue", "overwrite").
    input_file : str
        Single input file override passed to mapchete.open().
    verbose_dst : file-like
        Destination for verbose status messages.
    vrt : bool
        If set, also create a VRT index for the processed zoom level.
    idx_out_dir : str
        Alternative output directory for the index; falls back to the
        process output path.
    no_pbar : bool
        Disables the progress bar when set.
    """
    with click_spinner.spinner(disable=debug) as spinner:
        # time the whole processing step
        with mapchete.Timer() as t:
            # resolve the tile coordinates against the process pyramid
            tile = raw_conf_process_pyramid(
                raw_conf(mapchete_config)).tile(*tile)
            with mapchete.open(
                mapchete_config, mode=mode, bounds=tile.bounds,
                zoom=tile.zoom, single_input_file=input_file
            ) as mp:
                spinner.stop()
                tqdm.tqdm.write("processing 1 tile", file=verbose_dst)
                # run process on tile
                for result in mp.batch_processor(tile=tile):
                    write_verbose_msg(result, dst=verbose_dst)
                # include the config path in the message only when it is a path
                tqdm.tqdm.write(
                    ("processing %s finished in %s" % (mapchete_config, t)
                     if isinstance(mapchete_config, str)
                     else "processing finished in %s" % t),
                    file=verbose_dst)
                # write VRT index
                if vrt:
                    with mapchete.Timer() as t_vrt:
                        tqdm.tqdm.write("creating VRT", file=verbose_dst)
                        for tile in tqdm.tqdm(
                            zoom_index_gen(
                                mp=mp,
                                zoom=tile.zoom,
                                out_dir=idx_out_dir or mp.config.output.path,
                                vrt=vrt,
                            ),
                            total=mp.count_tiles(tile.zoom, tile.zoom),
                            unit="tile",
                            disable=debug or no_pbar
                        ):
                            logger.debug("%s indexed", tile)
                        tqdm.tqdm.write(
                            ("VRT(s) for %s created in %s" % (
                                mapchete_config, t_vrt)
                             if isinstance(mapchete_config, str)
                             else "VRT(s) created in %s" % t_vrt),
                            file=verbose_dst)
def _process_area(
    debug=None,
    mapchete_config=None,
    mode=None,
    zoom=None,
    wkt_geometry=None,
    point=None,
    bounds=None,
    input_file=None,
    multi=None,
    verbose_dst=None,
    max_chunksize=None,
    no_pbar=None,
    vrt=None,
    idx_out_dir=None,
):
    """
    Process all tiles of a mapchete configuration within the given area
    and optionally create VRT index files afterwards.

    Parameters
    ----------
    debug : bool
        Disables the spinner and progress bar when set.
    mapchete_config : str or dict
        Mapchete configuration (path or mapping).
    mode : str
        Mapchete open mode.
    zoom : int or list
        Zoom level(s) to process.
    wkt_geometry : str
        Process area as WKT geometry; converted to bounds.
    point : tuple
        Point used to derive the process bounds.
    bounds : tuple
        Explicit process bounds (left, bottom, right, top).
    input_file : str
        Single input file override passed to mapchete.open().
    multi : int
        Number of workers; defaults to the machine's CPU count.
    verbose_dst : file-like
        Destination for verbose status messages.
    max_chunksize : int
        Maximum chunk size handed to each worker.
    no_pbar : bool
        Disables the progress bar when set.
    vrt : bool
        If set, also create VRT index files for all processed zoom levels.
    idx_out_dir : str
        Alternative index output directory; falls back to the process
        output path.
    """
    # default to one worker per CPU core
    multi = multi or cpu_count()
    with click_spinner.spinner(disable=debug) as spinner:
        # time the whole processing step
        with mapchete.Timer() as t:
            with mapchete.open(
                mapchete_config,
                mode=mode,
                zoom=zoom,
                bounds=bounds_from_opts(
                    wkt_geometry=wkt_geometry,
                    point=point,
                    bounds=bounds,
                    raw_conf=raw_conf(mapchete_config)
                ),
                single_input_file=input_file
            ) as mp:
                spinner.stop()
                tiles_count = mp.count_tiles(
                    min(mp.config.init_zoom_levels),
                    max(mp.config.init_zoom_levels)
                )
                tqdm.tqdm.write(
                    "processing %s tile(s) on %s worker(s)" % (
                        tiles_count, multi),
                    file=verbose_dst
                )
                # run process on tiles
                for process_info in tqdm.tqdm(
                    mp.batch_processor(
                        multi=multi, zoom=zoom, max_chunksize=max_chunksize
                    ),
                    total=tiles_count,
                    unit="tile",
                    disable=debug or no_pbar
                ):
                    write_verbose_msg(process_info, dst=verbose_dst)
                # include the config path in the message only when it is a path
                tqdm.tqdm.write(
                    (
                        "processing %s finished in %s" % (mapchete_config, t)
                        if isinstance(mapchete_config, str)
                        else "processing finished in %s" % t
                    ),
                    file=verbose_dst
                )
                # write VRT index
                if vrt:
                    with mapchete.Timer() as t_vrt:
                        tqdm.tqdm.write("creating VRT(s)", file=verbose_dst)
                        for tile in tqdm.tqdm(
                            zoom_index_gen(
                                mp=mp,
                                zoom=mp.config.init_zoom_levels,
                                out_dir=idx_out_dir or mp.config.output.path,
                                vrt=vrt
                            ),
                            total=mp.count_tiles(
                                min(mp.config.init_zoom_levels),
                                max(mp.config.init_zoom_levels)
                            ),
                            unit="tile",
                            disable=debug or no_pbar
                        ):
                            logger.debug("%s indexed", tile)
                        tqdm.tqdm.write(
                            (
                                "VRT(s) for %s created in %s" % (
                                    mapchete_config, t_vrt)
                                if isinstance(mapchete_config, str)
                                else "VRT(s) created in %s" % t_vrt
                            ),
                            file=verbose_dst
                        )
def test_vrt(mp_tmpdir, cleantopo_br):
    """
    Generate a VRT index for processed output and compare it against a VRT
    built by the gdalbuildvrt command line tool; also verify that re-running
    index generation over an existing VRT works.

    NOTE(review): requires gdalbuildvrt on PATH — the os.system() call's
    return code is not checked, so a missing binary would surface only as a
    failed rasterio.open() below.
    """
    zoom = 8
    with mapchete.open(
        dict(cleantopo_br.dict, zoom_levels=dict(min=0, max=zoom))
    ) as mp:
        # generate output
        mp.batch_process(zoom=zoom)
        # generate index
        list(zoom_index_gen(
            mp=mp,
            zoom=zoom,
            out_dir=mp.config.output.path,
            vrt=True,
        ))
        # expected VRT bounds: the union of all output tile bounds
        output_tiles = list(
            mp.config.output_pyramid.tiles_from_bounds(
                mp.config.bounds_at_zoom(zoom=zoom), zoom=zoom
            )
        )
        bounds = (
            min([t.left for t in output_tiles]),
            min([t.bottom for t in output_tiles]),
            max([t.right for t in output_tiles]),
            max([t.top for t in output_tiles]),
        )
        # bounds = mp.config.effective_bounds
        vrt_index = os.path.join(mp.config.output.path, "%s.vrt" % zoom)
        # the generated VRT must carry the output profile and data
        with rasterio.open(vrt_index) as vrt:
            assert vrt.driver == "VRT"
            assert vrt.dtypes[0] == "uint16"
            assert vrt.meta["dtype"] == "uint16"
            assert vrt.count == 1
            assert vrt.nodata == 0
            assert vrt.bounds == bounds
            vrt_data = vrt.read()
            assert vrt_data.any()
        # generate a VRT using GDAL and compare
        out_dir = os.path.join(mp_tmpdir, "cleantopo_br")
        temp_vrt = os.path.join(out_dir, str(zoom)+"_gdal.vrt")
        gdalbuildvrt = "gdalbuildvrt %s %s/%s/*/*.tif > /dev/null" % (
            temp_vrt, out_dir, zoom)
        os.system(gdalbuildvrt)
        # the GDAL-built VRT must match the generated one exactly
        with rasterio.open(temp_vrt, "r") as gdal_vrt:
            assert gdal_vrt.dtypes[0] == "uint16"
            assert gdal_vrt.meta["dtype"] == "uint16"
            assert gdal_vrt.count == 1
            assert gdal_vrt.nodata == 0
            assert gdal_vrt.bounds == bounds
            gdal_vrt_data = gdal_vrt.read()
            assert np.array_equal(vrt_data, gdal_vrt_data)
    # make sure handling an existing VRT works
    with mapchete.open(
        dict(cleantopo_br.dict, zoom_levels=dict(min=0, max=zoom))
    ) as mp:
        # generate output
        mp.batch_process(zoom=zoom)
        # generate index
        list(zoom_index_gen(
            mp=mp,
            zoom=zoom,
            out_dir=mp.config.output.path,
            vrt=True,
        ))
def index(inputs, idx_out_dir=None, geojson=False, gpkg=False, shp=False,
          vrt=False, txt=False, fieldname=None, basepath=None, for_gdal=False,
          zoom=None, bounds=None, bounds_crs=None, area=None, area_crs=None,
          point=None, point_crs=None, wkt_geometry=None, tile=None,
          username=None, password=None, verbose=False, no_pbar=False,
          debug=False, logfile=None):
    """
    Create index file(s) for existing process output.

    For each input either a single tile (when ``tile`` is given) or the
    whole configured area is indexed. At least one index format flag
    (geojson, gpkg, shp, vrt, txt) must be set.

    Raises
    ------
    click.MissingParameter
        If no index format flag is provided.
    """
    if not any([geojson, gpkg, shp, txt, vrt]):
        # BUGFIX: the two adjacent string literals previously concatenated
        # without a separating space ("'--txt'must be provided.")
        raise click.MissingParameter(
            "At least one of '--geojson', '--gpkg', '--shp', '--vrt' or "
            "'--txt' must be provided.",
            param_type="option"
        )
    # send verbose messages to /dev/null if not activated
    verbose_dst = open(os.devnull, 'w') if debug or not verbose else sys.stdout
    for input_ in inputs:
        tqdm.tqdm.write("create index(es) for %s" % input_, file=verbose_dst)
        with click_spinner.spinner(disable=debug) as spinner:
            # process single tile
            if tile:
                with mapchete.open(
                    input_, mode="readonly", username=username,
                    password=password
                ) as mp:
                    spinner.stop()
                    # resolve tile coordinates against the process pyramid
                    tile = mp.config.process_pyramid.tile(*tile)
                    for tile in tqdm.tqdm(
                        zoom_index_gen(
                            mp=mp,
                            zoom=tile.zoom,
                            out_dir=(
                                idx_out_dir if idx_out_dir
                                else mp.config.output.path
                            ),
                            geojson=geojson,
                            gpkg=gpkg,
                            shapefile=shp,
                            vrt=vrt,
                            txt=txt,
                            fieldname=fieldname,
                            basepath=basepath,
                            for_gdal=for_gdal
                        ),
                        total=mp.count_tiles(tile.zoom, tile.zoom),
                        unit="tile",
                        disable=debug or no_pbar
                    ):
                        logger.debug("%s indexed", tile)
            # process area
            else:
                with mapchete.open(
                    input_, mode="readonly", zoom=zoom,
                    wkt_geometry=wkt_geometry, point=point,
                    point_crs=point_crs, bounds=bounds, bounds_crs=bounds_crs,
                    area=area, area_crs=area_crs, username=username,
                    password=password
                ) as mp:
                    spinner.stop()
                    logger.debug("process bounds: %s", mp.config.init_bounds)
                    logger.debug("process area: %s", mp.config.init_area)
                    logger.debug(
                        "process zooms: %s", mp.config.init_zoom_levels)
                    logger.debug("fieldname: %s", fieldname)
                    for tile in tqdm.tqdm(
                        zoom_index_gen(
                            mp=mp,
                            zoom=mp.config.init_zoom_levels,
                            out_dir=(
                                idx_out_dir if idx_out_dir
                                else mp.config.output.path
                            ),
                            geojson=geojson,
                            gpkg=gpkg,
                            shapefile=shp,
                            vrt=vrt,
                            txt=txt,
                            fieldname=fieldname,
                            basepath=basepath,
                            for_gdal=for_gdal
                        ),
                        total=mp.count_tiles(
                            min(mp.config.init_zoom_levels),
                            max(mp.config.init_zoom_levels)
                        ),
                        unit="tile",
                        disable=debug or no_pbar
                    ):
                        logger.debug("%s indexed", tile)
        tqdm.tqdm.write(
            "index(es) creation for %s finished" % input_, file=verbose_dst)
def index(args):
    """
    Create index files for process output (argparse entry point).

    Indexes either a single tile (when ``args.tile`` is given) or the
    whole configured area at all initialized zoom levels.

    Raises
    ------
    ValueError
        If none of the index format flags is set.
    """
    if args.debug:
        logging.getLogger("mapchete").setLevel(logging.DEBUG)
        stream_handler.setLevel(logging.DEBUG)
    if not any([args.geojson, args.gpkg, args.shp, args.txt]):
        raise ValueError(
            "one of 'geojson', 'gpkg', 'shp', or 'txt' must be provided")
    # process single tile
    if args.tile:
        # BUGFIX: the config file handle was never closed and yaml.load()
        # was called without a Loader; use a context manager and safe_load
        # so arbitrary YAML tags cannot execute code
        with open(args.mapchete_file, "r") as src:
            conf = _map_to_new_config(yaml.safe_load(src))
        # resolve tile coordinates against the configured pyramid
        tile = BufferedTilePyramid(
            conf["pyramid"]["grid"],
            metatiling=conf["pyramid"].get("metatiling", 1),
            pixelbuffer=conf["pyramid"].get("pixelbuffer", 0)
        ).tile(*args.tile)
        with mapchete.open(
            args.mapchete_file, mode="readonly", bounds=tile.bounds,
            zoom=tile.zoom
        ) as mp:
            out_dir = args.out_dir if args.out_dir else mp.config.output.path
            for tile in tqdm.tqdm(
                zoom_index_gen(
                    mp=mp,
                    zoom=tile.zoom,
                    out_dir=out_dir,
                    geojson=args.geojson,
                    gpkg=args.gpkg,
                    shapefile=args.shp,
                    txt=args.txt,
                    fieldname=args.fieldname,
                    basepath=args.basepath,
                    for_gdal=args.for_gdal
                ),
                total=mp.count_tiles(tile.zoom, tile.zoom),
                unit="tile",
                disable=args.debug
            ):
                logger.debug(tile)
    else:
        # derive process bounds from WKT geometry if given
        if args.wkt_geometry:
            bounds = wkt.loads(args.wkt_geometry).bounds
        else:
            bounds = args.bounds
        with mapchete.open(
            args.mapchete_file, mode="readonly", zoom=args.zoom, bounds=bounds
        ) as mp:
            out_dir = args.out_dir if args.out_dir else mp.config.output.path
            logger.debug("process bounds: %s", mp.config.init_bounds)
            logger.debug("process zooms: %s", mp.config.init_zoom_levels)
            logger.debug("fieldname: %s", args.fieldname)
            # index each initialized zoom level separately
            for z in mp.config.init_zoom_levels:
                logger.debug("zoom %s", z)
                for tile in tqdm.tqdm(
                    zoom_index_gen(
                        mp=mp,
                        zoom=z,
                        out_dir=out_dir,
                        geojson=args.geojson,
                        gpkg=args.gpkg,
                        shapefile=args.shp,
                        txt=args.txt,
                        fieldname=args.fieldname,
                        basepath=args.basepath,
                        for_gdal=args.for_gdal
                    ),
                    total=mp.count_tiles(z, z),
                    unit="tile",
                    disable=args.debug
                ):
                    logger.debug(tile)
def index(
    mapchete_files, out_dir=None, geojson=False, gpkg=False, shp=False,
    txt=False, fieldname=None, basepath=None, for_gdal=False, zoom=None,
    bounds=None, wkt_geometry=None, tile=None, verbose=False, debug=False,
    logfile=None
):
    """
    Create index files for process output (click entry point).

    For each mapchete file either a single tile (when ``tile`` is given)
    or the whole configured area at all initialized zoom levels is indexed.

    Raises
    ------
    click.MissingParameter
        If none of the index format flags is set.
    """
    if not any([geojson, gpkg, shp, txt]):
        raise click.MissingParameter(
            "At least one of '--geojson', '--gpkg', '--shp', or '--txt' must be provided.",
            param_type="option"
        )
    # send verbose output to /dev/null if not activated
    verbose_dst = open(os.devnull, 'w') if debug or not verbose else sys.stdout
    for mapchete_file in mapchete_files:
        tqdm.tqdm.write("create index for %s" % mapchete_file, file=verbose_dst)
        # process single tile
        if tile:
            # BUGFIX: the config file handle was never closed and yaml.load()
            # was called without a Loader; use a context manager and
            # safe_load so arbitrary YAML tags cannot execute code
            with open(mapchete_file, "r") as src:
                conf = _map_to_new_config(yaml.safe_load(src))
            # resolve tile coordinates against the configured pyramid
            tile = BufferedTilePyramid(
                conf["pyramid"]["grid"],
                metatiling=conf["pyramid"].get("metatiling", 1),
                pixelbuffer=conf["pyramid"].get("pixelbuffer", 0)
            ).tile(*tile)
            with mapchete.open(
                mapchete_file, mode="readonly", bounds=tile.bounds,
                zoom=tile.zoom
            ) as mp:
                out_dir = out_dir if out_dir else mp.config.output.path
                for tile in tqdm.tqdm(
                    zoom_index_gen(
                        mp=mp,
                        zoom=tile.zoom,
                        out_dir=out_dir,
                        geojson=geojson,
                        gpkg=gpkg,
                        shapefile=shp,
                        txt=txt,
                        fieldname=fieldname,
                        basepath=basepath,
                        for_gdal=for_gdal),
                    total=mp.count_tiles(tile.zoom, tile.zoom),
                    unit="tile",
                    disable=debug
                ):
                    logger.debug(tile)
        else:
            with mapchete.open(
                mapchete_file, mode="readonly", zoom=zoom,
                bounds=wkt.loads(wkt_geometry).bounds
                if wkt_geometry else bounds
            ) as mp:
                out_dir = out_dir if out_dir else mp.config.output.path
                logger.debug("process bounds: %s", mp.config.init_bounds)
                logger.debug("process zooms: %s", mp.config.init_zoom_levels)
                logger.debug("fieldname: %s", fieldname)
                # index each initialized zoom level separately
                for z in mp.config.init_zoom_levels:
                    logger.debug("zoom %s", z)
                    for tile in tqdm.tqdm(
                        zoom_index_gen(
                            mp=mp,
                            zoom=z,
                            out_dir=out_dir,
                            geojson=geojson,
                            gpkg=gpkg,
                            shapefile=shp,
                            txt=txt,
                            fieldname=fieldname,
                            basepath=basepath,
                            for_gdal=for_gdal),
                        total=mp.count_tiles(z, z),
                        unit="tile",
                        disable=debug
                    ):
                        logger.debug(tile)
        tqdm.tqdm.write("index creation finished", file=verbose_dst)