def test_index_text(cleantopo_br):
    """Text index holds one entry and is not duplicated by a second index run."""
    def _assert_single_entry():
        # index file must exist and contain exactly one line pointing at the tile
        with mapchete.open(cleantopo_br.dict) as mp:
            index_files = os.listdir(mp.config.output.path)
            assert "5.txt" in index_files
            with open(os.path.join(mp.config.output.path, "5.txt")) as src:
                entries = list(src)
            assert len(entries) == 1
            for entry in entries:
                assert entry.endswith("7.tif\n")

    # execute process
    run_cli(["execute", cleantopo_br.path, "-z", "5", "--debug"])
    # generate index
    run_cli(["index", cleantopo_br.path, "-z", "5", "--txt", "--debug"])
    _assert_single_entry()
    # write again and assert there is no new entry because there is already one
    run_cli(["index", cleantopo_br.path, "-z", "5", "--txt", "--debug"])
    _assert_single_entry()
def test_input_data_read():
    """Check GeoJSON as input data.

    Fix: ``yaml.load`` without an explicit ``Loader`` is removed in
    PyYAML >= 6 (and unsafe before that) — use ``yaml.safe_load``.
    """
    try:
        mp = mapchete.open(os.path.join(SCRIPTDIR, "testdata/geojson.mapchete"))
        for tile in mp.get_process_tiles():
            assert isinstance(tile, BufferedTile)
            input_tile = geojson.InputTile(tile, mp)
            assert isinstance(input_tile.read(), list)
            for feature in input_tile.read():
                assert isinstance(feature, dict)
        # reprojected GeoJSON
        with open(os.path.join(SCRIPTDIR, "testdata/geojson.mapchete")) as src:
            config = yaml.safe_load(src.read())
        config["input_files"].update(
            file1=os.path.join(TESTDATA_DIR, "landpoly_3857.geojson"))
        config.update(config_dir=TESTDATA_DIR)
        mp = mapchete.open(config, mode="readonly")
        for tile in mp.get_process_tiles(4):
            assert isinstance(tile, BufferedTile)
            with mp.config.output.open(tile, mp) as input_tile:
                # NOTE(review): `in [False]` also matches 0 — presumably
                # is_empty() returns a bool; confirm before tightening to `is False`
                assert input_tile.is_empty() in [False]
                assert isinstance(input_tile.read(), list)
                for feature in input_tile.read():
                    assert isinstance(feature, dict)
    finally:
        shutil.rmtree(TEMP_DIR, ignore_errors=True)
def test_get_raw_output_readonly():
    """Get raw process output using readonly flag.

    Fix: the original try/``raise Exception()``/``except ValueError`` pattern
    is replaced by ``pytest.raises``, consistent with the other tests here.
    """
    try:
        tile = (5, 0, 0)
        readonly_mp = mapchete.open(
            os.path.join(SCRIPTDIR, "testdata/cleantopo_tl.mapchete"),
            mode="readonly")
        write_mp = mapchete.open(
            os.path.join(SCRIPTDIR, "testdata/cleantopo_tl.mapchete"),
            mode="continue")
        # read non-existing data (returns empty)
        out_tile = readonly_mp.get_raw_output(tile)
        assert out_tile.data.mask.all()
        # trying to process and save in readonly mode must fail
        with pytest.raises(ValueError):
            readonly_mp.write(readonly_mp.get_raw_output(tile))
        # actually process and save
        write_mp.write(write_mp.get_raw_output(tile))
        # read written output
        out_tile = readonly_mp.get_raw_output(tile)
        assert not out_tile.data.mask.all()
    finally:
        shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_no_metadata_json(mp_tmpdir, cleantopo_br_tiledir):
    """Read raster data."""
    # opening a tile directory lacking metadata.json must raise a driver error
    broken_config = dict(
        cleantopo_br_tiledir.dict, input=dict(file1="tmp/cleantopo_br"))
    with pytest.raises(MapcheteDriverError):
        mapchete.open(broken_config)
def test_input_data_read(mp_tmpdir, geojson, landpoly_3857):
    """Check GeoJSON as input data."""
    with mapchete.open(geojson.path) as mp:
        for tile in mp.get_process_tiles():
            assert isinstance(tile, BufferedTile)
            input_tile = formats.default.geojson.InputTile(tile, mp)
            assert isinstance(input_tile.read(), list)
            for feature in input_tile.read():
                assert isinstance(feature, dict)

    # reprojected GeoJSON
    config = geojson.dict
    config["input"].update(file1=landpoly_3857)

    # first, write tiles
    with mapchete.open(config, mode="overwrite") as mp:
        for tile in mp.get_process_tiles(4):
            assert isinstance(tile, BufferedTile)
            mp.write(tile, mp.get_raw_output(tile))

    # then, read output and make sure at least one tile held data
    with mapchete.open(config, mode="readonly") as mp:
        any_data = False
        for tile in mp.get_process_tiles(4):
            with mp.config.output.open(tile, mp) as input_tile:
                if input_tile.is_empty():
                    continue
                any_data = True
                assert isinstance(input_tile.read(), list)
                for feature in input_tile.read():
                    assert isinstance(feature, dict)
        assert any_data
def test_index_text(mp_tmpdir, cleantopo_br):
    """Text index holds one entry; re-indexing must not add a duplicate."""
    # execute process
    MapcheteCLI([None, 'execute', cleantopo_br.path, '-z', '5', '--debug'])
    # generate index twice; the second run finds the existing entry and adds none
    for _ in range(2):
        MapcheteCLI(
            [None, 'index', cleantopo_br.path, '-z', '5', '--txt', '--debug'])
        with mapchete.open(cleantopo_br.dict) as mp:
            assert "5.txt" in os.listdir(mp.config.output.path)
            with open(os.path.join(mp.config.output.path, "5.txt")) as src:
                entries = list(src)
            assert len(entries) == 1
            for entry in entries:
                assert entry.endswith("7.tif\n")
def test_index_gpkg(mp_tmpdir, cleantopo_br):
    """GeoPackage index holds one feature; re-indexing must not add a duplicate."""
    # execute process
    MapcheteCLI([None, 'execute', cleantopo_br.path, '-z', '5', '--debug'])
    # generate index twice; the second run finds the existing entry and adds none
    for _ in range(2):
        MapcheteCLI(
            [None, 'index', cleantopo_br.path, '-z', '5', '--gpkg', '--debug'])
        with mapchete.open(cleantopo_br.dict) as mp:
            assert "5.gpkg" in os.listdir(mp.config.output.path)
            with fiona.open(
                os.path.join(mp.config.output.path, "5.gpkg")
            ) as src:
                for feature in src:
                    assert "location" in feature["properties"]
                assert len(list(src)) == 1
def main(args=None):
    """Execute a Mapchete process."""
    # NOTE(review): args looks like an already-parsed argparse namespace,
    # not a raw argv list — confirm against the caller
    parsed = args
    if parsed.input_file and not (
        os.path.isfile(parsed.input_file) or os.path.isdir(parsed.input_file)
    ):
        raise IOError("input_file not found")
    multi = parsed.multi or cpu_count()
    mode = "overwrite" if parsed.overwrite else "continue"
    zoom = parsed.zoom or None
    if parsed.tile:
        # process single tile
        with mapchete.open(
            parsed.mapchete_file, mode=mode,
            single_input_file=parsed.input_file, debug=parsed.debug
        ) as mp:
            mp.batch_process(
                tile=parsed.tile, quiet=parsed.quiet, debug=parsed.debug)
    else:
        # initialize and run process over the full (optionally bounded) area
        with mapchete.open(
            parsed.mapchete_file, bounds=parsed.bounds, mode=mode,
            single_input_file=parsed.input_file, debug=parsed.debug
        ) as mp:
            mp.batch_process(
                multi=multi, quiet=parsed.quiet, debug=parsed.debug, zoom=zoom)
def test_invalid_input_type(example_mapchete):
    """Raise MapcheteDriverError."""
    # a non-string/non-dict input entry must be rejected by the config parser
    broken_config = example_mapchete.dict
    broken_config.update(input=dict(invalid_type=1))
    with pytest.raises(errors.MapcheteConfigError):
        mapchete.open(broken_config)
def test_convert_raster(cleantopo_tl, cleantopo_tl_tif, landpoly):
    """Run the convert process on raster input, with and without clip."""
    def _user_process(mp, tile):
        # build a MapcheteProcess bound to the given tile
        return mapchete.MapcheteProcess(
            tile=tile,
            params=mp.config.params_at_zoom(tile.zoom),
            input=mp.config.get_inputs_for_tile(tile),
        )

    def _empty_tile(mp, zoom):
        # bottom-right corner tile lies outside the test data, i.e. is empty
        return mp.config.process_pyramid.tile(
            zoom,
            mp.config.process_pyramid.matrix_height(zoom) - 1,
            mp.config.process_pyramid.matrix_width(zoom) - 1,
        )

    with mapchete.open(
        dict(cleantopo_tl.dict, input=dict(inp=cleantopo_tl_tif))
    ) as mp:
        zoom = max(mp.config.zoom_levels)
        # execute without clip
        user_process = _user_process(mp, next(mp.get_process_tiles(zoom)))
        assert isinstance(convert.execute(user_process), np.ndarray)
        # execute on empty tile
        user_process = _user_process(mp, _empty_tile(mp, zoom))
        assert convert.execute(user_process) == "empty"

    with mapchete.open(
        dict(cleantopo_tl.dict, input=dict(inp=cleantopo_tl_tif, clip=landpoly))
    ) as mp:
        zoom = max(mp.config.zoom_levels)
        user_process = _user_process(mp, next(mp.get_process_tiles(zoom)))
        # tile with data
        assert isinstance(convert.execute(user_process), np.ndarray)
        # scale_offset
        assert isinstance(
            convert.execute(user_process, scale_offset=2), np.ndarray)
        # scale_ratio
        assert isinstance(
            convert.execute(user_process, scale_ratio=0.5), np.ndarray)
        # clip_to_output_dtype
        assert isinstance(
            convert.execute(
                user_process, scale_ratio=2, clip_to_output_dtype="uint8"),
            np.ndarray,
        )
        # execute on empty tile
        user_process = _user_process(mp, _empty_tile(mp, zoom))
        assert convert.execute(user_process) == "empty"
def test_read(example_mapchete):
    """Mapchete read() raises on invalid usage."""
    # reading is not available in memory mode
    with mapchete.open(example_mapchete.path, mode="memory") as mp:
        with pytest.raises(ValueError):
            mp.read(mp.get_process_tiles())
    # anything other than a tile raises TypeError
    with mapchete.open(example_mapchete.path) as mp:
        with pytest.raises(TypeError):
            mp.read("invalid")
def test_execute(example_mapchete):
    """Mapchete execute() raises on invalid usage."""
    # executing is not available in readonly mode
    with mapchete.open(example_mapchete.path, mode="readonly") as mp:
        with pytest.raises(ValueError):
            mp.execute(mp.get_process_tiles())
    # anything other than a tile raises TypeError
    with mapchete.open(example_mapchete.path) as mp:
        with pytest.raises(TypeError):
            mp.execute("invalid")
def test_output_single_gtiff_errors(output_single_gtiff):
    """Single GTiff output driver error conditions."""
    # single gtiff does not work on multiple zoom levels
    with pytest.raises(ValueError):
        mapchete.open(dict(output_single_gtiff.dict, zoom_levels=[5, 6]))
    # either process_tile or output_tile must be given, not both
    with mapchete.open(output_single_gtiff.path) as mp:
        probe_tile = mp.config.process_pyramid.tile(5, 3, 7)
        with pytest.raises(ValueError):
            mp.config.output.tiles_exist(
                process_tile=probe_tile, output_tile=probe_tile)
def test_skip_tiles(mp_tmpdir, cleantopo_tl):
    """Test batch_process function."""
    zoom = 2
    # after processing in continue mode, every tile is skipped
    with mapchete.open(cleantopo_tl.path, mode="continue") as mp:
        mp.batch_process(zoom=zoom)
        assert all(
            skip
            for _, skip in mp.skip_tiles(tiles=mp.get_process_tiles(zoom=zoom))
        )
    # in overwrite mode, no tile is skipped
    with mapchete.open(cleantopo_tl.path, mode="overwrite") as mp:
        assert not any(
            skip
            for _, skip in mp.skip_tiles(tiles=mp.get_process_tiles(zoom=zoom))
        )
def test_custom_grid(mp_tmpdir, custom_grid):
    """Cutom grid processing."""
    # process and save
    with mapchete.open(custom_grid.dict) as mp:
        mp.batch_process()
    # read written output and check each tile holds unmasked data
    with mapchete.open(custom_grid.dict) as mp:
        for tile in mp.get_process_tiles(5):
            tile_data = mp.config.output.read(tile)
            assert tile_data.any()
            assert isinstance(tile_data, ma.masked_array)
            assert not tile_data.mask.all()
def test_contours(cleantopo_tl, cleantopo_tl_tif, landpoly):
    """Run the contours process, with and without clip input."""
    def _user_process(mp, tile):
        # build a MapcheteProcess bound to the given tile
        return mapchete.MapcheteProcess(
            tile=tile,
            params=mp.config.params_at_zoom(tile.zoom),
            input=mp.config.get_inputs_for_tile(tile),
        )

    def _empty_tile(mp, zoom):
        # bottom-right corner tile lies outside the test data, i.e. is empty
        return mp.config.process_pyramid.tile(
            zoom,
            mp.config.process_pyramid.matrix_height(zoom) - 1,
            mp.config.process_pyramid.matrix_width(zoom) - 1,
        )

    with mapchete.open(
        dict(cleantopo_tl.dict, input=dict(dem=cleantopo_tl_tif))
    ) as mp:
        zoom = max(mp.config.zoom_levels)
        # execute without clip
        user_process = _user_process(mp, next(mp.get_process_tiles(zoom)))
        output = contours.execute(user_process)
        assert isinstance(output, list)
        assert output
        # execute on empty tile
        user_process = _user_process(mp, _empty_tile(mp, zoom))
        assert contours.execute(user_process) == "empty"

    with mapchete.open(
        dict(cleantopo_tl.dict, input=dict(dem=cleantopo_tl_tif, clip=landpoly))
    ) as mp:
        zoom = max(mp.config.zoom_levels)
        user_process = _user_process(mp, next(mp.get_process_tiles(zoom)))
        output = contours.execute(user_process)
        assert isinstance(output, list)
        assert output
        # execute on empty tile
        user_process = _user_process(mp, _empty_tile(mp, zoom))
        assert contours.execute(user_process) == "empty"
def test_parse_bounds(geojson_tiledir):
    """Read and configure bounds."""
    # without user bounds, fall back to pyramid bounds
    with mapchete.open(geojson_tiledir.dict) as mp:
        file1_input = mp.config.params_at_zoom(4)["input"]["file1"]
        assert file1_input.bbox().bounds == (-180, -90, 180, 90)
    # user defined bounds take precedence
    custom_bounds = (0, 0, 30, 30)
    geojson_tiledir.dict["input"]["file1"].update(bounds=custom_bounds)
    with mapchete.open(geojson_tiledir.dict) as mp:
        file1_input = mp.config.params_at_zoom(4)["input"]["file1"]
        assert file1_input.bbox().bounds == custom_bounds
        # reproject
        assert file1_input.bbox(out_crs="3857")
def test_custom_grid(mp_tmpdir, custom_grid):
    """Cutom grid processing."""
    # process and save
    with mapchete.open(custom_grid.dict) as mp:
        mp.batch_process()
    # read written output through a MapcheteProcess in readonly mode
    with mapchete.open(custom_grid.dict, mode="readonly") as mp:
        for tile in mp.get_process_tiles(5):
            process_tile = mapchete.MapcheteProcess(
                tile, config=mp.config, params=mp.config.params_at_zoom(5))
            tile_data = process_tile.read()
            assert tile_data.any()
            assert isinstance(tile_data, ma.masked_array)
            assert not tile_data.mask.all()
def test_update_baselevels(mp_tmpdir, baselevels):
    """Baselevel interpolation."""
    conf = dict(baselevels.dict)
    conf.update(zoom_levels=[7, 8], baselevels=dict(min=8, max=8))
    baselevel_tile = (8, 125, 260)
    overview_tile = (7, 62, 130)

    with mapchete.open(conf, mode="continue") as mp:
        tile_bounds = mp.config.output_pyramid.tile(*baselevel_tile).bounds

    # process using bounds of just one baselevel tile
    with mapchete.open(conf, mode="continue", bounds=tile_bounds) as mp:
        mp.batch_process()
        with rasterio.open(
            mp.config.output.get_path(
                mp.config.output_pyramid.tile(*overview_tile))
        ) as src:
            overview_before = src.read()
            assert overview_before.any()

    # process full area which leaves out overview tile for baselevel tile above
    with mapchete.open(conf, mode="continue") as mp:
        mp.batch_process()

    # delete baselevel tile
    written_tile = os.path.join(
        baselevels.dict["config_dir"],
        baselevels.dict["output"]["path"],
        *(str(v) for v in baselevel_tile)
    ) + ".tif"
    os.remove(written_tile)
    assert not os.path.exists(written_tile)

    # run again in continue mode. this processes the missing tile on zoom 5 but
    # overwrites the tile in zoom 4
    with mapchete.open(conf, mode="continue") as mp:
        # process data before getting baselevels
        mp.batch_process()
        with rasterio.open(
            mp.config.output.get_path(
                mp.config.output_pyramid.tile(*overview_tile))
        ) as src:
            overview_after = src.read()
            assert overview_after.any()

    # the overview tile must have been rewritten from the new baselevel data
    assert not np.array_equal(overview_before, overview_after)
def test_processing():
    """Test correct processing (read and write) outputs."""
    for cleantopo_process in [
        "testdata/cleantopo_tl.mapchete", "testdata/cleantopo_br.mapchete"
    ]:
        mp = mapchete.open(os.path.join(SCRIPTDIR, cleantopo_process))
        for zoom in range(6):
            tiles = []
            for tile in mp.get_process_tiles(zoom):
                output = mp.execute(tile)
                tiles.append(output)
                assert isinstance(output, BufferedTile)
                assert isinstance(output.data, ma.MaskedArray)
                assert output.data.shape == output.shape
                assert not ma.all(output.data.mask)
                mp.write(output)
            mosaic, mosaic_affine = create_mosaic(tiles)
            try:
                temp_vrt = os.path.join(OUT_DIR, str(zoom) + ".vrt")
                # build a VRT over the written tiles to compare with the mosaic
                command = "gdalbuildvrt %s %s/%s/*/*.tif > /dev/null" % (
                    temp_vrt, OUT_DIR, zoom)
                os.system(command)
                with rasterio.open(temp_vrt, "r") as testfile:
                    for vrt_value, mosaic_value in zip(
                        testfile.meta["transform"], mosaic_affine
                    ):
                        assert vrt_value == mosaic_value
                    band = testfile.read(1, masked=True)
                    assert band.shape == mosaic.shape
                    assert ma.allclose(band, mosaic)
                    assert ma.allclose(band.mask, mosaic.mask)
            finally:
                shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_input():
    """Parse configuration using "input" instead of "input_files".

    Fixes: ``yaml.load`` without an explicit ``Loader`` is removed in
    PyYAML >= 6 — use ``yaml.safe_load``; the file handle was never closed —
    use a context manager; drop commented-out code.
    """
    with open(os.path.join(SCRIPTDIR, "example.mapchete"), "r") as src:
        config = yaml.safe_load(src.read())
    config["config_dir"] = SCRIPTDIR
    assert mapchete.open(config)
def test_output_error(mp_tmpdir, cleantopo_br, output_error_py):
    """Assert output error is raised."""
    # run a process module which returns invalid output
    config = cleantopo_br.dict
    config.update(process=output_error_py)
    with mapchete.open(config) as mp:
        with pytest.raises(errors.MapcheteProcessOutputError):
            mp.execute((5, 0, 0))
def test_process_exception(mp_tmpdir, cleantopo_br, process_error_py):
    """Assert process exception is raised."""
    # run a process module which raises during execution
    config = cleantopo_br.dict
    config.update(process=process_error_py)
    with mapchete.open(config) as mp:
        with pytest.raises(errors.MapcheteProcessException):
            mp.execute((5, 0, 0))
def test_input_data(mp_tmpdir, cleantopo_br):
    """Check GeoTIFF proces output as input data."""
    with mapchete.open(cleantopo_br.path) as mp:
        tp = BufferedTilePyramid("geodetic")
        # TODO tile with existing but empty data
        tile = tp.tile(5, 5, 5)
        output_params = dict(
            type="geodetic",
            format="GeoTIFF",
            path=mp_tmpdir,
            pixelbuffer=0,
            metatiling=1,
            bands=2,
            dtype="int16",
        )
        output = gtiff.OutputData(output_params)
        with output.open(tile, mp, resampling="nearest") as input_tile:
            assert input_tile.resampling == "nearest"
            for data in [
                input_tile.read(),
                input_tile.read(1),
                input_tile.read([1]),
                # TODO assert valid indexes are passed
                input_tile.read([1, 2])
            ]:
                assert isinstance(data, ma.masked_array)
            assert input_tile.is_empty()
        # open without resampling
        with output.open(tile, mp) as input_tile:
            pass
def test_output_data(mp_tmpdir, geojson):
    """Check GeoJSON as output data."""
    output_params = dict(
        grid="geodetic",
        format="GeoJSON",
        path=mp_tmpdir,
        schema=dict(properties=dict(id="int"), geometry="Polygon"),
        pixelbuffer=0,
        metatiling=1,
    )
    output = formats.default.geojson.OutputDataWriter(output_params)
    assert output.path == mp_tmpdir
    assert output.file_extension == ".geojson"
    assert isinstance(output_params, dict)
    with mapchete.open(geojson.path) as mp:
        tile = mp.config.process_pyramid.tile(4, 3, 7)
        # write empty
        mp.write(tile, None)
        # write data
        mp.write(tile, mp.get_raw_output(tile))
        # read data back and make sure features were written
        read_output = mp.get_raw_output(tile)
        assert isinstance(read_output, list)
        assert len(read_output)
def test_batch_process():
    """Test batch_process function.

    Fix: replace the try/``raise Exception()``/``except ValueError`` pattern
    with ``pytest.raises``, consistent with the other tests in this file.
    """
    mp = mapchete.open(
        os.path.join(SCRIPTDIR, "testdata/cleantopo_tl.mapchete"))
    try:
        # invalid parameter combinations raise ValueError
        with pytest.raises(ValueError):
            mp.batch_process(zoom=1, tile=(1, 0, 0))
        with pytest.raises(ValueError):
            mp.batch_process(debug=True, quiet=True)
        # process single tile
        mp.batch_process(tile=(2, 0, 0))
        mp.batch_process(tile=(2, 0, 0), quiet=True)
        mp.batch_process(tile=(2, 0, 0), debug=True)
        # process using multiprocessing
        mp.batch_process(zoom=2, multi=2)
        # process without multiprocessing
        mp.batch_process(zoom=2, multi=1)
    finally:
        shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_baselevels_buffer_antimeridian():
    """Baselevel interpolation using buffers.

    Fixes: ``yaml.load`` without a ``Loader`` is removed in PyYAML >= 6 —
    use ``yaml.safe_load``; the config file handle was leaked — use a
    context manager; simplify the ``np.where(..., True, False)`` anti-idiom.
    """
    try:
        with open(
            os.path.join(SCRIPTDIR, "testdata/baselevels.mapchete"), "r"
        ) as src:
            mp_config = yaml.safe_load(src.read())
        mp_config.update(
            pixelbuffer=10,
            config_dir=os.path.join(SCRIPTDIR, "testdata"),
            input_files=None)
        zoom = 5
        row = 0
        with mapchete.open(mp_config) as mp:
            # write data left and right of antimeridian
            west = mp.config.process_pyramid.tile(zoom, row, 0)
            shape = (3, ) + west.shape
            west.data = np.zeros(shape)
            mp.write(west)
            east = mp.config.process_pyramid.tile(
                zoom, row, mp.config.process_pyramid.matrix_width(zoom) - 1)
            # float fill value keeps the dtype identical to np.ones(shape) * 10
            east.data = np.full(shape, 10.0)
            mp.write(east)
            # use baselevel generation to interpolate tile and somehow
            # assert no data from across the antimeridian is read.
            lower_tile = mp.get_raw_output(west.get_parent())
            assert (lower_tile.data.data != 10).all()
    finally:
        shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_baselevels_buffer():
    """Baselevel interpolation using buffers.

    Fixes: generators have no ``.next()`` method in Python 3 — use the
    built-in ``next()``; ``yaml.load`` without a ``Loader`` is removed in
    PyYAML >= 6 — use ``yaml.safe_load``.
    """
    try:
        with open(
            os.path.join(SCRIPTDIR, "testdata/baselevels.mapchete"), "r"
        ) as src:
            config = yaml.safe_load(src.read())
        config.update(
            pixelbuffer=10, config_dir=os.path.join(SCRIPTDIR, "testdata"))
        mp = mapchete.open(config, mode="continue")
        # get tile from lower zoom level
        lower_tile = next(mp.get_process_tiles(4))
        # process and save
        for tile in lower_tile.get_children():
            mp.write(mp.get_raw_output(tile))
        # read from baselevel
        out_tile = mp.get_raw_output(lower_tile)
        assert not out_tile.data.mask.all()
        # get tile from higher zoom level
        tile = next(mp.get_process_tiles(6))
        # process and save
        mp.write(mp.get_raw_output(tile))
        # read from baselevel
        assert any(
            not mp.get_raw_output(upper_tile).data.mask.all()
            for upper_tile in tile.get_children()
        )
    finally:
        shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_baselevels():
    """Baselevel interpolation.

    Fix: generators have no ``.next()`` method in Python 3 — use the
    built-in ``next()``.
    """
    try:
        mp = mapchete.open(
            os.path.join(SCRIPTDIR, "testdata/baselevels.mapchete"),
            mode="continue")
        # process data before getting baselevels
        mp.batch_process(quiet=True)
        # get tile from lower zoom level
        for t in mp.get_process_tiles(4):
            tile = mp.get_raw_output(t)
            assert not tile.data.mask.all()
            # write for next zoom level
            mp.write(tile)
            assert not mp.get_raw_output(tile.get_parent()).data.mask.all()
        # get tile from higher zoom level
        tile = next(mp.get_process_tiles(6))
        # process and save
        output = mp.get_raw_output(tile)
        mp.write(output)
        # read from baselevel
        assert any(
            not mp.get_raw_output(upper_tile).data.mask.all()
            for upper_tile in tile.get_children()
        )
    finally:
        shutil.rmtree(OUT_DIR, ignore_errors=True)
def test_read_raster_window_input_list(cleantopo_br):
    """read_raster_window() on a path list matches mosaic + resample result."""
    process_zoom = 5
    conf = dict(**cleantopo_br.dict)
    conf["output"].update(metatiling=1)
    with mapchete.open(conf) as mp:
        mp.batch_process(process_zoom)
        # collect all written output tiles with their paths
        tiles = [
            (tile, mp.config.output.get_path(tile))
            for tile in mp.config.output_pyramid.tiles_from_bounds(
                mp.config.bounds, process_zoom)
            if path_exists(mp.config.output.get_path(tile))
        ]
        upper_tile = next(mp.get_process_tiles(process_zoom - 1))
        assert len(tiles) > 1
        # reference: mosaic the tiles individually, then resample
        resampled = resample_from_array(
            in_raster=create_mosaic(
                [(tile, read_raster_window(path, tile)) for tile, path in tiles]
            ),
            out_tile=upper_tile,
        )
        # candidate: hand read_raster_window the whole path list at once
        resampled2 = read_raster_window(
            [path for _, path in tiles], upper_tile,
            src_nodata=0, dst_nodata=0)
        assert resampled.dtype == resampled2.dtype
        assert resampled.shape == resampled2.shape
        assert np.array_equal(resampled.mask, resampled2.mask)
        # TODO slight rounding errors occur
        assert np.allclose(resampled, resampled2, rtol=0.01)