def basetest_buffer_ext(input_path, output_path):
    """Test buffer with an explicit, case-insensitive columns filter.

    Args:
        input_path: path to the input file to buffer.
        output_path: path the buffered output file is written to.
    """
    # NOTE(review): a function with this same name is defined again later in
    # the file; at import time the later definition shadows this one.

    ### Init ###
    layerinfo_input = gfo.get_layerinfo(input_path)
    assert layerinfo_input.crs is not None
    distance = 1
    if not layerinfo_input.crs.is_projected:
        # Geographic CRS: 1 degree = 111 km or 111000 m
        distance /= 111000

    ### Check if columns parameter works (case insensitive) ###
    columns = ['OIDN', 'uidn', 'HFDTLT', 'lblhfdtlt', 'GEWASGROEP', 'lengte', 'OPPERVL']
    gfo.buffer(
            input_path=input_path,
            columns=columns,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel())

    # Now check if the tmp file is correctly created
    layerinfo_input = gfo.get_layerinfo(input_path)
    layerinfo_output = gfo.get_layerinfo(output_path)
    assert layerinfo_input.featurecount == layerinfo_output.featurecount
    # Columns were requested in mixed case but should be matched
    # case-insensitively and come out with their original casing.
    assert 'OIDN' in layerinfo_output.columns
    assert 'UIDN' in layerinfo_output.columns
    assert len(layerinfo_output.columns) == len(columns)

    # Read result for some more detailed checks
    output_gdf = gfo.read_file(output_path)
    assert output_gdf['geometry'][0] is not None
    # NOTE(review): this value is never used in this variant of the function;
    # kept to preserve behavior (the extended variant below compares it).
    area_default_buffer = sum(output_gdf.area)
def basetest_buffer_ext(input_path, output_path):
    """Test extended buffer options: columns filter, endcap and join styles.

    First buffers with a case-insensitive columns filter and default styles,
    then buffers with SQUARE endcaps + MITRE joins and checks the resulting
    area is larger than the default (round) buffer's area.

    Args:
        input_path: path to the input file to buffer.
        output_path: path the buffered output file is written to; the
            endcap/join variant is written next to it with a suffix.
    """
    ### Init ###
    layerinfo_input = gfo.get_layerinfo(input_path)
    assert layerinfo_input.crs is not None
    distance = 1
    if not layerinfo_input.crs.is_projected:
        # Geographic CRS: 1 degree = 111 km or 111000 m
        distance /= 111000

    ### Check if columns parameter works (case insensitive) ###
    columns = [
        'OIDN', 'uidn', 'HFDTLT', 'lblhfdtlt', 'GEWASGROEP', 'lengte', 'OPPERVL'
    ]
    gfo.buffer(
            input_path=input_path,
            columns=columns,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel())

    # Now check if the tmp file is correctly created
    layerinfo_input = gfo.get_layerinfo(input_path)
    layerinfo_output = gfo.get_layerinfo(output_path)
    assert layerinfo_input.featurecount == layerinfo_output.featurecount
    # Columns were requested in mixed case but should be matched
    # case-insensitively and come out with their original casing.
    assert 'OIDN' in layerinfo_output.columns
    assert 'UIDN' in layerinfo_output.columns
    assert len(layerinfo_output.columns) == len(columns)

    # Read result for some more detailed checks
    output_gdf = gfo.read_file(output_path)
    assert output_gdf['geometry'][0] is not None
    area_default_buffer = sum(output_gdf.area)

    ### Test polygon buffer with square endcaps ###
    output_path = (
            output_path.parent
            / f"{output_path.stem}_endcap_join{output_path.suffix}")
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            endcap_style=geometry_util.BufferEndCapStyle.SQUARE,
            join_style=geometry_util.BufferJoinStyle.MITRE,
            nb_parallel=get_nb_parallel())

    # Now check if the output file is correctly created
    assert output_path.exists()
    layerinfo_output = gfo.get_layerinfo(output_path)
    assert layerinfo_input.featurecount == layerinfo_output.featurecount
    assert len(layerinfo_output.columns) == len(layerinfo_input.columns)
    assert layerinfo_output.geometrytype == GeometryType.MULTIPOLYGON

    # Read result for some more detailed checks
    output_gdf = gfo.read_file(output_path)
    assert output_gdf['geometry'][0] is not None
    # Square endcaps + mitred joins cover the round buffer's area plus the
    # corners, so the total area must be strictly larger.
    area_square_buffer = sum(output_gdf.area)
    assert area_square_buffer > area_default_buffer
def benchmark_buffer(
        input_path: Path,
        tmpdir: Path) -> List[RunResult]:
    """Benchmark the different buffer implementations on the same input file.

    Runs the default, geopandas-based and spatialite-based buffer with
    distance=1 and records the wall-clock time of each run.

    Args:
        input_path: path to the input file to buffer. Must exist.
        tmpdir: directory the benchmark output files are written to.

    Returns:
        One RunResult per implementation that was benchmarked.

    Raises:
        FileNotFoundError: if input_path doesn't exist. (Subclass of the
            Exception that was raised before, so existing callers still work.)
    """
    # Init
    if not input_path.exists():
        raise FileNotFoundError(f"input_path doesn't exist: {input_path}")
    results = []

    def run_single(label, buffer_func, output_suffix, operation, operation_descr):
        # Time one buffer implementation and append its RunResult.
        print(f"{label}: start")
        start_time = datetime.now()
        output_path = tmpdir / f"{input_path.stem}{output_suffix}.gpkg"
        buffer_func(input_path, output_path, distance=1, force=True)
        secs_taken = (datetime.now() - start_time).total_seconds()
        results.append(geofileops_RunResult(
                operation=operation,
                operation_descr=operation_descr,
                secs_taken=secs_taken,
                run_details={"nb_cpu": multiprocessing.cpu_count()}))
        print(f"{label}: ready in {secs_taken:.2f} secs")

    # Go!
    run_single(
            "buffer", gfo.buffer, "_buf", "buffer",
            "buffer agri parcels BEFL (~500.000 polygons)")
    run_single(
            "buffer with gpd", geofileops_gpd.buffer, "_buf_gpd", "buffer_gpd",
            "buffer agri parcels BEFL (~500.000 polygons), using gpd")
    run_single(
            "buffer with spatialite", geofileops_sql.buffer, "_buf_sql",
            "buffer_spatialite",
            "buffer agri parcels BEFL (~500.000 polygons), using spatialite")

    return results
def basetest_buffer(input_path: Path, output_path: Path, input_geometry_type: GeometryType):
    """Test basic buffer behavior: positive, idempotent-output, negative, explode.

    Args:
        input_path: path to the input file to buffer.
        output_path: path the buffered output file is written to; negative
            buffer variants are written next to it with a suffix.
        input_geometry_type: geometry type of the input layer, used to decide
            whether a negative buffer can produce any output at all.
    """
    ### Init ###
    layerinfo_input = gfo.get_layerinfo(input_path)
    assert layerinfo_input.crs is not None
    distance = 1
    if not layerinfo_input.crs.is_projected:
        # Geographic CRS: 1 degree = 111 km or 111000 m
        distance /= 111000

    ### Test positive buffer ###
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel(),
            batchsize=get_batchsize())

    # Now check if the output file is correctly created
    assert output_path.exists()
    layerinfo_output = gfo.get_layerinfo(output_path)
    assert layerinfo_input.featurecount == layerinfo_output.featurecount
    assert len(layerinfo_output.columns) == len(layerinfo_input.columns)

    # Check geometry type: buffering always results in (multi)polygons
    assert layerinfo_output.geometrytype == GeometryType.MULTIPOLYGON

    # Read result for some more detailed checks
    output_gdf = gfo.read_file(output_path)
    assert output_gdf['geometry'][0] is not None

    ### Test buffer to existing output path ###
    assert output_path.exists()
    mtime_orig = output_path.stat().st_mtime
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel())
    # Without force=True the existing output file must be left untouched
    assert output_path.stat().st_mtime == mtime_orig

    # With force=True the output file must be rewritten
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel(),
            force=True)
    assert output_path.stat().st_mtime != mtime_orig

    ### Test negative buffer ###
    distance = -10
    if not layerinfo_input.crs.is_projected:
        # Geographic CRS: 1 degree = 111 km or 111000 m
        distance /= 111000
    output_path = output_path.parent / f"{output_path.stem}_m10m{output_path.suffix}"
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            nb_parallel=get_nb_parallel())

    # Now check if the output file is correctly created
    if input_geometry_type in [GeometryType.MULTIPOINT, GeometryType.MULTILINESTRING]:
        # A negative buffer of points or linestrings doesn't give a result.
        assert not output_path.exists()
    else:
        # A negative buffer of polygons gives a result for large polygons.
        assert output_path.exists()
        layerinfo_output = gfo.get_layerinfo(output_path)
        assert len(layerinfo_output.columns) == len(layerinfo_input.columns)
        if layerinfo_input.crs.is_projected:
            # 7 polygons disappear because of the negative buffer
            assert layerinfo_output.featurecount == layerinfo_input.featurecount - 7
        else:
            assert layerinfo_output.featurecount == layerinfo_input.featurecount - 4

        # Check geometry type
        assert layerinfo_output.geometrytype == GeometryType.MULTIPOLYGON

        # Read result for some more detailed checks
        output_gdf = gfo.read_file(output_path)
        assert output_gdf['geometry'][0] is not None

    ### Test negative buffer with explodecollections ###
    output_path = (
            output_path.parent
            / f"{output_path.stem}_m10m_explode{output_path.suffix}")
    gfo.buffer(
            input_path=input_path,
            output_path=output_path,
            distance=distance,
            explodecollections=True,
            nb_parallel=get_nb_parallel())

    # Now check if the output file is correctly created
    if input_geometry_type in [GeometryType.MULTIPOINT, GeometryType.MULTILINESTRING]:
        # A negative buffer of points or linestrings doesn't give a result.
        assert not output_path.exists()
    else:
        # A negative buffer of polygons gives a result for large polygons
        assert output_path.exists()
        layerinfo_output = gfo.get_layerinfo(output_path)
        assert len(layerinfo_output.columns) == len(layerinfo_input.columns)
        if layerinfo_input.crs.is_projected:
            # 6 polygons disappear because of the negative buffer, 3 polygons are
            # split in 2 because of the negative buffer and/or explodecollections=True.
            # NOTE(review): the comment says 6 polygons disappear but the assert
            # subtracts 7 — confirm which is right against the test data.
            assert layerinfo_output.featurecount == layerinfo_input.featurecount - 7 + 3
        else:
            assert layerinfo_output.featurecount == layerinfo_input.featurecount - 3 + 3

        # Check geometry type
        assert layerinfo_output.geometrytype == GeometryType.MULTIPOLYGON

        # Read result for some more detailed checks
        output_gdf = gfo.read_file(output_path)
        assert output_gdf['geometry'][0] is not None
# Go! logger.info(gfo.get_layerinfo(input_path)) logger.info("Start dissolve buildings") buildings_diss_path = str(tempdir / f"{output_basename}_diss.gpkg") gfo.dissolve(input_path=input_path, tiles_path=tiles_path, output_path=buildings_diss_path, explodecollections=True, clip_on_tiles=True) logger.info("Ready dissolve buildings") logger.info("Start buffer 50m") buildings_diss_buf50m_path = str(tempdir / f"{output_basename}_diss_buf50m.gpkg") gfo.buffer(input_path=buildings_diss_path, output_path=buildings_diss_buf50m_path, distance=50) logger.info("Ready buffer 50m") logger.info("Start dissolve buffer 50m") buildings_diss_buf50m_diss_path = str( output_dir / f"{output_basename}_diss_buf50m_diss.gpkg") gfo.dissolve(input_path=buildings_diss_buf50m_path, tiles_path=tiles_path, output_path=buildings_diss_buf50m_diss_path, explodecollections=True, clip_on_tiles=True) logger.info("Ready dissolve buffer 50m") logger.info("Start buffer 100m") buildings_diss_buf100m_path = str(tempdir /