def _mesh_interpolate_worker(args):
    coords, raster, crs, chunk_size = args
    raster = Raster(raster)
    raster.warp(crs)
    results = []
    for window in raster.iter_windows(chunk_size=chunk_size, overlap=2):
        xi = raster.get_x(window)
        yi = raster.get_y(window)
        zi = raster.get_values(window=window)
        # Build a bicubic spline over the window; raster rows run north to
        # south, so the y-axis and values are flipped into ascending order.
        f = RectBivariateSpline(
            xi,
            np.flip(yi),
            np.flipud(zi).T,
            bbox=[np.min(xi), np.max(xi), np.min(yi), np.max(yi)],
            kx=3, ky=3, s=0)
        # Only evaluate mesh nodes that fall strictly inside this window.
        idxs = np.where(
            np.logical_and(
                np.logical_and(
                    np.min(xi) < coords[:, 0],
                    np.max(xi) > coords[:, 0]),
                np.logical_and(
                    np.min(yi) < coords[:, 1],
                    np.max(yi) > coords[:, 1])))[0]
        values = f.ev(coords[idxs, 0], coords[idxs, 1])
        results.append((idxs, values))
    return results
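
# Hypothetical dispatch sketch (not part of the original module): it shows how
# the (idxs, values) pairs returned by _mesh_interpolate_worker could be
# applied back onto a flat array of nodal values using a process pool. The
# function name, argument names and the default nprocs are assumptions.
from multiprocessing import Pool

def _interpolate_rasters_parallel(coords, raster_paths, crs, chunk_size,
                                  values, nprocs=4):
    job_args = [(coords, path, crs, chunk_size) for path in raster_paths]
    with Pool(processes=nprocs) as pool:
        per_raster_results = pool.map(_mesh_interpolate_worker, job_args)
    # Later rasters overwrite earlier ones where their windows overlap.
    for window_results in per_raster_results:
        for idxs, window_values in window_results:
            values[idxs] = window_values
    return values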
def _geom_raster_processing_worker(
        raster_path,
        raster_opts,
        zmin,
        zmax,
        join_method,
        driver,
        chunk_size,
        overlap,
):
    raster = Raster(raster_path)
    _apply_raster_opts(raster, raster_opts)
    geom = Geom(
        raster.get_multipolygon(zmin=zmin, zmax=zmax),
        join_method=join_method,
        driver=driver,
        nprocs=1)
    return geom
def _main_serial(self):
    if self._args.use_anti_aliasing:
        self._initial_values = self.mesh.values.copy()
    for tile in (tqdm(self._rasters) if self._args.verbose
                 else self._rasters):
        raster = Raster(tile)
        self.mesh.interpolate(raster)
        if self._args.use_anti_aliasing:
            self._resolve_aliasing(raster)
def _put_raster_in_cache(self, url, tmpfile):
    datadir = self._cache / 'data'
    datadir.mkdir(exist_ok=True)
    target_path = datadir / url.split('/')[-1]
    if not target_path.is_file():
        shutil.copyfile(tmpfile.name, target_path)
    raster = Raster(target_path)
    bbox = raster.bbox  # pylint: disable=no-member
    geom = box(bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax)
    geom = transform_polygon(geom, raster.crs, 4326)
    self._session.add(
        db.TileIndexRasters(
            geom=from_shape(geom, srid=4326),
            url=url,
            name=target_path.name,
            md5=raster.md5))
    self._session.commit()
def get_hfun(self, geom=None):
    self._logger.debug('get_hfun()')
    if 'rasters' in self._hfun:
        for id, hfun_raster_opts in self._hfun['rasters'].items():
            self._logger.debug(f'get_hfun(): processing group id={id}')
            for raster_path, raster_opts in self._get_raster_by_id(id):
                self._logger.debug(
                    f'get_hfun(): appending raster {raster_path} for '
                    'parallel processing.')
                raster = Raster(raster_path, crs=raster_opts.get('crs'))
                _apply_raster_opts(raster, raster_opts)
                hfun = Hfun(
                    raster,
                    hmin=self._hfun_hmin,
                    hmax=self._hfun_hmax,
                    nprocs=self._hfun_nprocs)
                _apply_hfun_raster_opts(hfun, hfun_raster_opts)
                mesh = hfun.get_mesh(geom=geom)
    if 'features' in self._hfun:
        raise NotImplementedError("config.hfun.features not implemented")
    return Hfun(mesh, crs=self._crs)
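
# Hypothetical sketch of _apply_hfun_raster_opts, which is called above but not
# defined in this excerpt. It would translate the per-group hfun config into
# calls on the Hfun object; the option keys ('contours', 'constant_values') are
# assumptions, while add_contour and add_constant_value are methods used
# elsewhere in this codebase.
def _apply_hfun_raster_opts(hfun, hfun_raster_opts):
    for ctr in hfun_raster_opts.get('contours', []):
        hfun.add_contour(
            ctr.get('level'),
            ctr.get('expansion_rate'),
            ctr.get('target_size'))
    for const in hfun_raster_opts.get('constant_values', []):
        hfun.add_constant_value(
            const.get('value'),
            const.get('lower_bound'))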
def run(self, args):
    # Get inputs
    mesh_file = args.mesh
    mesh_crs = args.mesh_crs
    shape_path = args.shape
    refine_upstream = args.upstream
    refine_factor = args.factor
    refine_cutoff = args.cutoff
    contours = args.contours
    patches = args.patches
    constants = args.constants
    sieve = args.sieve
    interp = args.interpolate
    out_path = args.output
    out_format = args.output_format
    nprocs = args.nprocs

    # Process inputs
    contour_defns = []
    for contour in contours:
        if len(contour) > 3:
            raise ValueError(
                "Invalid format for contour specification."
                " It should be level [expansion target-size].")
        level, expansion_rate, target_size = [
            *contour, *[None] * (3 - len(contour))]
        contour_defns.append((level, expansion_rate, target_size))

    patch_defns = []
    for lower_bound, target_size in patches:
        # Patches carry no expansion rate of their own; a default is picked
        # when the patch is added to the refinement size function below.
        patch_defns.append((lower_bound, None, target_size))

    constant_defns = []
    for lower_bound, target_size in constants:
        constant_defns.append((lower_bound, target_size))

    interp_rast_list = []
    for dem in interp:
        interp_rast_list.append(Raster(dem))

    mesh = Mesh.open(mesh_file, crs=mesh_crs)

    geom = Geom(deepcopy(mesh))
    geom_jig = geom.msh_t()

    initial_hfun = Hfun(deepcopy(mesh))
    initial_hfun.size_from_mesh()
    # DO NOT DEEPCOPY
    initial_hfun_jig = initial_hfun.msh_t()

    ref_crs = initial_hfun_jig.crs
    utils.reproject(geom_jig, ref_crs)

    init_jig = None

    # If there's an input shape, refine in the shape, otherwise
    # refine the whole domain by the factor
    if shape_path or refine_cutoff is not None:
        mesh_poly = mesh.hull.multipolygon()
        gdf_mesh_poly = gpd.GeoDataFrame(
            geometry=gpd.GeoSeries(mesh_poly), crs=mesh.crs)

        if shape_path:
            gdf_shape = gpd.read_file(shape_path)
            gdf_shape = gdf_shape.to_crs(mesh.crs)

            gdf_to_refine = gpd.overlay(
                gdf_mesh_poly, gdf_shape, how='intersection')
            gdf_diff = gpd.overlay(
                gdf_mesh_poly, gdf_shape, how='difference')
            diff_polys = []
            for geom in gdf_diff.geometry:
                if isinstance(geom, Polygon):
                    diff_polys.append(geom)
                elif isinstance(geom, MultiPolygon):
                    diff_polys.extend(geom)

            if refine_upstream:
                # TODO: Check for multipolygon and single polygon in multi assumption
                area_ref = 0.05 * np.sum(
                    [i.area for i in gdf_to_refine.geometry])
                upstream_polys = []
                for ipoly in diff_polys:
                    if ipoly.area < area_ref:
                        upstream_polys.append(ipoly)
                if upstream_polys:
                    gdf_upstream = gpd.GeoDataFrame(
                        geometry=gpd.GeoSeries(upstream_polys),
                        crs=gdf_diff.crs)
                    gdf_to_refine = gpd.overlay(
                        gdf_upstream, gdf_to_refine, how='union')
        else:
            gdf_to_refine = gdf_mesh_poly

        gdf_to_refine = gdf_to_refine.to_crs(ref_crs)

        if refine_cutoff is not None:
            cutoff_mp = mesh.get_multipolygon(zmin=refine_cutoff)
            cutoff_gdf = gpd.GeoDataFrame(
                geometry=gpd.GeoSeries(cutoff_mp), crs=mesh.crs)
            cutoff_gdf = cutoff_gdf.to_crs(ref_crs)
            gdf_to_refine = gpd.overlay(
                gdf_to_refine, cutoff_gdf, how='intersection')

        refine_polys = gdf_to_refine.unary_union

        # Initial mesh for the refinement (all except refinement area)
        init_jig = deepcopy(mesh.msh_t)
        utils.reproject(init_jig, ref_crs)
        utils.clip_mesh_by_shape(
            init_jig, refine_polys,
            fit_inside=True, inverse=True, in_place=True)

        # Fix elements in the initial mesh that are NOT clipped by refine
        # polygon
        init_jig.vert2['IDtag'][:] = -1

        # Preparing refinement size function
        vert_in = utils.get_verts_in_shape(initial_hfun_jig, refine_polys)

        # Reduce hfun by factor in refinement area; modifying in-place
        refine_hfun_jig = utils.clip_mesh_by_shape(
            initial_hfun_jig, refine_polys, fit_inside=False)
        utils.clip_mesh_by_shape(
            initial_hfun_jig, refine_polys,
            fit_inside=True, inverse=True, in_place=True)

    else:
        # Refine the whole domain by factor
        refine_hfun_jig = deepcopy(initial_hfun_jig)

    # Prepare refinement size function with additional criteria
    refine_hfun_jig.value[:] = refine_hfun_jig.value / refine_factor

    hfun_refine = Hfun(Mesh(deepcopy(refine_hfun_jig)))

    transformer = Transformer.from_crs(mesh.crs, ref_crs, always_xy=True)

    for level, expansion_rate, target_size in contour_defns:
        if expansion_rate is None:
            expansion_rate = 0.1
        if target_size is None:
            target_size = np.min(refine_hfun_jig.value)
        refine_ctr = mesh.get_contour(level=level)
        refine_ctr = transform(transformer.transform, refine_ctr)
        hfun_refine.add_feature(
            refine_ctr, expansion_rate, target_size, nprocs=nprocs)

    for lower_bound, expansion_rate, target_size in patch_defns:
        refine_mp = mesh.get_multipolygon(zmin=lower_bound)
        refine_mp = transform(transformer.transform, refine_mp)
        hfun_refine.add_patch(
            refine_mp, expansion_rate, target_size, nprocs)

    for lower_bound, target_size in constant_defns:
        refine_mp = mesh.get_multipolygon(zmin=lower_bound)
        refine_mp = transform(transformer.transform, refine_mp)
        hfun_refine.add_patch(refine_mp, None, target_size, nprocs)

    refine_hfun_jig = hfun_refine.msh_t()
    utils.reproject(refine_hfun_jig, ref_crs)

    final_hfun_jig = utils.merge_msh_t(
        initial_hfun_jig, refine_hfun_jig,
        out_crs=ref_crs, drop_by_bbox=False)

    if not (geom_jig.crs == ref_crs
            and (init_jig is None or init_jig.crs == ref_crs)):
        raise ValueError(
            "CRS for geometry, hfun and init mesh is not the same")

    opts = jigsawpy.jigsaw_jig_t()
    opts.hfun_scal = "absolute"
    opts.hfun_hmin = np.min(final_hfun_jig.value)
    opts.hfun_hmax = np.max(final_hfun_jig.value)
    opts.mesh_dims = +2

    remesh_jig = jigsawpy.jigsaw_msh_t()
    remesh_jig.mshID = 'euclidean-mesh'
    remesh_jig.ndims = 2
    # The check above guarantees this matches ref_crs when an initial mesh
    # is used; fall back to ref_crs when refining the whole domain.
    remesh_jig.crs = init_jig.crs if init_jig is not None else ref_crs

    jigsawpy.lib.jigsaw(
        opts, geom_jig, remesh_jig,
        init=init_jig, hfun=final_hfun_jig)

    utils.finalize_mesh(remesh_jig, sieve)

    # Interpolate from input mesh and DEM if any
    utils.interpolate_euclidean_mesh_to_euclidean_mesh(
        mesh.msh_t, remesh_jig)
    final_mesh = Mesh(remesh_jig)
    if interp_rast_list:
        final_mesh.interpolate(interp_rast_list, nprocs=nprocs)

    # Write to disk
    final_mesh.write(str(out_path), format=out_format, overwrite=True)
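
# Hypothetical invocation sketch (not part of the original module): run() above
# reads its inputs from an argparse-style Namespace, so a minimal call could
# look like the following. The enclosing command class is not shown in this
# excerpt, so `RemeshByShape` and every path and value below are placeholder
# assumptions, not the project's actual CLI wiring.
from argparse import Namespace

cmd = RemeshByShape()  # assumed name of the class that defines run(self, args)
cmd.run(Namespace(
    mesh='existing_mesh.2dm',       # placeholder input mesh
    mesh_crs='EPSG:4326',
    shape='refine_region.shp',      # polygon(s) selecting the refinement area
    upstream=False,
    factor=2.0,                     # divide local element size by this factor
    cutoff=None,
    contours=[[0.0, 0.01, 50.0]],   # level [expansion-rate target-size]
    patches=[],
    constants=[],
    sieve=None,
    interpolate=[],                 # optional DEMs to interpolate afterwards
    output='refined_mesh.2dm',
    output_format='2dm',
    nprocs=4,
))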
def run(self, args):
    logging.info(args)

    base_path = pathlib.Path(args.basemesh)
    demlo_paths = args.demlo
    demhi_paths = args.demhi
    out_path = pathlib.Path(args.out)
    out_path.parent.mkdir(exist_ok=True, parents=True)

    base_mesh_4_hfun = Mesh.open(base_path, crs="EPSG:4326")
    base_mesh_4_geom = Mesh.open(base_path, crs="EPSG:4326")

    geom_rast_list = []
    hfun_rast_list = []
    hfun_hirast_list = []
    hfun_lorast_list = []
    interp_rast_list = []
    for dem_path in demlo_paths:
        hfun_lorast_list.append(Raster(dem_path))
        interp_rast_list.append(Raster(dem_path))
    for dem_path in demhi_paths:
        geom_rast_list.append(Raster(dem_path))
        hfun_hirast_list.append(Raster(dem_path))
        interp_rast_list.append(Raster(dem_path))
    hfun_rast_list = [*hfun_lorast_list, *hfun_hirast_list]

    geom = Geom(
        geom_rast_list,
        base_mesh=base_mesh_4_geom,
        zmax=15,
        nprocs=4)

    hfun = Hfun(
        hfun_rast_list,
        base_mesh=base_mesh_4_hfun,
        hmin=30,
        hmax=15000,
        nprocs=4)

    ## Add contour refinements at 0 separately for GEBCO and NCEI
    ctr1 = Contour(level=0, sources=hfun_hirast_list)
    hfun.add_contour(None, 1e-3, 30, contour_defn=ctr1)
    ctr2 = Contour(level=0, sources=hfun_lorast_list)
    hfun.add_contour(None, 1e-2, 500, contour_defn=ctr2)

    ## Add constant values from 0 to inf on hi-res rasters
    hfun.add_constant_value(
        30, 0, source_index=list(range(len(demhi_paths))))

    # Calculate geom
    geom_mp = geom.get_multipolygon()

    # Write to disk
    gpd.GeoDataFrame(
        {'geometry': geom_mp},
        crs="EPSG:4326").to_file(str(out_path) + '.geom.shp')
    del geom_mp

    # Calculate hfun
    hfun_msh_t = hfun.msh_t()

    # Write to disk
    sms2dm.writer(
        msh_t_to_2dm(hfun_msh_t), str(out_path) + '.hfun.2dm', True)
    del hfun_msh_t

    # Read back stored values to pass to mesh driver
    read_gdf = gpd.read_file(str(out_path) + '.geom.shp')
    geom_from_disk = MultiPolygonGeom(
        MultiPolygon(list(read_gdf.geometry)), crs=read_gdf.crs)

    read_hfun = Mesh.open(str(out_path) + '.hfun.2dm', crs="EPSG:4326")
    hfun_from_disk = HfunMesh(read_hfun)

    jigsaw = JigsawDriver(
        geom_from_disk, hfun=hfun_from_disk, initial_mesh=None)
    jigsaw.verbosity = 1

    ## Execute mesher (processing of geom and hfun happens here)
    mesh = jigsaw.run()

    ## Free-up memory
    del read_gdf
    del geom_from_disk
    del read_hfun
    del hfun_from_disk
    gc.collect()

    mesh.write(str(out_path) + '.raw.2dm', format='2dm', overwrite=True)

    ## Interpolate DEMs on the mesh
    mesh.interpolate(interp_rast_list, nprocs=4)

    ## Output
    mesh.write(out_path, format='2dm', overwrite=True)
def get_geom(self):
    self._logger.debug('get_geom()')
    geom_collection = []
    if self._geom_nprocs is not None:
        job_args = []
        hashes = []
    for id, geom_opts in self._geom.items():
        self._logger.debug(f'get_geom(): processing group id={id}')
        zmin = geom_opts.get("zmin")
        zmax = geom_opts.get("zmax")
        driver = geom_opts.get("driver", "matplotlib")
        for raster_path, raster_opts in self._get_raster_by_id(id):
            geom = None
            self._logger.debug(
                f'get_geom(): appending raster {raster_path} for '
                'parallel processing.')
            hash = _geom_identifier(
                zmin, zmax, driver, Raster(raster_path).md5)
            query = self._session.query(db.GeomCollection).get(hash)
            if query is None:
                chunk_size = raster_opts.get("chunk_size")
                if self._geom_nprocs is not None:
                    if chunk_size == 0:
                        job_args.append(
                            (raster_path, raster_opts, zmin, zmax,
                             geom_opts.get("join_method"), driver,
                             chunk_size, raster_opts.get("overlap", 2)))
                        hashes.append(hash)
                    else:
                        geom = _geom_raster_processing_worker(
                            raster_path, raster_opts, zmin, zmax,
                            geom_opts.get("join_method"), driver,
                            chunk_size, raster_opts.get("overlap"))
                else:
                    geom = _geom_raster_processing_worker(
                        raster_path, raster_opts, zmin, zmax,
                        geom_opts.get("join_method"), driver,
                        chunk_size, raster_opts.get("overlap"))
                    self._save_geom_to_db(
                        geom, raster_path.name, zmin, zmax, driver, hash)
                    self._session.commit()
            else:
                geom = Geom(
                    geoalchemy2.shape.to_shape(query.geom),
                    crs=self._crs)
            if geom is not None:
                geom_collection.append(geom)
    if self._geom_nprocs is not None:
        self._logger.debug(
            'get_geom(): executing parallel geom computations...')
        with Pool(processes=self._geom_nprocs) as pool:
            res = pool.starmap(_geom_raster_processing_worker, job_args)
        pool.join()
        for i, geom in enumerate(res):
            geom_collection.append(geom)
            self._save_geom_to_db(
                geom, job_args[i][0].name, job_args[i][2],
                job_args[i][3], job_args[i][5], hashes[i])
        self._session.commit()
        del res
    for feature in self._features:
        raise NotImplementedError('features')
    mpc = []
    for geom in geom_collection:
        mpc.append(geom.multipolygon)
    self._logger.debug('get_geom(): apply unary_union...')
    mp = ops.unary_union(mpc)
    return Geom(mp, crs=self._crs)
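
# Hypothetical helper sketch (not shown in this excerpt): _geom_identifier is
# used above to key cached geometries in the database. One plausible
# implementation hashes the query parameters together with the raster's md5;
# the project's actual scheme may differ.
import hashlib

def _geom_identifier(zmin, zmax, driver, raster_md5):
    key = f'{zmin}:{zmax}:{driver}:{raster_md5}'
    return hashlib.md5(key.encode('utf-8')).hexdigest()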