def bath(self, **kwargs):
    """Attach bathymetry to the model on the current mesh nodes.

    Interpolates the configured DEM onto the mesh lon/lat locations and
    stores the result in ``self.dem``. When an ``update`` flag is present
    but does not request a DEM refresh, a previously written local
    ``<tag>.dep`` file is read instead.
    """
    # interpolate on the mesh node coordinates
    kwargs["grid_x"] = self.mesh.Dataset.lons.values
    kwargs["grid_y"] = self.mesh.Dataset.lats.values

    dpath = get_value(self, kwargs, "dem_source", None)
    kwargs.update({"dem_source": dpath})

    flag = get_value(self, kwargs, "update", [])

    # bounding window for the DEM request — hoisted out of the (previously
    # duplicated) branches that construct a fresh Dem object
    kwargs.update(
        {
            "lon_min": self.lon_min,
            "lat_min": self.lat_min,
            "lon_max": self.lon_max,
            "lat_max": self.lat_max,
        }
    )

    # check if files exist
    if flag and not (("dem" in flag) or ("all" in flag)):
        # reuse the dep file written by a previous run
        logger.info("reading local dem file ..\n")
        # BUG FIX: the original referenced an undefined name `z`
        # (`z["rpath"]`), which raised NameError on this path; use the
        # instance run path instead.
        dem_source = self.rpath + self.tag + ".dep"
        # NOTE(review): the original also discarded this result without
        # storing it on self — kept as-is for behavioral parity; confirm
        # whether it should be assigned to self.dem.
        rdem = from_dep(dem_source)
    else:
        self.dem = pdem.Dem(**kwargs)
def make_bgmesh(contours, **kwargs):
    """Build a background mesh-size (hfun) field from bathymetry.

    For a global mesh the whole sphere is used as the window; otherwise the
    window comes from the contour bounds. The resulting size field is saved
    to ``<rpath>/jigsaw/<tag>-hfun.msh`` and also returned.
    """
    gglobal = kwargs.get("gglobal", False)

    if gglobal:
        lon_min, lon_max = -180.0, 180.0
        lat_min, lat_max = -90.0, 90.0
    else:
        bounds = contours.bounds
        lon_min = bounds.minx.min()
        lon_max = bounds.maxx.max()
        lat_min = bounds.miny.min()
        lat_max = bounds.maxy.max()

    # strip any caller-provided window so ours takes precedence
    kwargs_ = kwargs.copy()
    for key in ("lon_min", "lon_max", "lat_min", "lat_max"):
        kwargs_.pop(key, None)

    dem = kwargs.get("dem_source", None)
    if not isinstance(dem, xr.Dataset):
        logger.info("Read DEM")
        dem = pdem.Dem(
            lon_min=lon_min,
            lon_max=lon_max,
            lat_min=lat_min,
            lat_max=lat_max,
            **kwargs_,
        )
        dem = dem.Dataset

    res_min = kwargs_.pop("resolution_min", 0.01)
    res_max = kwargs_.pop("resolution_max", 0.5)
    dhdx = kwargs_.pop("dhdx", 0.15)

    rpath = kwargs.get("rpath", ".")
    tag = kwargs.get("tag", "jigsaw")
    fpos = rpath + "/jigsaw/" + tag + "-hfun.msh"

    if gglobal:
        logger.info("Evaluate global bgmesh")
        nds, lms = make_bgmesh_global(contours, fpos, dem, **kwargs)
        logger.info("Saving global background scale file")
        dh = to_global_hfun(nds, lms, fpos, **kwargs)
    else:
        logger.info("Evaluate bgmesh")
        # resolution in lat/lon degrees
        dh = get_hfun(
            dem,
            resolution_min=res_min,
            resolution_max=res_max,
            dhdx=dhdx,
            **kwargs_,
        )
        logger.info("Saving background scale file")
        to_hfun_grid(dh, fpos)  # write bgmesh file

    return dh
def test_schism(tmpdir, dem_source, kwargs):
    """Round-trip a SCHISM hgrid through DEM interpolation and file I/O."""
    mesh = pmesh.set(type="tri2d", mesh_file=(DATA_DIR / "hgrid.gr3").as_posix())

    # update kwargs with the mesh node coordinates
    xp = mesh.Dataset.SCHISM_hgrid_node_x.values
    yp = mesh.Dataset.SCHISM_hgrid_node_y.values
    kwargs.update({"grid_x": xp, "grid_y": yp})

    # get dem
    df = pdem.Dem(dem_source=dem_source, **kwargs)
    # get dem on mesh
    df.Dataset = pdem.dem_on_mesh(df.Dataset, **kwargs)

    # depth is written with the opposite sign of the interpolated values
    mesh.Dataset["depth"].loc[:] = -df.Dataset.ival.values

    filename_ = str(tmpdir.join("hgrid_.gr3"))
    # output to mesh file
    mesh.to_file(filename_)

    # read again new mesh
    mesh_ = pmesh.set(type="tri2d", mesh_file=filename_)

    # IDIOM FIX: `assert x is True` replaced by a plain truth assertion
    # (`equals` already returns a bool).
    assert mesh.Dataset.equals(mesh_.Dataset)
def test_d3d(tmpdir, dem_source, kwargs):
    """Write a DEM as a Delft3D dep file, read it back, and compare."""
    ## lat,lon grid
    resolution = 0.1
    lon = np.arange(kwargs["lon_min"], kwargs["lon_max"], resolution)
    lat = np.arange(kwargs["lat_min"], kwargs["lat_max"], resolution)
    xp, yp = np.meshgrid(lon, lat)

    # update kwargs with the regular grid
    kwargs.update({"grid_x": xp, "grid_y": yp})

    # get dem
    df = pdem.Dem(dem_source=dem_source, **kwargs)
    # get dem on mesh
    df.Dataset = pdem.dem_on_mesh(df.Dataset, **kwargs)

    rpath = str(tmpdir) + "/"
    # output
    pdem.to_output(df.Dataset, solver_name="d3d", rpath=rpath)

    # read again dem
    m = pmodel.set(solver_name="d3d")
    rd = m.from_dep(rpath + "d3d.dep")

    # compare, masking the -999 fill values on the dep side and the
    # non-negative values on the dataset side
    c1 = -rd.where(rd != -999)
    c2 = df.Dataset.ival.where(df.Dataset.ival < 0)
    # IDIOM FIX: `assert x is True` replaced by a plain truth assertion
    assert c1.fillna(0).equals(c2.fillna(0))
def test_dem_source_is_url():
    """Fetching bathymetry straight from an ERDDAP URL yields no gaps."""
    url = "https://coastwatch.pfeg.noaa.gov/erddap/griddap/srtm30plus"
    window = {
        "lon_min": 176.5,
        "lon_max": 177.0,
        "lat_min": 16.0,
        "lat_max": 16.5,
    }
    dem = pdem.Dem(dem_source=url, **window)
    assert np.isnan(dem.Dataset.elevation.values).sum() == 0
def test_d3d_mesh(tmpdir, coasts, dem_source, window):
    """Interpolate a DEM onto a regular r2d grid and adjust to coastlines."""
    mesh = pmesh.set(type="r2d", geometry=window, resolution=0.1, rpath=str(tmpdir) + "/")
    grid = mesh.Dataset
    grid_x = grid.lons.values
    grid_y = grid.lats.values

    # get dem
    dem = pdem.Dem(**window, dem_source=dem_source, adjust_dem=False)
    # get dem on mesh
    dem.Dataset = pdem.dem_on_mesh(dem.Dataset, grid_x=grid_x, grid_y=grid_y)
    dem.adjust(coasts)

    assert np.isnan(dem.Dataset.fval.values).sum() == 0
def test_schism_mesh(tmpdir, coasts, dem_source, window):
    """Interpolate a DEM onto a jigsaw tri2d mesh and adjust to coastlines."""
    mesh = pmesh.set(
        type="tri2d",
        geometry=window,
        coastlines=coasts,
        mesh_generator="jigsaw",
        rpath=str(tmpdir) + "/",
    )
    node_x = mesh.Dataset.SCHISM_hgrid_node_x.values
    node_y = mesh.Dataset.SCHISM_hgrid_node_y.values

    # get dem
    dem = pdem.Dem(**window, dem_source=dem_source, adjust_dem=False)
    # get dem on mesh
    dem.Dataset = pdem.dem_on_mesh(dem.Dataset, grid_x=node_x, grid_y=node_y)
    dem.adjust(coasts)

    assert np.isnan(dem.Dataset.fval.values).sum() == 0
def test_dem_adjust(coasts, dem_source, window):
    """Adjusting a bare DEM to coastlines leaves no NaN cells."""
    # Just elevation
    dem = pdem.Dem(**window, dem_source=dem_source)  # get dem
    dem.adjust(coasts)
    assert np.isnan(dem.Dataset.adjusted.values).sum() == 0
def make_bgmesh(df, fpos, **kwargs):
    """Create a background mesh-size field from bathymetry and save it.

    Args:
        df: GeoDataFrame-like object whose ``bounds`` define the DEM window.
        fpos: Output path for the background mesh file.

    Returns:
        xr.Dataset: The size field ``h`` on the DEM lon/lat grid.
    """
    lon_min = df.bounds.minx.min()
    lon_max = df.bounds.maxx.max()
    lat_min = df.bounds.miny.min()
    lat_max = df.bounds.maxy.max()

    # strip any caller-provided window so ours takes precedence
    kwargs_ = kwargs.copy()
    for key in ("lon_min", "lon_max", "lat_min", "lat_max"):
        kwargs_.pop(key, None)

    dem = kwargs.get("dem_source", None)
    if not isinstance(dem, xr.Dataset):
        logger.info("Read DEM")
        dem = pdem.Dem(lon_min=lon_min, lon_max=lon_max, lat_min=lat_min, lat_max=lat_max, **kwargs_)
        dem = dem.Dataset

    res_min = kwargs.get("resolution_min", 0.01)
    res_max = kwargs.get("resolution_max", 0.5)

    logger.info("Evaluate bgmesh")

    # scale bathymetry: prefer the coast-adjusted variable when present.
    # BUG FIX: bare `except:` narrowed to AttributeError (raised when the
    # dataset has no `adjusted` variable).
    try:
        b = dem.adjusted.to_dataframe()
    except AttributeError:
        b = dem.elevation.to_dataframe()

    b = b.reset_index()
    b.columns = ["latitude", "longitude", "z"]

    nodes = scale_dem(b, res_min, res_max, **kwargs)

    x = dem.longitude.values
    y = dem.latitude.values
    quad = MakeQuadFaces(y.shape[0], x.shape[0])
    elems = pd.DataFrame(quad, columns=["a", "b", "c", "d"])
    # renamed from `df`, which shadowed the input parameter
    qdf = quads_to_df(elems, nodes)

    dh = xr.Dataset(
        {
            "h": (
                ["longitude", "latitude"],
                nodes.d2.values.flatten().reshape((x.shape[0], y.shape[0])),
            )
        },
        coords={"longitude": ("longitude", x), "latitude": ("latitude", y)},
    )

    logger.info("Save bgmesh to {}".format(fpos))
    to_sq(qdf, fpos)  # save bgmesh
    # NOTE(review): this mutates only the local copy made by **kwargs; the
    # caller never sees it — confirm whether it can be dropped.
    kwargs.update({"bgmesh": fpos})

    return dh
def get(contours, **kwargs):
    """
    Create a `gmsh` mesh.

    !!! danger ""
        Due to a limitation of the Library rendering the docstrings, all arguments are marked
        as `required`, nevertheless they are all `Optional`.

    Args:
        contours GeoDataFrame: Provide boundaries and metadata.
        rpath str: Path for output. Defaults to `"."`.
        use_bindings bool: Flag for using python API as opposed to binary. Defaults to `True`.
        dem_source str: Path or url to bathymetric data.
        bgmesh str: Path to a mesh scale file. Defaults to `None`.
        setup_only bool: Flag for setup only (no execution). Defaults to `False`.
    """
    logger.info("Creating grid with GMSH\n")

    rpath = kwargs.get("rpath", ".")
    if not os.path.exists(rpath):
        os.makedirs(rpath)
    gpath = os.path.join(rpath, "gmsh")
    if not os.path.exists(gpath):
        os.makedirs(gpath)

    use_bindings = kwargs.get("use_bindings", True)
    setup_only = kwargs.get("setup_only", False)
    bgmesh = kwargs.get("bgmesh", None)

    # default to an automatic background mesh when a DEM is available
    if bgmesh is None:
        dem_source = kwargs.get("dem_source", None)
        if dem_source:
            bgmesh = "auto"
            kwargs.update({"bgmesh": "auto"})

    # BUG FIX: `dh` was only bound inside the branch below; when
    # bgmesh != "auto" (or a non-OSError escaped the try) it was undefined,
    # and the trailing bare `try/except` silently masked the NameError.
    # Initialize it explicitly and drop the bare except.
    dh = None

    if bgmesh == "auto":
        try:
            rpath = kwargs.get("rpath", ".")
            if not os.path.exists(rpath + "/gmsh/"):  # check if run folder exists
                os.makedirs(rpath + "/gmsh/")
            fpos = rpath + "/gmsh/bgmesh.pos"
            gglobal = kwargs.get("gglobal", False)
            if gglobal:
                dem = pdem.Dem(**kwargs)
                nds, lms = make_bgmesh_global(contours, fpos, dem.Dataset, **kwargs)
                dh = to_global_pos(nds, lms, fpos, **kwargs)
            else:
                dh = make_bgmesh(contours, fpos, **kwargs)
            kwargs.update({"bgmesh": fpos})
        except OSError:
            logger.warning("bgmesh failed... continuing without background mesh size")
            dh = None
            kwargs.update({"bgmesh": None})

    if use_bindings:
        logger.info("using python bindings")
        make_gmsh(contours, **kwargs)
    else:
        to_geo(contours, **kwargs)
        if not setup_only:
            logger.info("using GMSH binary")
            gmsh_execute(**kwargs)

    if not setup_only:
        gr = read_msh(rpath + "/gmsh/mymesh.msh", **kwargs)
        bg = dh
        return gr, bg
def test_dem_shape(dem_source, kwargs, expected):
    """The fetched elevation grid must have the expected dimensions."""
    dem = pdem.Dem(dem_source=dem_source, **kwargs)
    assert dem.Dataset.elevation.shape == expected
def test_dem(dem_source, kwargs, expected):
    """The fetched elevation grid must contain no missing values."""
    dem = pdem.Dem(dem_source=dem_source, **kwargs)
    nan_count = np.isnan(dem.Dataset.elevation.values).sum()
    assert nan_count == 0
def __init__(self, **kwargs):
    """
    Set model boundaries

    !!! danger ""
        Due to a limitation of the Library rendering the docstrings, all arguments are marked
        as `required`, nevertheless they are all `Optional` except geometry.

    Args:
        geometry Union[dict, str, GeoDataFrame]: A `GeoDataFrame` or the path to a shapefile or
            a dict defining the lat/lon window.
        coastlines Union[str, GeoDataFrame]: A `GeoDataFrame` or the path to a shapefile which
            describes the coastlines. Defaults to `None`.
        cbuffer float: The buffer in arcs for extending the coastlines. Defaults to `None`.
        levels list[floats]: The range of DEM values for extracting the boundaries.
            When one values is present it defines inner coastlines. When two values exist they
            define the extent. Defaults to `None`.
        dem_source str: Path or url to bathymetric data.
    """
    geometry = kwargs.get("geometry", None)
    coastlines = kwargs.get("coastlines", None)
    cbuffer = kwargs.get("cbuffer", None)
    blevels = kwargs.get("blevels", None)
    prad = kwargs.get("R", 1.0)

    # COASTLINES
    if coastlines is None:
        logger.warning("coastlines not given")
        self.coasts = None
    elif isinstance(coastlines, str):
        logger.info("reading {}".format(coastlines))
        coasts = gp.GeoDataFrame.from_file(coastlines)
        self.coasts = simplify(coasts)
    elif isinstance(coastlines, gp.GeoDataFrame):
        logger.warning("coastlines is not a file, trying with geopandas Dataset")
        try:
            self.coasts = simplify(coastlines)
        # BUG FIX: bare `except:` narrowed — still a best-effort catch-all,
        # but no longer swallows KeyboardInterrupt/SystemExit
        except Exception:
            logger.error("coastlines argument not valid ")
            sys.exit(1)

    # GEOMETRY
    if geometry is None:
        # BUG FIX: the original tested the undefined name `levels` here
        # (NameError); the variable read from kwargs is `blevels`.
        if blevels is None:
            logger.error("geometry nor levels is given, exiting")
            sys.exit(1)

    if isinstance(geometry, dict):
        if self.coasts is None:
            logger.warning("coastlines might be required")
        self.geometry = geometry
    elif isinstance(geometry, str):
        if geometry == "global":
            if self.coasts is None:
                logger.warning("coastlines might be required")
            self.geometry = "global"
        else:
            try:
                self.geometry = gp.read_file(geometry)
            except Exception:
                logger.warning("geometry is not a file, trying with geopandas Dataset")
                if isinstance(geometry, gp.GeoDataFrame):
                    self.geometry = geometry
                else:
                    logger.error("geometry argument not valid ")
                    sys.exit(1)
    else:
        try:
            self.geometry = gp.read_file(geometry)
        except Exception:
            logger.warning("geometry is not a file, trying with geopandas Dataset")
            if isinstance(geometry, gp.GeoDataFrame):
                self.geometry = geometry
            else:
                logger.error("geometry argument not valid ")
                sys.exit(1)

    # Define internal boundary as isovalue of DEM
    if blevels:
        dsource = kwargs.get("dem_source", None)
        if dsource is None:
            logger.error("dem_source is required")
        dem = pdem.Dem(geometry=self.geometry, dem_source=dsource)
        dem_ = dem.Dataset
        self.coasts = get_dem_contours(blevels, dem_)

    # get boundaries
    if isinstance(self.geometry, dict):
        df = tag(self.geometry, self.coasts, cbuffer, blevels)
    elif isinstance(self.geometry, str):
        if self.coasts is None:
            logger.error("coastlines are missing .. exiting\n")
            sys.exit(1)
        df = global_tag(self.coasts, cbuffer, blevels, R=prad)
    elif isinstance(self.geometry, gp.GeoDataFrame):
        df = self.geometry

    # line tag: islands get negative indices, land 1000+, open boundaries 1..n
    df.loc[df.tag == "island", "lindex"] = np.arange(-df[df.tag == "island"].shape[0], 0).tolist() or 0
    df.loc[df.tag == "land", "lindex"] = (1000 + np.arange(1, df[df.tag == "land"].shape[0] + 1)).tolist() or 0
    df.loc[df.tag == "open", "lindex"] = np.arange(1, df[df.tag == "open"].shape[0] + 1).tolist() or 0
    df = df.sort_values("lindex", ascending=False)
    df.lindex = df.lindex.astype(int)

    # number of points per boundary line (closing point excluded)
    df["nps"] = df.apply(lambda row: len(row.geometry.xy[1]) - 1, axis=1)

    self.contours = df.reset_index(drop=True)