def bath(self, **kwargs):
    kwargs['grid_x'] = self.grid.Dataset.lons.values
    kwargs['grid_y'] = self.grid.Dataset.lats.values

    dpath = get_value(self, kwargs, 'dem', None)
    kwargs.update({'dem': dpath})

    flag = get_value(self, kwargs, 'update', [])

    # check if files exist
    if flag:
        if ('dem' in flag) | ('all' in flag):
            kwargs.update({
                'lon_min': self.lon_min,
                'lat_min': self.lat_min,
                'lon_max': self.lon_max,
                'lat_max': self.lat_max
            })
            self.dem = pdem.dem(**kwargs)
        else:
            logger.info('reading local dem file ..\n')
            # the original referenced an undefined 'z' dict here; self.rpath is assumed
            dem_source = self.rpath + self.tag + '.dep'
            rdem = from_dep(dem_source)
    else:
        kwargs.update({
            'lon_min': self.lon_min,
            'lat_min': self.lat_min,
            'lon_max': self.lon_max,
            'lat_max': self.lat_max
        })
        self.dem = pdem.dem(**kwargs)
def d3d(tmpdir, kwargs):
    ## lat,lon grid
    resolution = .1
    lon = np.arange(kwargs['lon_min'], kwargs['lon_max'], resolution)
    lat = np.arange(kwargs['lat_min'], kwargs['lat_max'], resolution)
    xp, yp = np.meshgrid(lon, lat)

    kwargs.update({'grid_x': xp, 'grid_y': yp})

    # get dem
    df = pdem.dem(**kwargs)

    rpath = str(tmpdir) + '/'

    # output
    pdem.to_output(df.Dataset, solver='d3d', rpath=rpath)

    # read dem again
    m = pmodel(solver='d3d')
    rd = m.from_dep(rpath + 'd3d.dep')

    # compare
    c1 = -rd.where(rd != -999)
    c2 = df.Dataset.ival.where(df.Dataset.ival < 0)

    return c1.fillna(0).equals(c2.fillna(0))
def schism(tmpdir, kwargs):
    grid = pgrid.grid(type='tri2d', grid_file=(DATA_DIR / 'hgrid.gr3').as_posix())

    # update dem
    xp = grid.Dataset.SCHISM_hgrid_node_x.values
    yp = grid.Dataset.SCHISM_hgrid_node_y.values

    kwargs.update({'grid_x': xp, 'grid_y': yp})

    # get dem
    df = pdem.dem(**kwargs)

    grid.Dataset['depth'].loc[:] = -df.Dataset.ival.values

    filename_ = str(tmpdir.join('hgrid_.gr3'))

    # output to grid file
    grid.to_file(filename_)

    # read new grid again
    grid_ = pgrid.grid(type='tri2d', grid_file=filename_)

    # compare
    return grid.Dataset.equals(grid_.Dataset)
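# Hedged sketch of driving the two round-trip helpers above from pytest; the
# 'window' box and the use of DEM_SOURCE here are illustrative assumptions,
# not values taken from this repo.
import pytest

window = {'lon_min': -30., 'lon_max': -10., 'lat_min': 60., 'lat_max': 70.,
          'dem_source': DEM_SOURCE}

@pytest.mark.parametrize('kwargs', [window])
def test_to_output(tmpdir, kwargs):
    # each helper returns True when the DEM written to disk matches what is read back
    assert d3d(tmpdir, kwargs.copy())
    assert schism(tmpdir, kwargs.copy())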
def test_schism_grid(tmpdir, dic):
    grid = pg.grid(type='tri2d', geometry=dic, coastlines=natural_earth, rpath=str(tmpdir) + '/')

    xg = grid.Dataset.SCHISM_hgrid_node_x.values
    yg = grid.Dataset.SCHISM_hgrid_node_y.values

    df = pdem.dem(**dic, grid_x=xg, grid_y=yg, dem_source=DEM_SOURCE)  # get dem

    df.adjust(natural_earth)

    assert np.isnan(df.Dataset.fval.values).sum() == 0
def test_d3d_grid(tmpdir, dic):
    grid = pg.grid(type='r2d', geometry=dic, resolution=.1, rpath=str(tmpdir) + '/')

    gr = grid.Dataset
    xp, yp = gr.lons, gr.lats

    df = pdem.dem(**dic, grid_x=xp, grid_y=yp, dem_source=DEM_SOURCE)  # get dem

    df.adjust(natural_earth)

    assert np.isnan(df.Dataset.fval.values).sum() == 0
def test_elevation(tmpdir, dic):
    # just elevation, no target grid
    df = pdem.dem(**dic, dem_source=DEM_SOURCE)  # get dem

    df.adjust(natural_earth)

    assert np.isnan(df.Dataset.adjusted.values).sum() == 0
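# The three tests above receive the lon/lat window through the 'dic' argument.
# A minimal sketch of supplying it via pytest parametrization; the coordinates
# are illustrative assumptions, and the decorator would be stacked on each test:
#
#     @pytest.mark.parametrize('dic', [geo_window])
#     def test_schism_grid(tmpdir, dic): ...
import pytest

geo_window = {'lon_min': -30., 'lon_max': -10., 'lat_min': 60., 'lat_max': 70.}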
def jigsaw(**kwargs):

    logger.info('Creating grid with JIGSAW\n')

    geometry = kwargs.get('geometry', None)

    if isinstance(geometry, dict):

        df, bmindx = tag_(**kwargs)
        gr = jigsaw_(df, bmindx, **kwargs)

    elif isinstance(geometry, str):

        if geometry == 'global':

            hfun0 = hfun_(kwargs.get('coastlines', None), kwargs.get('res', .1), kwargs.get('R', 1.))
            kwargs.update({'hfun': hfun0})

            df = sgl(**kwargs)
            bmindx = df.tag.min()
            gr = jigsaw_(df, bmindx, **kwargs)

            convert = kwargs.get('to_lat_lon', True)
            if convert:
                # convert back to lat/lon
                u, v = gr.SCHISM_hgrid_node_x.values, gr.SCHISM_hgrid_node_y.values
                rlon, rlat = to_lat_lon(u, v, R=kwargs.get('R', 1.))
                gr['SCHISM_hgrid_node_x'].values = rlon
                gr['SCHISM_hgrid_node_y'].values = rlat

        else:

            # renamed from 'hfun' so the hfun() mesh-size helper below is not shadowed
            hfun_flag = kwargs.get('hfun', None)
            if hfun_flag == 'auto':
                try:
                    # the original called pdem.dem(**geometry), which fails for a
                    # string geometry; passing the full kwargs is assumed here
                    dem = pdem.dem(**kwargs)

                    res_min = kwargs.get('resolution_min', .01)
                    res_max = kwargs.get('resolution_max', .5)
                    dhdx = kwargs.get('dhdx', .15)

                    # resolution in lat/lon degrees
                    w = hfun(dem.Dataset.elevation, resolution_min=res_min,
                             resolution_max=res_max, dhdx=dhdx)

                    # the original used self.rpath in this module-level function;
                    # an rpath keyword is assumed instead
                    rpath = kwargs.get('rpath', './')
                    if not os.path.exists(rpath):  # check if run folder exists
                        os.makedirs(rpath)

                    w.to_netcdf(rpath + 'hfun.nc')  # save hfun
                    kwargs.update({'hfun': rpath + 'hfun.nc'})

                except:
                    logger.warning('hfun failed... continuing without background mesh size')

            df = jcustom(**kwargs)
            bmindx = df.tag.min()
            gr = jigsaw_(df, bmindx, **kwargs)

    return gr
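# Hedged usage sketch for jigsaw(): a regional window passed as a geometry dict.
# The coordinates, the rpath, and the use of natural_earth (assumed to be the
# coastline dataset used by the tests above) are illustrative assumptions.
mesh = jigsaw(geometry={'lon_min': -30., 'lon_max': -10., 'lat_min': 60., 'lat_max': 70.},
              coastlines=natural_earth, rpath='./jigsaw/')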
def make_gmsh(df, **kwargs):

    logger.info('create grid')

    model = gmsh.model
    factory = model.geo

    gmsh.initialize()
    model.add("schism")
    # gmsh.option.setNumber("General.Terminal", 1)

    interpolate = kwargs.get('interpolate', False)
    if interpolate:
        ddf = gset(df, **kwargs)
    else:
        ddf = df

    lc = kwargs.get('lc', .5)

    ddf['lc'] = lc
    ddf = ddf.apply(pd.to_numeric)

    # save boundary configuration for Line0
    rb0 = ddf.loc['line0'].copy()

    if not shapely.geometry.LinearRing(rb0[['lon', 'lat']].values).is_ccw:  # check for clockwise orientation
        rb0 = ddf.loc['line0'].iloc[::-1].reset_index(drop=True)

    rb0.index = rb0.index + 1  # fix index
    rb0['bounds'] = [[i, i + 1] for i in rb0.index]
    rb0['bounds'] = rb0.bounds.values.tolist()[:-1] + [[rb0.index[-1], 1]]  # fix last one

    # store blines
    blines = {}

    for tag_ in rb0.tag.unique():

        ibs = rb0.loc[rb0.tag == tag_].index.values
        lbs = rb0.loc[rb0.tag == tag_].bounds.values.tolist()
        ai = np.unique(np.concatenate(lbs))
        itags = [i for i in ai if i in ibs]

        if tag_ > 0:
            items = set(itags)
            imask = [set(x).issubset(items) for x in rb0.loc[rb0.tag == tag_].bounds]
            bi = rb0.loc[rb0.tag == tag_][imask].index.values.tolist()
        else:
            bi = rb0.loc[rb0.tag == tag_].index.values.tolist()

        blines.update({tag_: bi})

    al = [j for i in list(blines.values()) for j in i]
    lover = [x for x in rb0.index if x not in al]

    for i, v in rb0.loc[lover].iterrows():
        nns = rb0.loc[v[5], ['tag']].values
        itag = [x for x in nns if x < 0][0]
        blines.update({itag[0]: blines[itag[0]] + [i]})

    land_lines = {key: blines[key] for key in blines.keys() if key < 0}
    open_lines = {key: blines[key] for key in blines.keys() if key > 0}

    logger.info('Define geometry')

    loops = []
    islands = []
    all_lines = []

    ltag = 1

    for row in rb0.itertuples(index=True, name='Pandas'):
        factory.addPoint(getattr(row, "lon"), getattr(row, "lat"), getattr(row, "z"),
                         getattr(row, "lc"), getattr(row, "Index"))

    for row in rb0.itertuples(index=True, name='Pandas'):
        factory.addLine(getattr(row, "bounds")[0], getattr(row, "bounds")[1], getattr(row, "Index"))

    lines = rb0.index.values
    all_lines.append(lines)

    tag = rb0.index.values[-1]

    factory.addCurveLoop(lines, tag=ltag)
    loops.append(ltag)
    all_lines.append(lines)

    tag += 1
    ltag += 1

    for contour in tqdm(ddf.index.levels[0][1:]):

        rb = ddf.loc[contour].copy()

        if not shapely.geometry.LinearRing(rb[['lon', 'lat']].values).is_ccw:  # check for clockwise orientation
            rb = ddf.loc[contour].iloc[::-1].reset_index(drop=True)

        rb.index = rb.index + tag
        rb['bounds'] = [[i, i + 1] for i in rb.index]
        rb['bounds'] = rb.bounds.values.tolist()[:-1] + [[rb.index[-1], rb.index[0]]]  # fix last one

        for row in rb.itertuples(index=True, name='Pandas'):
            factory.addPoint(getattr(row, "lon"), getattr(row, "lat"), getattr(row, "z"),
                             getattr(row, "lc"), getattr(row, "Index"))

        for row in rb.itertuples(index=True, name='Pandas'):
            factory.addLine(getattr(row, "bounds")[0], getattr(row, "bounds")[1], getattr(row, "Index"))

        lines = rb.index.values
        all_lines.append(lines)

        tag = rb.index.values[-1] + 1

        factory.addCurveLoop(lines, tag=ltag)
        loops.append(ltag)

        islands.append(lines)
        all_lines.append(lines)

        tag += 1
        ltag += 1

    factory.addPlaneSurface(loops)

    logger.info('synchronize')
    factory.synchronize()

    ## Group open boundary lines
    for key, values in open_lines.items():
        gmsh.model.addPhysicalGroup(1, values, 1000 - int(key))

    ## Group land boundary lines
    for key, values in land_lines.items():
        gmsh.model.addPhysicalGroup(1, values, 1000 - int(key))

    ntag = 1
    for k in tqdm(range(len(islands))):
        gmsh.model.addPhysicalGroup(1, islands[k], 2000 + ntag)
        ntag += 1

    ps = gmsh.model.addPhysicalGroup(2, [1])
    gmsh.model.setPhysicalName(2, ps, "MyMesh")

    flat_list = [item for sublist in all_lines for item in sublist]
    ols = [j for i in list(open_lines.values()) for j in i]
    lists = [x for x in flat_list if x not in ols]

    model.mesh.field.add("Distance", 1)
    model.mesh.field.setNumbers(1, "CurvesList", lists)

    SizeMin = kwargs.get("SizeMin", .1)
    SizeMax = kwargs.get("SizeMax", .5)
    DistMin = kwargs.get("DistMin", .01)
    DistMax = kwargs.get("DistMax", .2)

    model.mesh.field.add("Threshold", 2)
    model.mesh.field.setNumber(2, "InField", 1)
    model.mesh.field.setNumber(2, "SizeMin", SizeMin)
    model.mesh.field.setNumber(2, "SizeMax", SizeMax)
    model.mesh.field.setNumber(2, "DistMin", DistMin)
    model.mesh.field.setNumber(2, "DistMax", DistMax)

    # Set bgmesh
    bgmesh = kwargs.get('bgmesh', None)

    if bgmesh == 'auto':

        try:
            logger.info('Read DEM')
            dem = pdem.dem(**kwargs)

            res_min = kwargs.get('resolution_min', .01)
            res_max = kwargs.get('resolution_max', .5)

            logger.info('Evaluate bgmesh')
            w = make_bgmesh(dem.Dataset, res_min, res_max)

            path = kwargs.get('rpath', '.')
            if not os.path.exists(path):  # check if run folder exists
                os.makedirs(path)

            logger.info('Save bgmesh to {}/bgmesh/bgmesh.pos'.format(path))

            fpos = path + '/bgmesh/bgmesh.pos'
            to_sq(w, fpos)  # save bgmesh
            kwargs.update({'bgmesh': fpos})

            model.mesh.field.setNumber(2, "StopAtDistMax", 1)

            # Merge a post-processing view containing the target anisotropic mesh sizes
            gmsh.merge(fpos)

            model.mesh.field.add("PostView", 3)
            model.mesh.field.setNumber(3, "ViewIndex", 0)

            model.mesh.field.add("Min", 4)
            model.mesh.field.setNumbers(4, "FieldsList", [2, 3])

            model.mesh.field.setAsBackgroundMesh(4)

        except:
            logger.warning('bgmesh failed... continuing without background mesh size')
            model.mesh.field.setAsBackgroundMesh(2)

    else:
        model.mesh.field.setAsBackgroundMesh(2)

    gmsh.option.setNumber('Mesh.MeshSizeExtendFromBoundary', 0)
    gmsh.option.setNumber('Mesh.MeshSizeFromPoints', 0)
    gmsh.option.setNumber('Mesh.MeshSizeFromCurvature', 0)

    logger.info('execute')
    gmsh.model.mesh.generate(2)

    # ... and save it to disk
    rpath = kwargs.get('rpath', '.')

    logger.info('save mesh')
    # gmsh.option.setNumber("Mesh.SaveAll", 1)
    gmsh.write(rpath + '/gmsh/mymesh.msh')

    # gmsh.write('mymesh.vtk')

    gmsh.finalize()
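# Hedged usage sketch for make_gmsh(): 'boundary_df' stands for the multi-indexed
# boundary DataFrame expected above (outer level 'line0', 'line1', ... with
# lon/lat/z/tag columns); the keyword values are illustrative assumptions.
make_gmsh(boundary_df, lc=.5, SizeMin=.05, SizeMax=.3, DistMin=.01, DistMax=.2,
          bgmesh=None, rpath='.')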
def test_answer(tmpdir, kwargs):
    df = pdem.dem(**kwargs)
    assert np.isnan(df.Dataset.elevation.values).sum() == 0
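# Hedged follow-up sketch: beyond checking for NaNs, the interpolated DEM can be
# written out for a solver, as the d3d round-trip helper above does; the solver
# name and output path here are illustrative.
def write_dem(kwargs, rpath='./'):
    df = pdem.dem(**kwargs)
    pdem.to_output(df.Dataset, solver='d3d', rpath=rpath)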