def simple_glacier_masks(gdir):
    """Compute glacier masks based on much simpler rules than OGGM's default.

    This is therefore more robust: we use this function to compute glacier
    hypsometries.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # open srtm tif-file:
    dem_dr = rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff')
    dem = dem_dr.read(1).astype(rasterio.float32)

    # Grid
    nx = dem_dr.width
    ny = dem_dr.height
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    # Correct the DEM
    # Currently we just do a linear interp -- proper void filling is
    # unreliable anyway
    min_z = -999.
    dem[dem <= min_z] = np.nan
    isfinite = np.isfinite(dem)
    if np.all(~isfinite):
        raise InvalidDEMError('Not a single valid grid point in DEM')
    if np.any(~isfinite):
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero(~isfinite)
        pok = np.nonzero(isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        try:
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
                                 method='linear')
        except ValueError:
            raise InvalidDEMError('DEM interpolation not possible.')
        log.warning(gdir.rgi_id + ': DEM needed interpolation.')
        gdir.add_to_diagnostics('dem_needed_interpolation', True)
        gdir.add_to_diagnostics('dem_invalid_perc', len(pnan[0]) / (nx * ny))

    isfinite = np.isfinite(dem)
    if np.any(~isfinite):
        # this happens when extrapolation is needed
        # see how many percent of the dem
        if np.sum(~isfinite) > (0.5 * nx * ny):
            log.warning('({}) many NaNs in DEM'.format(gdir.rgi_id))
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero(~isfinite)
        pok = np.nonzero(isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        try:
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
                                 method='nearest')
        except ValueError:
            raise InvalidDEMError('DEM extrapolation not possible.')
        log.warning(gdir.rgi_id + ': DEM needed extrapolation.')
        gdir.add_to_diagnostics('dem_needed_extrapolation', True)
        gdir.add_to_diagnostics('dem_extrapol_perc', len(pnan[0]) / (nx * ny))

    if np.min(dem) == np.max(dem):
        raise InvalidDEMError('({}) min equal max in the DEM.'
                              .format(gdir.rgi_id))

    # Proj
    if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
        transf = dem_dr.transform
    else:
        raise ImportError('This task needs rasterio >= 1.0 to work properly')
    x0 = transf[2]  # UL corner
    y0 = transf[5]  # UL corner
    dx = transf[0]
    dy = transf[4]  # Negative
    assert dx == -dy
    assert dx == gdir.grid.dx
    assert y0 == gdir.grid.corner_grid.y0
    assert x0 == gdir.grid.corner_grid.x0

    profile = dem_dr.profile
    dem_dr.close()

    # Clip topography to 0 m a.s.l.
    dem = dem.clip(0)

    # Smooth DEM?
    if cfg.PARAMS['smooth_window'] > 0.:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        smoothed_dem = gaussian_blur(dem, int(gsize))
    else:
        smoothed_dem = dem.copy()

    if not np.all(np.isfinite(smoothed_dem)):
        raise InvalidDEMError('({}) NaN in smoothed DEM'.format(gdir.rgi_id))

    # Geometries
    geometry = gdir.read_shapefile('outlines').geometry[0]

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    geometry = geometry.buffer(0)
    if not geometry.is_valid:
        raise InvalidDEMError('This glacier geometry is not valid.')

    # Compute the glacier mask using rasterio
    # Small detour as mask only accepts DataReader objects
    with rasterio.io.MemoryFile() as memfile:
        with memfile.open(**profile) as dataset:
            dataset.write(dem.astype(np.int16)[np.newaxis, ...])
        dem_data = rasterio.open(memfile.name)
        masked_dem, _ = riomask(dem_data, [shpg.mapping(geometry)],
                                filled=False)
    glacier_mask = ~masked_dem[0, ...].mask

    # Same without nunataks
    with rasterio.io.MemoryFile() as memfile:
        with memfile.open(**profile) as dataset:
            dataset.write(dem.astype(np.int16)[np.newaxis, ...])
        dem_data = rasterio.open(memfile.name)
        poly = shpg.mapping(shpg.Polygon(geometry.exterior))
        masked_dem, _ = riomask(dem_data, [poly], filled=False)
    glacier_mask_nonuna = ~masked_dem[0, ...].mask

    # Glacier exterior excluding nunataks
    erode = binary_erosion(glacier_mask_nonuna)
    glacier_ext = glacier_mask_nonuna ^ erode
    glacier_ext = np.where(glacier_mask_nonuna, glacier_ext, 0)

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[glacier_mask])
    tmp_min = np.min(dem[glacier_mask])
    if tmp_max < (tmp_min + 1):
        raise InvalidDEMError('({}) min equal max in the masked DEM.'
                              .format(gdir.rgi_id))

    # hypsometry
    bsize = 50.
    dem_on_ice = dem[glacier_mask]
    bins = np.arange(nicenumber(dem_on_ice.min(), bsize, lower=True),
                     nicenumber(dem_on_ice.max(), bsize) + 0.01, bsize)

    h, _ = np.histogram(dem_on_ice, bins)
    h = h / np.sum(h) * 1000  # in permil

    # We want to convert the bins to ints but preserve their sum to 1000
    # Start with everything rounded down, then round up the numbers with
    # the highest fractional parts until the desired sum is reached.
    hi = np.floor(h).astype(int)
    hup = np.ceil(h).astype(int)
    aso = np.argsort(hup - h)
    for i in aso:
        hi[i] = hup[i]
        if np.sum(hi) == 1000:
            break

    # slope
    sy, sx = np.gradient(dem, dx)
    aspect = np.arctan2(np.mean(-sx[glacier_mask]),
                        np.mean(sy[glacier_mask]))
    aspect = np.rad2deg(aspect)
    if aspect < 0:
        aspect += 360
    slope = np.arctan(np.sqrt(sx ** 2 + sy ** 2))
    avg_slope = np.rad2deg(np.mean(slope[glacier_mask]))

    # write
    df = pd.DataFrame()
    df['RGIId'] = [gdir.rgi_id]
    df['GLIMSId'] = [gdir.glims_id]
    df['Zmin'] = [dem_on_ice.min()]
    df['Zmax'] = [dem_on_ice.max()]
    df['Zmed'] = [np.median(dem_on_ice)]
    df['Area'] = [gdir.rgi_area_km2]
    df['Slope'] = [avg_slope]
    df['Aspect'] = [aspect]
    for b, bs in zip(hi, (bins[1:] + bins[:-1]) / 2):
        df['{}'.format(np.round(bs).astype(int))] = [b]
    df.to_csv(gdir.get_filepath('hypsometry'), index=False)

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data')

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = ('DEM topography smoothed with radius: {:.1f} m'
                   .format(cfg.PARAMS['smooth_window']))
    v[:] = smoothed_dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier external boundaries'
    v[:] = glacier_ext

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()
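# A minimal, self-contained sketch of the largest-remainder rounding used
# for the hypsometry bins above (the helper name `_round_preserving_sum`
# is ours, not OGGM's): round every bin down, then round up the bins with
# the largest fractional parts until the target total is reached.
import numpy as np


def _round_preserving_sum(h, total=1000):
    """Round `h` to integers whose sum is `total` (assumes sum(h) == total)."""
    hi = np.floor(h).astype(int)
    hup = np.ceil(h).astype(int)
    # smallest (hup - h) <=> largest fractional part: round those up first
    for i in np.argsort(hup - h):
        hi[i] = hup[i]
        if np.sum(hi) == total:
            break
    return hi


# e.g. _round_preserving_sum(np.array([333.4, 333.3, 333.3]))
# -> array([334, 333, 333]), summing exactly to 1000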
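# A hedged sketch of the MemoryFile detour used in simple_glacier_masks:
# rasterio.mask.mask only accepts opened datasets, not bare arrays, so the
# array is round-tripped through an in-memory file first. `polygon_mask`
# is our illustrative name; `profile` is assumed to be a valid rasterio
# profile matching the shape of `data`.
import numpy as np
import rasterio
from rasterio.mask import mask as riomask
import shapely.geometry as shpg


def polygon_mask(data, profile, geometry):
    """Return a boolean mask of the cells of `data` inside `geometry`."""
    with rasterio.io.MemoryFile() as memfile:
        with memfile.open(**profile) as dataset:
            dataset.write(data.astype(np.int16)[np.newaxis, ...])
        with memfile.open() as dem_data:
            masked, _ = riomask(dem_data, [shpg.mapping(geometry)],
                                filled=False)
    # riomask returns a masked array: unmasked cells lie inside the polygon
    return ~masked[0, ...].mask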
def add_basemap(ax, zoom=ZOOM, source=None, interpolation=INTERPOLATION,
                attribution=None, attribution_size=ATTRIBUTION_SIZE,
                reset_extent=True, crs=None, resampling=Resampling.bilinear,
                url=None, **extra_imshow_args):
    """Add a (web/local) basemap to `ax`.

    Parameters
    ----------
    ax : AxesSubplot
        Matplotlib axes object on which to add the basemap. The extent of
        the axes is assumed to be in Spherical Mercator (EPSG:3857), unless
        the `crs` keyword is specified.
    zoom : int or 'auto'
        [Optional. Default='auto'] Level of detail for the basemap. If
        'auto', it is calculated automatically. Ignored if `source` is a
        local file.
    source : contextily.providers object or str
        [Optional. Default: Stamen Terrain web tiles] The tile source: web
        tile provider or path to local file. The web tile provider can be
        in the form of a `contextily.providers` object or a URL. The
        placeholders for the XYZ in the URL need to be `{x}`, `{y}`, `{z}`,
        respectively. For local file paths, the file is read with `rasterio`
        and all bands are loaded into the basemap.
        IMPORTANT: tiles are assumed to be in the Spherical Mercator
        projection (EPSG:3857), unless the `crs` keyword is specified.
    interpolation : str
        [Optional. Default='bilinear'] Interpolation algorithm to be passed
        to `imshow`. See `matplotlib.pyplot.imshow` for further details.
    attribution : str
        [Optional. Defaults to attribution specified by the source] Text to
        be added at the bottom of the axis. This defaults to the attribution
        of the provider specified in `source` if available. Specify False to
        not automatically add an attribution, or a string to pass a custom
        attribution.
    attribution_size : int
        [Optional. Defaults to `ATTRIBUTION_SIZE`] Font size to render
        attribution text with.
    reset_extent : bool
        [Optional. Default=True] If True, the extent of the basemap added is
        reset to the original extent (xlim, ylim) of `ax`.
    crs : None or str or CRS
        [Optional. Default=None] Coordinate reference system (CRS),
        expressed in any format permitted by rasterio, to use for the
        resulting basemap. If None (default), no warping is performed and
        the original Spherical Mercator (EPSG:3857) is used.
    resampling : <enum 'Resampling'>
        [Optional. Default=Resampling.bilinear] Resampling method for
        executing warping, expressed as a `rasterio.enums.Resampling`
        method.
    url : str [DEPRECATED]
        [Optional. Default: 'http://tile.stamen.com/terrain/{z}/{x}/{y}.png']
        Source url for web tiles, or path to local file. If local, the file
        is read with `rasterio` and all bands are loaded into the basemap.
    **extra_imshow_args :
        Other parameters to be passed to `imshow`.

    Examples
    --------
    >>> import geopandas
    >>> import libpysal as ps
    >>> import matplotlib.pyplot as plt
    >>> import contextily as ctx
    >>> db = geopandas.read_file(ps.examples.get_path('virginia.shp'))

    Ensure the data is in Spherical Mercator:

    >>> db = db.to_crs(epsg=3857)

    Add a web basemap:

    >>> ax = db.plot(alpha=0.5, color='k', figsize=(6, 6))
    >>> ctx.add_basemap(ax)
    >>> plt.show()

    Or download a basemap to a local file and then plot it:

    >>> source = 'virginia.tiff'
    >>> _ = ctx.bounds2raster(*db.total_bounds, zoom=6, path=source)
    >>> ax = db.plot(alpha=0.5, color='k', figsize=(6, 6))
    >>> ctx.add_basemap(ax, source=source)
    >>> plt.show()
    """
    xmin, xmax, ymin, ymax = ax.axis()

    if url is not None and source is None:
        warnings.warn(
            'The "url" option is deprecated. Please use the "source"'
            " argument instead.",
            FutureWarning,
            stacklevel=2,
        )
        source = url
    elif url is not None and source is not None:
        warnings.warn(
            'The "url" argument is deprecated. Please use the "source"'
            ' argument. Do not supply a "url" argument. It will be ignored.',
            FutureWarning,
            stacklevel=2,
        )

    # If web source
    if (source is None
            or isinstance(source, (dict, TileProvider))
            or (isinstance(source, str) and source[:4] == "http")):
        # Extent
        left, right, bottom, top = xmin, xmax, ymin, ymax
        # Convert extent from `crs` into WM for tile query
        if crs is not None:
            left, right, bottom, top = _reproj_bb(
                left, right, bottom, top, crs, {"init": "epsg:3857"}
            )
        # Download image
        image, extent = bounds2img(
            left, bottom, right, top, zoom=zoom, source=source, ll=False
        )
        # Warping
        if crs is not None:
            image, extent = warp_tiles(image, extent, t_crs=crs,
                                       resampling=resampling)
        # Check if overlay
        if _is_overlay(source) and 'zorder' not in extra_imshow_args:
            # If zorder was not set then make it 9 otherwise leave it
            extra_imshow_args['zorder'] = 9
    # If local source
    else:
        import rasterio as rio

        # Read file
        with rio.open(source) as raster:
            if reset_extent:
                from rasterio.mask import mask as riomask

                # Read window
                if crs:
                    left, bottom, right, top = rio.warp.transform_bounds(
                        crs, raster.crs, xmin, ymin, xmax, ymax
                    )
                else:
                    left, bottom, right, top = xmin, ymin, xmax, ymax
                window = [
                    {
                        "type": "Polygon",
                        "coordinates": (
                            (
                                (left, bottom),
                                (right, bottom),
                                (right, top),
                                (left, top),
                                (left, bottom),
                            ),
                        ),
                    }
                ]
                image, img_transform = riomask(raster, window, crop=True)
                extent = left, right, bottom, top
            else:
                # Read full
                image = np.array([band for band in raster.read()])
                img_transform = raster.transform
                bb = raster.bounds
                extent = bb.left, bb.right, bb.bottom, bb.top
            # Warp
            if (crs is not None) and (raster.crs != crs):
                image, bounds, _ = _warper(
                    image, img_transform, raster.crs, crs, resampling
                )
                extent = bounds.left, bounds.right, bounds.bottom, bounds.top
        image = image.transpose(1, 2, 0)

    # Plotting
    if image.shape[2] == 1:
        image = image[:, :, 0]
    img = ax.imshow(image, extent=extent, interpolation=interpolation,
                    **extra_imshow_args)

    if reset_extent:
        ax.axis((xmin, xmax, ymin, ymax))
    else:
        max_bounds = (
            min(xmin, extent[0]),
            max(xmax, extent[1]),
            min(ymin, extent[2]),
            max(ymax, extent[3]),
        )
        ax.axis(max_bounds)

    # Add attribution text
    if source is None:
        source = providers.Stamen.Terrain
    if isinstance(source, (dict, TileProvider)) and attribution is None:
        attribution = source.get("attribution")
    if attribution:
        add_attribution(ax, attribution, font_size=attribution_size)

    return
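# A short usage sketch for the `crs` keyword of add_basemap (assumptions:
# geopandas is installed and 'my_data.shp' stands in for any projected
# dataset): the data stays in its native CRS and the Web Mercator tiles
# are warped to match, using the `resampling` method described above.
import geopandas
import contextily as ctx

db = geopandas.read_file('my_data.shp')  # hypothetical input file
ax = db.plot(alpha=0.5, color='k', figsize=(6, 6))
# Tiles are fetched in EPSG:3857, then warped to the data's CRS:
ctx.add_basemap(ax, crs=db.crs.to_string(),
                source=ctx.providers.OpenStreetMap.Mapnik)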
def simple_glacier_masks(gdir):
    """Compute glacier masks based on much simpler rules than OGGM's default.

    This is therefore more robust: we use this function to compute glacier
    hypsometries.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    if not os.path.exists(gdir.get_filepath('gridded_data')):
        # In a possible future, we might actually want to raise a
        # deprecation warning here
        process_dem(gdir)

    # Geometries
    geometry = gdir.read_shapefile('outlines').geometry[0]

    # rio metadata
    with rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff') as ds:
        data = ds.read(1).astype(rasterio.float32)
        profile = ds.profile

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    geometry = geometry.buffer(0)
    if not geometry.is_valid:
        raise InvalidDEMError('This glacier geometry is not valid.')

    # Compute the glacier mask using rasterio
    # Small detour as mask only accepts DataReader objects
    with rasterio.io.MemoryFile() as memfile:
        with memfile.open(**profile) as dataset:
            dataset.write(data.astype(np.int16)[np.newaxis, ...])
        dem_data = rasterio.open(memfile.name)
        masked_dem, _ = riomask(dem_data, [shpg.mapping(geometry)],
                                filled=False)
    glacier_mask = ~masked_dem[0, ...].mask

    # Same without nunataks
    with rasterio.io.MemoryFile() as memfile:
        with memfile.open(**profile) as dataset:
            dataset.write(data.astype(np.int16)[np.newaxis, ...])
        dem_data = rasterio.open(memfile.name)
        poly = shpg.mapping(shpg.Polygon(geometry.exterior))
        masked_dem, _ = riomask(dem_data, [poly], filled=False)
    glacier_mask_nonuna = ~masked_dem[0, ...].mask

    # Glacier exterior excluding nunataks
    erode = binary_erosion(glacier_mask_nonuna)
    glacier_ext = glacier_mask_nonuna ^ erode
    glacier_ext = np.where(glacier_mask_nonuna, glacier_ext, 0)

    dem = read_geotiff_dem(gdir)

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[glacier_mask])
    tmp_min = np.min(dem[glacier_mask])
    if tmp_max < (tmp_min + 1):
        raise InvalidDEMError('({}) min equal max in the masked DEM.'
                              .format(gdir.rgi_id))

    # hypsometry
    bsize = 50.
    dem_on_ice = dem[glacier_mask]
    bins = np.arange(nicenumber(dem_on_ice.min(), bsize, lower=True),
                     nicenumber(dem_on_ice.max(), bsize) + 0.01, bsize)

    h, _ = np.histogram(dem_on_ice, bins)
    h = h / np.sum(h) * 1000  # in permil

    # We want to convert the bins to ints but preserve their sum to 1000
    # Start with everything rounded down, then round up the numbers with
    # the highest fractional parts until the desired sum is reached.
    hi = np.floor(h).astype(int)
    hup = np.ceil(h).astype(int)
    aso = np.argsort(hup - h)
    for i in aso:
        hi[i] = hup[i]
        if np.sum(hi) == 1000:
            break

    # slope
    sy, sx = np.gradient(dem, gdir.grid.dx)
    aspect = np.arctan2(np.mean(-sx[glacier_mask]),
                        np.mean(sy[glacier_mask]))
    aspect = np.rad2deg(aspect)
    if aspect < 0:
        aspect += 360
    slope = np.arctan(np.sqrt(sx ** 2 + sy ** 2))
    avg_slope = np.rad2deg(np.mean(slope[glacier_mask]))

    # write
    df = pd.DataFrame()
    df['RGIId'] = [gdir.rgi_id]
    df['GLIMSId'] = [gdir.glims_id]
    df['Zmin'] = [dem_on_ice.min()]
    df['Zmax'] = [dem_on_ice.max()]
    df['Zmed'] = [np.median(dem_on_ice)]
    df['Area'] = [gdir.rgi_area_km2]
    df['Slope'] = [avg_slope]
    df['Aspect'] = [aspect]
    for b, bs in zip(hi, (bins[1:] + bins[:-1]) / 2):
        df['{}'.format(np.round(bs).astype(int))] = [b]
    df.to_csv(gdir.get_filepath('hypsometry'), index=False)

    # write out the grids in the netcdf file
    with GriddedNcdfFile(gdir) as nc:

        if 'glacier_mask' not in nc.variables:
            v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ),
                                  zlib=True)
            v.units = '-'
            v.long_name = 'Glacier mask'
        else:
            v = nc.variables['glacier_mask']
        v[:] = glacier_mask

        if 'glacier_ext' not in nc.variables:
            v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ),
                                  zlib=True)
            v.units = '-'
            v.long_name = 'Glacier external boundaries'
        else:
            v = nc.variables['glacier_ext']
        v[:] = glacier_ext

        # Log DEM that needed processing within the glacier mask
        valid_mask = nc.variables['topo_valid_mask'][:]
        if gdir.get_diagnostics().get('dem_needed_interpolation', False):
            pnan = (valid_mask == 0) & glacier_mask
            gdir.add_to_diagnostics('dem_invalid_perc_in_mask',
                                    np.sum(pnan) / np.sum(glacier_mask))

        # add some meta stats; the context manager closes the file
        nc.max_h_dem = np.max(dem)
        nc.min_h_dem = np.min(dem)
        dem_on_g = dem[np.where(glacier_mask)]
        nc.max_h_glacier = np.max(dem_on_g)
        nc.min_h_glacier = np.min(dem_on_g)
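# A worked toy example of the slope/aspect step above (our synthetic DEM,
# not OGGM data): a plane dipping east at 0.1 m/m should yield an aspect
# of 90 degrees and a slope of arctan(0.1) ~ 5.7 degrees. Note that the
# mean aspect is computed from the averaged gradient *components*, which
# avoids the 0/360 wrap-around that averaging per-cell angles would cause.
import numpy as np

dx = 100.                                   # grid spacing in m (assumed)
xx = np.arange(50) * dx
dem = 2000. - 0.1 * xx * np.ones((50, 1))   # east-dipping plane

sy, sx = np.gradient(dem, dx)               # d(dem)/dy, d(dem)/dx
aspect = np.rad2deg(np.arctan2(np.mean(-sx), np.mean(sy)))
if aspect < 0:
    aspect += 360
avg_slope = np.rad2deg(np.mean(np.arctan(np.sqrt(sx ** 2 + sy ** 2))))
print(aspect, avg_slope)                    # -> 90.0, ~5.71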