def _distribute_thickness_per_interp(glacier_mask, topo, cls, fls, grid,
                                     smooth=True, add_slope=True):
    """Where the job is actually done."""

    # Thick to interpolate
    dx = grid.dx
    thick = np.where(glacier_mask, np.NaN, 0)

    # Along the lines
    vs = []
    for cl, fl in zip(cls, fls):
        # TODO: here one should see if parabola is always the best choice
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    vol = np.sum(vs)

    # Interpolate
    xx, yy = grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')

    # Smooth
    if smooth:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, np.int(gsize))
        thick = np.where(glacier_mask, thick, 0.)

    # Slope
    slope = 1.
    if add_slope:
        sy, sx = np.gradient(topo, dx, dx)
        slope = np.arctan(np.sqrt(sy**2 + sx**2))
        slope = np.clip(slope, np.deg2rad(6.), np.pi / 2.)
        slope = 1 / slope**(cfg.N / (cfg.N + 2))

    # Conserve volume
    tmp_vol = np.nansum(thick * slope * dx**2)
    final_t = thick * slope * vol / tmp_vol

    # Add to grids
    final_t = np.where(np.isfinite(final_t), final_t, 0.)
    assert np.allclose(np.sum(final_t * dx**2), vol)
    return final_t
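# Illustrative sketch (not part of OGGM): the volume-conservation step above
# can be shown on a toy grid. NaNs mark cells of unknown thickness, scipy's
# griddata fills them from the known cells, and a final rescaling restores a
# target volume. All numbers and the function name below are made up for the
# example; only the pattern mirrors _distribute_thickness_per_interp.
def _example_volume_conserving_interp():
    """Fill a toy thickness field and rescale it to a target volume."""
    import numpy as np
    from scipy.interpolate import griddata

    dx = 100.                          # grid spacing (m), arbitrary here
    target_vol = 1e6                   # volume to conserve (m3), arbitrary
    thick = np.full((5, 5), np.nan)
    thick[:, 0] = 0.                   # zero thickness at the borders
    thick[:, -1] = 0.
    thick[2, 1:4] = [50., 80., 60.]    # values along a toy "centerline"

    yy, xx = np.mgrid[0:5, 0:5]
    ok = np.isfinite(thick)
    filled = griddata(np.column_stack((yy[ok], xx[ok])), thick[ok],
                      np.column_stack((yy.ravel(), xx.ravel())),
                      method='linear').reshape(thick.shape)

    # Rescale so that sum(thick * dx**2) matches the target volume
    filled *= target_vol / np.nansum(filled * dx**2)
    assert np.allclose(np.nansum(filled * dx**2), target_vol)
    return filled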
def catchment_area(gdir, div_id=None):
    """Compute the catchment areas of each tributary line.

    The idea is to compute the route of lowest cost for any point on the
    glacier to rejoin a centerline. These routes are then put together if
    they belong to the same centerline, thus creating "catchment areas" for
    each centerline.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    # Variables
    cls = gdir.read_pickle('centerlines', div_id=div_id)
    geoms = gdir.read_pickle('geometries', div_id=div_id)
    glacier_pix = geoms['polygon_pix']
    fpath = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(fpath) as nc:
        costgrid = nc.variables['cost_grid'][:]
        mask = nc.variables['glacier_mask'][:]

    # If we have only one centerline this is going to be easy: take the
    # mask and return
    if len(cls) == 1:
        cl_catchments = [np.array(np.nonzero(mask == 1)).T]
        gdir.write_pickle(cl_catchments, 'catchment_indices', div_id=div_id)
        return

    # Initialise costgrid and the "catching" dict
    cost_factor = 0.  # Make it cheap
    dic_catch = dict()
    for i, cl in enumerate(cls):
        x, y = tuple2int(cl.line.xy)
        costgrid[y, x] = cost_factor
        for x, y in [(int(x), int(y)) for x, y in cl.line.coords]:
            assert (y, x) not in dic_catch
            dic_catch[(y, x)] = set([(y, x)])

    # It is much faster to make the array as small as possible (especially
    # with divides). We have to trick:
    pm = np.nonzero(mask == 1)
    ymi, yma = np.min(pm[0])-1, np.max(pm[0])+2
    xmi, xma = np.min(pm[1])-1, np.max(pm[1])+2
    costgrid = costgrid[ymi:yma, xmi:xma]
    mask = mask[ymi:yma, xmi:xma]

    # Where did we compute the path already?
    computed = np.where(mask == 1, 0, np.nan)

    # Coords of Terminus (converted)
    endcoords = np.array(cls[0].tail.coords[0])[::-1].astype(np.int64)
    endcoords -= [ymi, xmi]

    # Start with all the paths at the boundaries, they are more likely
    # to cover much of the glacier
    for headx, heady in tuple2int(glacier_pix.exterior.coords):
        headcoords = np.array([heady-ymi, headx-xmi])  # convert
        indices, _ = route_through_array(costgrid, headcoords, endcoords)
        inds = np.array(indices).T
        computed[inds[0], inds[1]] = 1
        set_dump = set([])
        for y, x in indices:
            y, x = y+ymi, x+xmi  # back to original
            set_dump.add((y, x))
            if (y, x) in dic_catch:
                dic_catch[(y, x)] = dic_catch[(y, x)].union(set_dump)
                break

    # Repeat for the not yet computed pixels
    while True:
        not_computed = np.where(computed == 0)
        if len(not_computed[0]) == 0:  # All points computed !!
            break
        headcoords = np.array([not_computed[0][0],
                               not_computed[1][0]]).astype(np.int64)
        indices, _ = route_through_array(costgrid, headcoords, endcoords)
        inds = np.array(indices).T
        computed[inds[0], inds[1]] = 1
        set_dump = set([])
        for y, x in indices:
            y, x = y+ymi, x+xmi  # back to original
            set_dump.add((y, x))
            if (y, x) in dic_catch:
                dic_catch[(y, x)] = dic_catch[(y, x)].union(set_dump)
                break

    # For each centerline, make a set of all points flowing into it
    cl_catchments = []
    for cl in cls:
        # Union of all
        cl_catch = set()
        for x, y in [(int(x), int(y)) for x, y in cl.line.coords]:
            cl_catch = cl_catch.union(dic_catch[(y, x)])
        cl_catchments.append(cl_catch)

    # The higher order centerlines will get the points from the upstream
    # ones too. The idea is to store the points which are unique to each
    # centerline: now, in decreasing line order we remove the indices from
    # the tributaries
    cl_catchments = cl_catchments[::-1]
    for i, cl in enumerate(cl_catchments):
        cl_catchments[i] = np.array(list(cl.difference(*cl_catchments[i+1:])))
    cl_catchments = cl_catchments[::-1]  # put it back in order

    # Write the data
    gdir.write_pickle(cl_catchments, 'catchment_indices', div_id=div_id)
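# Illustrative sketch (not part of OGGM): catchment_area relies on
# skimage.graph.route_through_array to trace the cheapest path from any pixel
# to the terminus. The cost array and the coordinates below are synthetic;
# only the call signature mirrors the usage above.
def _example_route_through_array():
    """Trace a least-cost route across a small synthetic cost grid."""
    import numpy as np
    from skimage.graph import route_through_array

    costgrid = np.ones((5, 5))
    costgrid[1:4, 2] = 100.     # an expensive "ridge" the route avoids
    start = (0, 0)              # (row, col) of a head pixel
    end = (4, 4)                # (row, col) of the terminus
    indices, weight = route_through_array(costgrid, start, end)
    # indices is a list of (row, col) tuples from start to end
    return indices, weight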
def merged_glacier_masks(gdir, geometry):
    """Makes a gridded mask of merged glacier outlines.

    This is a simplified version of glacier_masks. We don't need fancy
    corrections or smoothing here: The flowlines for the actual model run are
    based on a proper call of glacier_masks.

    This task is only to get outlines etc. for visualization!

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    geometry: shapely.geometry.multipolygon.MultiPolygon
        united outlines of the merged glaciers
    """

    # open srtm tif-file:
    dem_dr = rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff')
    dem = dem_dr.read(1).astype(rasterio.float32)

    # Grid
    nx = dem_dr.width
    ny = dem_dr.height
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    if np.min(dem) == np.max(dem):
        raise RuntimeError('({}) min equal max in the DEM.'
                           .format(gdir.rgi_id))

    # Projection
    if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
        transf = dem_dr.transform
    else:
        transf = dem_dr.affine
    x0 = transf[2]  # UL corner
    y0 = transf[5]  # UL corner
    dx = transf[0]
    dy = transf[4]  # Negative

    if not (np.allclose(dx, -dy) or np.allclose(dx, gdir.grid.dx) or
            np.allclose(y0, gdir.grid.corner_grid.y0, atol=1e-2) or
            np.allclose(x0, gdir.grid.corner_grid.x0, atol=1e-2)):
        raise RuntimeError('DEM file and Salem Grid do not match!')
    dem_dr.close()

    # Clip topography to 0 m a.s.l.
    dem = dem.clip(0)

    # Interpolate shape to a regular path
    glacier_poly_hr = list(geometry)

    for nr, poly in enumerate(glacier_poly_hr):
        # transform geometry to map
        _geometry = salem.transform_geometry(poly, to_crs=gdir.grid.proj)
        glacier_poly_hr[nr] = _interp_polygon(_geometry, gdir.grid.dx)

    glacier_poly_hr = shpg.MultiPolygon(glacier_poly_hr)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise RuntimeError('This glacier geometry is not valid.')

    # Rounded geometry to nearest pix
    # I can not use _polyg
    # glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)
    def project(x, y):
        return np.rint(x).astype(np.int64), np.rint(y).astype(np.int64)

    glacier_poly_pix = shapely.ops.transform(project, glacier_poly_hr)

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)

    for poly in glacier_poly_pix:
        (x, y) = poly.exterior.xy
        glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
        for gint in poly.interiors:
            x, y = tuple2int(gint.xy)
            glacier_mask[skdraw.polygon(y, x)] = 0
            glacier_mask[y, x] = 0  # on the nunataks, no
        x, y = tuple2int(poly.exterior.xy)
        glacier_mask[y, x] = 1
        glacier_ext[y, x] = 1

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[np.where(glacier_mask == 1)])
    tmp_min = np.min(dem[np.where(glacier_mask == 1)])
    if tmp_max < (tmp_min + 1):
        raise RuntimeError('({}) min equal max in the masked DEM.'
                           .format(gdir.rgi_id))

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data')

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier external boundaries'
    v[:] = glacier_ext

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()

    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries')
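# Illustrative sketch (not part of OGGM): the mask routines above rasterize
# the outline with skimage.draw.polygon, which returns the row/column indices
# of the pixels whose centers fall inside the polygon; boundary pixels are
# then set explicitly. The tiny triangle below is synthetic and only
# demonstrates that fill-plus-boundary pattern.
def _example_polygon_rasterization():
    """Fill a small synthetic polygon into a uint8 mask."""
    import numpy as np
    from skimage import draw as skdraw

    mask = np.zeros((8, 8), dtype=np.uint8)
    poly_y = np.array([1, 1, 6])
    poly_x = np.array([1, 6, 3])
    mask[skdraw.polygon(poly_y, poly_x)] = 1   # interior pixels
    mask[poly_y, poly_x] = 1                   # make sure vertices are set too
    return mask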
def glacier_masks(gdir):
    """Makes a gridded mask of the glacier outlines and topography.

    This function fills holes in the source DEM and produces smoothed gridded
    topography and glacier outline arrays. These are the ones which will
    later be used to determine bed and surface height.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # open srtm tif-file:
    dem_dr = rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff')
    dem = dem_dr.read(1).astype(rasterio.float32)

    # Grid
    nx = dem_dr.width
    ny = dem_dr.height
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    # Correct the DEM
    # Currently we just do a linear interp -- filling is totally shit anyway
    min_z = -999.
    dem[dem <= min_z] = np.NaN
    isfinite = np.isfinite(dem)
    if np.all(~isfinite):
        raise InvalidDEMError('Not a single valid grid point in DEM')
    if np.any(~isfinite):
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero(~isfinite)
        pok = np.nonzero(isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        try:
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
                                 method='linear')
        except ValueError:
            raise InvalidDEMError('DEM interpolation not possible.')
        log.warning(gdir.rgi_id + ': DEM needed interpolation.')
        gdir.add_to_diagnostics('dem_needed_interpolation', True)
        gdir.add_to_diagnostics('dem_invalid_perc', len(pnan[0]) / (nx*ny))

    isfinite = np.isfinite(dem)
    if np.any(~isfinite):
        # this happens when extrapolation is needed
        # see how many percent of the dem
        if np.sum(~isfinite) > (0.5 * nx * ny):
            log.warning('({}) many NaNs in DEM'.format(gdir.rgi_id))
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero(~isfinite)
        pok = np.nonzero(isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        try:
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
                                 method='nearest')
        except ValueError:
            raise InvalidDEMError('DEM extrapolation not possible.')
        log.warning(gdir.rgi_id + ': DEM needed extrapolation.')
        gdir.add_to_diagnostics('dem_needed_extrapolation', True)
        gdir.add_to_diagnostics('dem_extrapol_perc', len(pnan[0]) / (nx*ny))

    if np.min(dem) == np.max(dem):
        raise InvalidDEMError('({}) min equal max in the DEM.'
                              .format(gdir.rgi_id))

    # Projection
    if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
        transf = dem_dr.transform
    else:
        transf = dem_dr.affine
    x0 = transf[2]  # UL corner
    y0 = transf[5]  # UL corner
    dx = transf[0]
    dy = transf[4]  # Negative

    if not (np.allclose(dx, -dy) or np.allclose(dx, gdir.grid.dx) or
            np.allclose(y0, gdir.grid.corner_grid.y0, atol=1e-2) or
            np.allclose(x0, gdir.grid.corner_grid.x0, atol=1e-2)):
        raise InvalidDEMError('DEM file and Salem Grid do not match!')
    dem_dr.close()

    # Clip topography to 0 m a.s.l.
    dem = dem.clip(0)

    # Smooth DEM?
    if cfg.PARAMS['smooth_window'] > 0.:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        smoothed_dem = gaussian_blur(dem, np.int(gsize))
    else:
        smoothed_dem = dem.copy()

    if not np.all(np.isfinite(smoothed_dem)):
        raise InvalidDEMError('({}) NaN in smoothed DEM'.format(gdir.rgi_id))

    # Geometries
    geometry = gdir.read_shapefile('outlines').geometry[0]

    # Interpolate shape to a regular path
    glacier_poly_hr = _interp_polygon(geometry, gdir.grid.dx)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise InvalidGeometryError('This glacier geometry is not valid.')

    # Rounded nearest pix
    glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)
    if glacier_poly_pix.exterior is None:
        raise InvalidGeometryError('Problem in converting glacier geometry '
                                   'to grid resolution.')

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)
    (x, y) = glacier_poly_pix.exterior.xy
    glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
    for gint in glacier_poly_pix.interiors:
        x, y = tuple2int(gint.xy)
        glacier_mask[skdraw.polygon(y, x)] = 0
        glacier_mask[y, x] = 0  # on the nunataks, no
    x, y = tuple2int(glacier_poly_pix.exterior.xy)
    glacier_mask[y, x] = 1
    glacier_ext[y, x] = 1

    # Because of the 0 values at nunataks boundaries, some "Ice Islands"
    # can happen within nunataks (e.g.: RGI40-11.00062)
    # See if we can filter them out easily
    regions, nregions = label(glacier_mask, structure=label_struct)
    if nregions > 1:
        log.debug('(%s) we had to cut an island in the mask', gdir.rgi_id)
        # Check the size of those
        region_sizes = [np.sum(regions == r)
                        for r in np.arange(1, nregions+1)]
        am = np.argmax(region_sizes)
        # Check not a strange glacier
        sr = region_sizes.pop(am)
        for ss in region_sizes:
            assert (ss / sr) < 0.1
        glacier_mask[:] = 0
        glacier_mask[np.where(regions == (am+1))] = 1

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[np.where(glacier_mask == 1)])
    tmp_min = np.min(dem[np.where(glacier_mask == 1)])
    if tmp_max < (tmp_min + 1):
        raise InvalidDEMError('({}) min equal max in the masked DEM.'
                              .format(gdir.rgi_id))

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data')

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = ('DEM topography smoothed '
                   'with radius: {:.1} m'.format(cfg.PARAMS['smooth_window']))
    v[:] = smoothed_dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier external boundaries'
    v[:] = glacier_ext

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()

    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries')
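# Illustrative sketch (not part of OGGM): the DEM correction above first
# interpolates interior gaps linearly, then falls back to nearest-neighbour
# extrapolation for anything still missing. The toy array below is synthetic
# and only reproduces that two-step pattern.
def _example_fill_dem_gaps():
    """Fill NaN cells of a toy DEM with griddata, linear then nearest."""
    import numpy as np
    from scipy.interpolate import griddata

    dem = np.arange(25, dtype=float).reshape((5, 5))
    dem[2, 2] = np.nan          # interior hole -> linear can fill it
    dem[0, 0] = np.nan          # corner hole -> needs nearest-neighbour

    yy, xx = np.mgrid[0:5, 0:5]
    for method in ('linear', 'nearest'):
        bad = ~np.isfinite(dem)
        if not bad.any():
            break
        ok = ~bad
        dem[bad] = griddata(np.column_stack((yy[ok], xx[ok])), dem[ok],
                            np.column_stack((yy[bad], xx[bad])),
                            method=method)
    assert np.all(np.isfinite(dem))
    return dem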
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero to
        suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate
    thick = glacier_ext * np.NaN
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, np.int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.NaN
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def test_baltoro_centerlines(self):

    cfg.PARAMS['border'] = 2
    cfg.PATHS['dem_file'] = get_demo_file('baltoro_srtm_clip.tif')

    b_file = get_demo_file('baltoro_wgs84.shp')
    rgidf = gpd.GeoDataFrame.from_file(b_file)

    kienholz_file = get_demo_file('centerlines_baltoro_wgs84.shp')
    kdf = gpd.read_file(kienholz_file)

    # loop because for some reason indexing won't work
    for index, entity in rgidf.iterrows():

        # add fake attribs
        entity.O1REGION = 0
        entity.BGNDATE = 0
        entity.NAME = 'Baltoro'

        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)

        my_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
        cls = gdir.read_pickle('centerlines', div_id=1)
        for cl in cls:
            x, y = tuple2int(cl.line.xy)
            my_mask[y, x] = 1

    # Transform
    kien_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
    from shapely.ops import transform
    for index, entity in kdf.iterrows():

        def proj(lon, lat):
            return salem.transform_proj(salem.wgs84, gdir.grid.proj,
                                        lon, lat)
        kgm = transform(proj, entity.geometry)

        # Interpolate shape to a regular path
        e_line = []
        for distance in np.arange(0.0, kgm.length, gdir.grid.dx):
            e_line.append(*kgm.interpolate(distance).coords)
        kgm = shpg.LineString(e_line)

        # Transform geometry into grid coordinates
        def proj(x, y):
            return gdir.grid.transform(x, y, crs=gdir.grid.proj)
        kgm = transform(proj, kgm)

        # Rounded nearest pix
        project = lambda x, y: (np.rint(x).astype(np.int64),
                                np.rint(y).astype(np.int64))
        kgm = transform(project, kgm)

        x, y = tuple2int(kgm.xy)
        kien_mask[y, x] = 1

    # We test the Heidke Skill score of our predictions
    rest = kien_mask + 2 * my_mask
    # gr.plot_array(rest)
    na = len(np.where(rest == 3)[0])
    nb = len(np.where(rest == 2)[0])
    nc = len(np.where(rest == 1)[0])
    nd = len(np.where(rest == 0)[0])
    denom = np.float64((na + nc) * (nd + nc) + (na + nb) * (nd + nb))
    hss = np.float64(2.) * ((na * nd) - (nb * nc)) / denom
    self.assertTrue(hss > 0.53)
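# Illustrative sketch (not part of OGGM): the Heidke Skill Score used by the
# test above is HSS = 2*(a*d - b*c) / ((a + c)*(c + d) + (a + b)*(b + d)),
# where a, b, c, d count hits, false alarms, misses and correct negatives.
# The two tiny masks below are made up; they only show how the contingency
# counts are derived from `kien_mask + 2 * my_mask`.
def _example_heidke_skill_score():
    """Compute the HSS for two small synthetic binary masks."""
    import numpy as np

    reference = np.array([[1, 1, 0], [0, 1, 0], [0, 0, 0]])   # "kien_mask"
    prediction = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])  # "my_mask"
    rest = reference + 2 * prediction
    a = np.sum(rest == 3)   # predicted and observed
    b = np.sum(rest == 2)   # predicted only (false alarm)
    c = np.sum(rest == 1)   # observed only (miss)
    d = np.sum(rest == 0)   # neither
    denom = (a + c) * (c + d) + (a + b) * (b + d)
    return 2. * (a * d - b * c) / denom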
def glacier_masks(gdir):
    """Makes a gridded mask of the glacier outlines.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # open srtm tif-file:
    dem_dr = rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff')
    dem = dem_dr.read(1).astype(rasterio.float32)

    # Grid
    nx = dem_dr.width
    ny = dem_dr.height
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    # Correct the DEM (ASTER...)
    # Currently we just do a linear interp -- ASTER is totally shit anyway
    min_z = -999.
    isfinite = np.isfinite(dem)
    if (np.min(dem) <= min_z) or np.any(~isfinite):
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero((dem <= min_z) | (~isfinite))
        pok = np.nonzero((dem > min_z) & isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        dem[pnan] = griddata(points, np.ravel(dem[pok]), inter)
        log.warning(gdir.rgi_id + ': DEM needed interpolation.')

    isfinite = np.isfinite(dem)
    if not np.all(isfinite):
        # see how many percent of the dem
        if np.sum(~isfinite) > (0.2 * nx * ny):
            raise RuntimeError('({}) too many NaNs in DEM'
                               .format(gdir.rgi_id))
        log.warning('({}) DEM needed zeros somewhere.'.format(gdir.rgi_id))
        dem[~isfinite] = 0

    if np.min(dem) == np.max(dem):
        raise RuntimeError('({}) min equal max in the DEM.'
                           .format(gdir.rgi_id))

    # Proj
    if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
        transf = dem_dr.transform
    else:
        transf = dem_dr.affine
    x0 = transf[2]  # UL corner
    y0 = transf[5]  # UL corner
    dx = transf[0]
    dy = transf[4]  # Negative
    assert dx == -dy
    assert dx == gdir.grid.dx
    assert y0 == gdir.grid.corner_grid.y0
    assert x0 == gdir.grid.corner_grid.x0
    dem_dr.close()

    # Clip topography to 0 m a.s.l.
    dem = dem.clip(0)

    # Smooth DEM?
    if cfg.PARAMS['smooth_window'] > 0.:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        smoothed_dem = gaussian_blur(dem, np.int(gsize))
    else:
        smoothed_dem = dem.copy()

    if not np.all(np.isfinite(smoothed_dem)):
        raise RuntimeError('({}) NaN in smoothed DEM'.format(gdir.rgi_id))

    # Geometries
    outlines_file = gdir.get_filepath('outlines')
    geometry = gpd.GeoDataFrame.from_file(outlines_file).geometry[0]

    # Interpolate shape to a regular path
    glacier_poly_hr = _interp_polygon(geometry, gdir.grid.dx)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise RuntimeError('This glacier geometry is crazy.')

    # Rounded nearest pix
    glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)
    (x, y) = glacier_poly_pix.exterior.xy
    glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
    for gint in glacier_poly_pix.interiors:
        x, y = tuple2int(gint.xy)
        glacier_mask[skdraw.polygon(y, x)] = 0
        glacier_mask[y, x] = 0  # on the nunataks, no
    x, y = tuple2int(glacier_poly_pix.exterior.xy)
    glacier_mask[y, x] = 1
    glacier_ext[y, x] = 1

    # Because of the 0 values at nunataks boundaries, some "Ice Islands"
    # can happen within nunataks (e.g.: RGI40-11.00062)
    # See if we can filter them out easily
    regions, nregions = label(glacier_mask, structure=label_struct)
    if nregions > 1:
        log.debug('(%s) we had to cut an island in the mask', gdir.rgi_id)
        # Check the size of those
        region_sizes = [np.sum(regions == r)
                        for r in np.arange(1, nregions+1)]
        am = np.argmax(region_sizes)
        # Check not a strange glacier
        sr = region_sizes.pop(am)
        for ss in region_sizes:
            assert (ss / sr) < 0.1
        glacier_mask[:] = 0
        glacier_mask[np.where(regions == (am+1))] = 1

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[np.where(glacier_mask == 1)])
    tmp_min = np.min(dem[np.where(glacier_mask == 1)])
    if tmp_max < (tmp_min + 1):
        raise RuntimeError('({}) min equal max in the masked DEM.'
                           .format(gdir.rgi_id))

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data')

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = ('DEM topography smoothed'
                   ' with radius: {:.1} m'.format(cfg.PARAMS['smooth_window']))
    v[:] = smoothed_dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier external boundaries'
    v[:] = glacier_ext

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()

    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries')
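# Illustrative sketch (not part of OGGM): the "ice island" filter above keeps
# only the largest connected component of the mask. `label_struct` is defined
# elsewhere in the module; the example below uses scipy.ndimage's default
# connectivity instead, on a synthetic mask with a one-pixel island.
def _example_keep_largest_region():
    """Remove all but the largest connected region of a binary mask."""
    import numpy as np
    from scipy.ndimage import label

    mask = np.zeros((6, 6), dtype=np.uint8)
    mask[1:4, 1:4] = 1      # main glacier body
    mask[5, 5] = 1          # spurious one-pixel island
    regions, nregions = label(mask)
    if nregions > 1:
        sizes = [np.sum(regions == r) for r in range(1, nregions + 1)]
        biggest = np.argmax(sizes) + 1
        mask[:] = 0
        mask[regions == biggest] = 1
    return mask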
def compute_centerlines(gdir, div_id=None):
    """Compute the centerlines following Kienholz et al., (2014).

    They are then sorted according to the modified Strahler number:
    http://en.wikipedia.org/wiki/Strahler_number

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    # Params
    single_fl = not cfg.PARAMS['use_multiple_flowlines']
    do_filter_slope = cfg.PARAMS['filter_min_slope']

    if 'force_one_flowline' in cfg.PARAMS:
        if gdir.rgi_id in cfg.PARAMS['force_one_flowline']:
            single_fl = True

    # open
    geom = gdir.read_pickle('geometries', div_id=div_id)
    grids_file = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(grids_file) as nc:
        # Variables
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext'][:]
        topo = nc.variables['topo_smoothed'][:]
    poly_pix = geom['polygon_pix']

    # Look for local maxima on the outline
    x, y = tuple2int(poly_pix.exterior.xy)
    ext_yx = tuple(reversed(poly_pix.exterior.xy))
    zoutline = topo[y[:-1], x[:-1]]  # last point is first point

    # Size of the half window to use to look for local maxima
    maxorder = np.rint(cfg.PARAMS['localmax_window'] / gdir.grid.dx)
    maxorder = np.clip(maxorder, 5., np.rint((len(zoutline) / 5.)))
    heads_idx = scipy.signal.argrelmax(zoutline, mode='wrap',
                                       order=maxorder.astype(np.int64))
    if single_fl or len(heads_idx[0]) <= 1:
        # small glaciers with one or less heads: take the absolute max
        heads_idx = (np.atleast_1d(np.argmax(zoutline)), )

    # Remove the heads that are too low
    zglacier = topo[np.where(glacier_mask)]
    head_threshold = np.percentile(zglacier, (1. / 3.) * 100)
    heads_idx = heads_idx[0][np.where(zoutline[heads_idx] > head_threshold)]
    heads = np.asarray(ext_yx)[:, heads_idx]
    heads_z = zoutline[heads_idx]
    # careful, the coords are in y, x order!
    heads = [shpg.Point(x, y) for y, x in zip(heads[0, :], heads[1, :])]

    # get radius of the buffer according to Kienholz eq. (1)
    radius = cfg.PARAMS['q1'] * geom['polygon_area'] + cfg.PARAMS['q2']
    radius = np.clip(radius, 0, cfg.PARAMS['rmax'])
    radius /= gdir.grid.dx  # in raster coordinates
    # Plus our criteria, quite useful to remove short lines:
    radius += cfg.PARAMS['flowline_junction_pix'] * cfg.PARAMS['flowline_dx']
    log.debug('%s: radius in raster coordinates: %.2f',
              gdir.rgi_id, radius)

    # OK. Filter and see.
    log.debug('%s: number of heads before radius filter: %d',
              gdir.rgi_id, len(heads))
    heads, heads_z = _filter_heads(heads, heads_z, radius, poly_pix)
    log.debug('%s: number of heads after radius filter: %d',
              gdir.rgi_id, len(heads))

    # Cost array
    costgrid = _make_costgrid(glacier_mask, glacier_ext, topo)

    # Terminus
    t_coord = _get_terminus_coord(gdir, ext_yx, zoutline)

    # Compute the routes
    lines = []
    for h in heads:
        h_coord = np.asarray(h.xy)[::-1].astype(np.int64)
        indices, _ = route_through_array(costgrid, h_coord, t_coord)
        lines.append(shpg.LineString(np.array(indices)[:, [1, 0]]))
    log.debug('%s: computed the routes', gdir.rgi_id)

    # Filter the shortest lines out
    dx_cls = cfg.PARAMS['flowline_dx']
    radius = cfg.PARAMS['flowline_junction_pix'] * dx_cls
    radius += 6 * dx_cls
    olines, _ = _filter_lines(lines, heads, cfg.PARAMS['kbuffer'], radius)
    log.debug('%s: number of heads after lines filter: %d',
              gdir.rgi_id, len(olines))

    # Filter the lines which are going up instead of down
    if do_filter_slope:
        olines = _filter_lines_slope(olines, topo, gdir)
        log.debug('%s: number of heads after slope filter: %d',
                  gdir.rgi_id, len(olines))

    # And rejoin the cut tails
    olines = _join_lines(olines)

    # Adds the line level
    for cl in olines:
        cl.order = _line_order(cl)

    # And sort them per order !!! several downstream tasks rely on this
    cls = []
    for i in np.argsort([cl.order for cl in olines]):
        cls.append(olines[i])

    # Final check
    if len(cls) == 0:
        raise RuntimeError('{} : no centerline found!'.format(gdir.rgi_id))

    # Write the data
    gdir.write_pickle(cls, 'centerlines', div_id=div_id)

    # Netcdf
    with netCDF4.Dataset(grids_file, 'a') as nc:
        if 'cost_grid' in nc.variables:
            # Overwrite
            nc.variables['cost_grid'][:] = costgrid
        else:
            # Create
            v = nc.createVariable('cost_grid', 'f4', ('y', 'x', ), zlib=True)
            v.units = '-'
            v.long_name = 'Centerlines cost grid'
            v[:] = costgrid
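# Illustrative sketch (not part of OGGM): potential heads are local maxima of
# the elevation sampled along the closed outline, found with
# scipy.signal.argrelmax in 'wrap' mode so that the first and last samples
# are treated as neighbours. The elevation profile below is synthetic.
def _example_outline_local_maxima():
    """Find local maxima on a toy, periodic elevation profile."""
    import numpy as np
    from scipy.signal import argrelmax

    zoutline = np.array([3000., 3200., 3100., 2900., 2800.,
                         3050., 3150., 2950., 2850., 2950.])
    heads_idx = argrelmax(zoutline, mode='wrap', order=2)
    if len(heads_idx[0]) <= 1:
        # fall back to the single highest point, as compute_centerlines does
        heads_idx = (np.atleast_1d(np.argmax(zoutline)), )
    return heads_idx[0]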
def test_baltoro_centerlines(self):

    cfg.PARAMS["border"] = 2
    cfg.PATHS["dem_file"] = get_demo_file("baltoro_srtm_clip.tif")

    b_file = get_demo_file("baltoro_wgs84.shp")
    entity = gpd.GeoDataFrame.from_file(b_file).iloc[0]

    kienholz_file = get_demo_file("centerlines_baltoro_wgs84.shp")
    kdf = gpd.read_file(kienholz_file)

    # add fake attribs
    entity.O1REGION = 0
    entity.BGNDATE = 0
    entity.NAME = "Baltoro"
    entity.GLACTYPE = "0000"

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)

    my_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
    cls = gdir.read_pickle("centerlines", div_id=1)
    for cl in cls:
        x, y = tuple2int(cl.line.xy)
        my_mask[y, x] = 1

    # Transform
    kien_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
    from shapely.ops import transform
    for index, entity in kdf.iterrows():

        def proj(lon, lat):
            return salem.transform_proj(salem.wgs84, gdir.grid.proj,
                                        lon, lat)
        kgm = transform(proj, entity.geometry)

        # Interpolate shape to a regular path
        e_line = []
        for distance in np.arange(0.0, kgm.length, gdir.grid.dx):
            e_line.append(*kgm.interpolate(distance).coords)
        kgm = shpg.LineString(e_line)

        # Transform geometry into grid coordinates
        def proj(x, y):
            return gdir.grid.transform(x, y, crs=gdir.grid.proj)
        kgm = transform(proj, kgm)

        # Rounded nearest pix
        project = lambda x, y: (np.rint(x).astype(np.int64),
                                np.rint(y).astype(np.int64))
        kgm = transform(project, kgm)

        x, y = tuple2int(kgm.xy)
        kien_mask[y, x] = 1

    # We test the Heidke Skill score of our predictions
    rest = kien_mask + 2 * my_mask
    # gr.plot_array(rest)
    na = len(np.where(rest == 3)[0])
    nb = len(np.where(rest == 2)[0])
    nc = len(np.where(rest == 1)[0])
    nd = len(np.where(rest == 0)[0])
    denom = np.float64((na + nc) * (nd + nc) + (na + nb) * (nd + nb))
    hss = np.float64(2.0) * ((na * nd) - (nb * nc)) / denom
    self.assertTrue(hss > 0.53)
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero to
        suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate
    thick = glacier_ext * np.NaN
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, np.int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.NaN
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def compute_centerlines(gdir, div_id=None):
    """Compute the centerlines following Kienholz et al., (2014).

    They are then sorted according to the modified Strahler number:
    http://en.wikipedia.org/wiki/Strahler_number

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    # open
    geom = gdir.read_pickle('geometries', div_id=div_id)
    grids_file = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(grids_file) as nc:
        # Variables
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext'][:]
        topo = nc.variables['topo_smoothed'][:]
    poly_pix = geom['polygon_pix']

    # Look for local maxima on the outline
    x, y = tuple2int(poly_pix.exterior.xy)
    ext_yx = tuple(reversed(poly_pix.exterior.xy))
    zoutline = topo[y[:-1], x[:-1]]  # last point is first point

    # Size of the half window to use to look for local maxima
    maxorder = np.rint(cfg.PARAMS['localmax_window'] / gdir.grid.dx)
    maxorder = np.clip(maxorder, 5., np.rint((len(zoutline) / 5.)))
    heads_idx = scipy.signal.argrelmax(zoutline, mode='wrap',
                                       order=maxorder.astype(np.int64))
    if (not cfg.PARAMS['use_multiple_flowlines']) or len(heads_idx[0]) <= 1:
        # small glaciers with one or less heads: take the absolute max
        heads_idx = (np.atleast_1d(np.argmax(zoutline)),)

    # Remove the heads that are too low
    zglacier = topo[np.where(glacier_mask)]
    head_threshold = np.percentile(zglacier, (1./3.)*100)
    heads_idx = heads_idx[0][np.where(zoutline[heads_idx] > head_threshold)]
    heads = np.asarray(ext_yx)[:, heads_idx]
    heads_z = zoutline[heads_idx]
    # careful, the coords are in y, x order!
    heads = [shpg.Point(x, y) for y, x in zip(heads[0, :], heads[1, :])]

    # get radius of the buffer according to Kienholz eq. (1)
    radius = cfg.PARAMS['q1'] * geom['polygon_area'] + cfg.PARAMS['q2']
    radius = np.clip(radius, 0, cfg.PARAMS['rmax'])
    radius /= gdir.grid.dx  # in raster coordinates
    # Plus our criteria, quite useful to remove short lines:
    radius += cfg.PARAMS['flowline_junction_pix'] * cfg.PARAMS['flowline_dx']
    log.debug('%s: radius in raster coordinates: %.2f',
              gdir.rgi_id, radius)

    # OK. Filter and see.
    log.debug('%s: number of heads before radius filter: %d',
              gdir.rgi_id, len(heads))
    heads, heads_z = _filter_heads(heads, heads_z, radius, poly_pix)
    log.debug('%s: number of heads after radius filter: %d',
              gdir.rgi_id, len(heads))

    # Cost array
    costgrid = _make_costgrid(glacier_mask, glacier_ext, topo)

    # Terminus
    t_coord = np.asarray(ext_yx)[:, np.argmin(zoutline)].astype(np.int64)

    # Compute the routes
    lines = []
    for h in heads:
        h_coord = np.asarray(h.xy)[::-1].astype(np.int64)
        indices, _ = route_through_array(costgrid, h_coord, t_coord)
        lines.append(shpg.LineString(np.array(indices)[:, [1, 0]]))
    log.debug('%s: computed the routes', gdir.rgi_id)

    # Filter the shortest lines out
    radius = cfg.PARAMS['flowline_junction_pix'] * cfg.PARAMS['flowline_dx']
    radius += 6 * cfg.PARAMS['flowline_dx']
    olines, _ = _filter_lines(lines, heads, cfg.PARAMS['kbuffer'], radius)
    log.debug('%s: number of heads after lines filter: %d',
              gdir.rgi_id, len(olines))

    # And rejoin the cut tails
    olines = _join_lines(olines)

    # Adds the line level
    for cl in olines:
        cl.order = _line_order(cl)

    # And sort them per order !!! several downstream tasks rely on this
    cls = []
    for i in np.argsort([cl.order for cl in olines]):
        cls.append(olines[i])

    # Final check
    if len(cls) == 0:
        raise RuntimeError('{} : no centerline found!'.format(gdir.rgi_id))

    # Write the data
    gdir.write_pickle(cls, 'centerlines', div_id=div_id)

    # Netcdf
    with netCDF4.Dataset(grids_file, 'a') as nc:
        v = nc.createVariable('cost_grid', 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Centerlines cost grid'
        v[:] = costgrid
def _mask_per_divide(gdir, div_id, dem, smoothed_dem):
    """Compute mask and geometries for each glacier divide.

    Is called by glacier masks.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    div_id: int
        id of the divide to process
    dem: 2D array
        topography
    smoothed_dem: 2D array
        smoothed topography
    """

    outlines_file = gdir.get_filepath('outlines', div_id=div_id)
    geometry = gpd.GeoDataFrame.from_file(outlines_file).geometry[0]

    # Interpolate shape to a regular path
    glacier_poly_hr = _interp_polygon(geometry, gdir.grid.dx)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise RuntimeError('This glacier geometry is crazy.')

    # Rounded nearest pix
    glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)
    (x, y) = glacier_poly_pix.exterior.xy
    glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
    for gint in glacier_poly_pix.interiors:
        x, y = tuple2int(gint.xy)
        glacier_mask[skdraw.polygon(y, x)] = 0
        glacier_mask[y, x] = 0  # on the nunataks, no
    x, y = tuple2int(glacier_poly_pix.exterior.xy)
    glacier_mask[y, x] = 1
    glacier_ext[y, x] = 1

    # Because of the 0 values at nunataks boundaries, some "Ice Islands"
    # can happen within nunataks (e.g.: RGI40-11.00062)
    # See if we can filter them out easily
    regions, nregions = label(glacier_mask, structure=label_struct)
    if nregions > 1:
        log.debug('%s: we had to cut an island in the mask', gdir.rgi_id)
        # Check the size of those
        region_sizes = [np.sum(regions == r)
                        for r in np.arange(1, nregions+1)]
        am = np.argmax(region_sizes)
        # Check not a strange glacier
        sr = region_sizes.pop(am)
        for ss in region_sizes:
            assert (ss / sr) < 0.1
        glacier_mask[:] = 0
        glacier_mask[np.where(regions == (am+1))] = 1

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data', div_id=div_id)

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography smoothed' \
                  ' with radius: {:.1} m'.format(cfg.PARAMS['smooth_window'])
    v[:] = smoothed_dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier external boundaries'
    v[:] = glacier_ext

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()

    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries', div_id=div_id)
def glacier_masks(gdir):
    """Makes a gridded mask of the glacier outlines that can be used by OGGM.

    For a more robust solution (not OGGM compatible) see simple_glacier_masks.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # In case nominal, just raise
    if gdir.is_nominal:
        raise GeometryError('{} is a nominal glacier.'.format(gdir.rgi_id))

    if not os.path.exists(gdir.get_filepath('gridded_data')):
        # In a possible future, we might actually want to raise a
        # deprecation warning here
        process_dem(gdir)

    # Geometries
    geometry = gdir.read_shapefile('outlines').geometry[0]

    # Interpolate shape to a regular path
    glacier_poly_hr = _interp_polygon(geometry, gdir.grid.dx)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise InvalidGeometryError('This glacier geometry is not valid.')

    # Rounded nearest pix
    glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)
    if glacier_poly_pix.exterior is None:
        raise InvalidGeometryError('Problem in converting glacier geometry '
                                   'to grid resolution.')

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)
    (x, y) = glacier_poly_pix.exterior.xy
    glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
    for gint in glacier_poly_pix.interiors:
        x, y = tuple2int(gint.xy)
        glacier_mask[skdraw.polygon(y, x)] = 0
        glacier_mask[y, x] = 0  # on the nunataks, no
    x, y = tuple2int(glacier_poly_pix.exterior.xy)
    glacier_mask[y, x] = 1
    glacier_ext[y, x] = 1

    # Because of the 0 values at nunataks boundaries, some "Ice Islands"
    # can happen within nunataks (e.g.: RGI40-11.00062)
    # See if we can filter them out easily
    regions, nregions = label(glacier_mask, structure=label_struct)
    if nregions > 1:
        log.debug('(%s) we had to cut an island in the mask', gdir.rgi_id)
        # Check the size of those
        region_sizes = [np.sum(regions == r)
                        for r in np.arange(1, nregions+1)]
        am = np.argmax(region_sizes)
        # Check not a strange glacier
        sr = region_sizes.pop(am)
        for ss in region_sizes:
            assert (ss / sr) < 0.1
        glacier_mask[:] = 0
        glacier_mask[np.where(regions == (am+1))] = 1

    # Write geometries
    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries')

    # write out the grids in the netcdf file
    with GriddedNcdfFile(gdir) as nc:

        if 'glacier_mask' not in nc.variables:
            v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ),
                                  zlib=True)
            v.units = '-'
            v.long_name = 'Glacier mask'
        else:
            v = nc.variables['glacier_mask']
        v[:] = glacier_mask

        if 'glacier_ext' not in nc.variables:
            v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ),
                                  zlib=True)
            v.units = '-'
            v.long_name = 'Glacier external boundaries'
        else:
            v = nc.variables['glacier_ext']
        v[:] = glacier_ext

        dem = nc.variables['topo'][:]
        valid_mask = nc.variables['topo_valid_mask'][:]

        # Last sanity check based on the masked dem
        tmp_max = np.max(dem[np.where(glacier_mask == 1)])
        tmp_min = np.min(dem[np.where(glacier_mask == 1)])
        if tmp_max < (tmp_min + 1):
            raise InvalidDEMError('({}) min equal max in the masked DEM.'
                                  .format(gdir.rgi_id))

        # Log DEM that needed processing within the glacier mask
        if gdir.get_diagnostics().get('dem_needed_interpolation', False):
            pnan = (valid_mask == 0) & glacier_mask
            gdir.add_to_diagnostics('dem_invalid_perc_in_mask',
                                    np.sum(pnan) / np.sum(glacier_mask))

        # add some meta stats and close
        dem_on_g = dem[np.where(glacier_mask)]
        nc.max_h_glacier = np.max(dem_on_g)
        nc.min_h_glacier = np.min(dem_on_g)
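# Illustrative sketch (not part of OGGM): a minimal calling sequence for the
# tasks above, modelled on the Baltoro test earlier in this file. The import
# paths follow the modern oggm.core layout and may differ for the older,
# div_id-based versions above; `entity` and `working_dir` are placeholders.
def _example_masks_workflow(entity, working_dir):
    """Run the GIS preprocessing chain for a single outline (sketch)."""
    import oggm
    from oggm import cfg
    from oggm.core import gis, centerlines

    cfg.initialize()                      # load the default params
    gdir = oggm.GlacierDirectory(entity, base_dir=working_dir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)               # the task defined above
    centerlines.compute_centerlines(gdir)
    return gdir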