def _distribute_thickness_per_interp(glacier_mask, topo, cls, fls, grid,
                                     smooth=True, add_slope=True):
    """Distribute ice thickness by interpolation (the actual worker).

    Interpolates thickness between the flowline points (known from the
    inversion) and the glacier border (zero thickness), then rescales so
    that the inverted total volume is conserved.

    Parameters
    ----------
    glacier_mask : ndarray
        0/1 (or boolean) glacier mask on the local grid
    topo : ndarray
        topography on the local grid (used for the slope factor only)
    cls : list of dict
        inversion output per flowline, with keys 'thick' and 'volume'
    fls : list
        the inversion flowlines matching ``cls``
    grid : salem.Grid
        the local glacier grid
    smooth : bool
        whether to gaussian-smooth the interpolated thickness
    add_slope : bool
        whether a corrective slope factor should be applied

    Returns
    -------
    ndarray of distributed ice thickness (0 outside the glacier).
    """

    # Thickness field to interpolate: NaN (unknown) on the glacier,
    # 0 outside (the border condition for the interpolation)
    dx = grid.dx
    thick = np.where(glacier_mask, np.nan, 0)

    # Along the lines: set the known thickness on the flowline pixels
    vs = []
    for cl, fl in zip(cls, fls):
        # TODO: here one should see if parabola is always the best choice
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    vol = np.sum(vs)

    # Interpolate the NaNs from the known (flowline + border) points
    xx, yy = grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')

    # Smooth
    if smooth:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(gsize))
    thick = np.where(glacier_mask, thick, 0.)

    # Slope factor: thicker ice on flatter terrain (shallow-ice scaling)
    slope = 1.
    if add_slope:
        sy, sx = np.gradient(topo, dx, dx)
        slope = np.arctan(np.sqrt(sy**2 + sx**2))
        # Clip very flat areas to avoid unrealistically large factors
        slope = np.clip(slope, np.deg2rad(6.), np.pi / 2.)
        slope = 1 / slope**(cfg.N / (cfg.N + 2))

    # Conserve volume
    tmp_vol = np.nansum(thick * slope * dx**2)
    final_t = thick * slope * vol / tmp_vol

    # Add to grids
    final_t = np.where(np.isfinite(final_t), final_t, 0.)
    assert np.allclose(np.sum(final_t * dx**2), vol)
    return final_t
def g2ti_masks(gdir):
    """Adds the g2ti mask to the netcdf file.

    Reads the glacier DEM, repairs invalid values by interpolation,
    smooths it, reads the g2ti glacier mask from 'g2ti_mask.tif', and
    writes 'topo', 'topo_smoothed' and 'glacier_mask' to the glacier's
    'gridded_data' netcdf file.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    """

    # open srtm tif-file:
    dem_dr = rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff')
    dem = dem_dr.read(1).astype(rasterio.float32)

    # Grid
    nx = dem_dr.width
    ny = dem_dr.height
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    # Correct the DEM (ASTER...)
    # Currently we just do a linear interp -- ASTER is totally shit anyway
    min_z = -999.
    isfinite = np.isfinite(dem)
    if (np.min(dem) <= min_z) or np.any(~isfinite):
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero((dem <= min_z) | (~isfinite))
        # Fix: valid donor points must be above min_z AND finite (the
        # original used `|`, which let bad pixels feed the interpolation)
        pok = np.nonzero((dem > min_z) & isfinite)
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
        dem[pnan] = griddata(points, np.ravel(dem[pok]), inter)
        log.warning(gdir.rgi_id + ': DEM needed interpolation.')

    isfinite = np.isfinite(dem)
    if not np.all(isfinite):
        # see how many percent of the dem
        if np.sum(~isfinite) > (0.2 * nx * ny):
            raise RuntimeError('({}) too many NaNs in DEM'.format(gdir.rgi_id))
        log.warning('({}) DEM needed zeros somewhere.'.format(gdir.rgi_id))
        # Fix: zero-fill the *invalid* pixels (the original indexed with
        # `isfinite`, which wiped the whole valid DEM instead)
        dem[~isfinite] = 0

    if np.min(dem) == np.max(dem):
        raise RuntimeError('({}) min equal max in the DEM.'.format(
            gdir.rgi_id))

    # Projection
    if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
        transf = dem_dr.transform
    else:
        transf = dem_dr.affine
    x0 = transf[2]  # UL corner
    y0 = transf[5]  # UL corner
    dx = transf[0]
    dy = transf[4]  # Negative
    # Fix: *all* of these must hold for the file to match the grid (the
    # original chained them with `or`, so the check could never fire as
    # long as any single condition happened to hold)
    if not (np.allclose(dx, -dy) and
            np.allclose(dx, gdir.grid.dx) and
            np.allclose(y0, gdir.grid.corner_grid.y0, atol=1e-2) and
            np.allclose(x0, gdir.grid.corner_grid.x0, atol=1e-2)):
        raise RuntimeError('DEM file and Salem Grid do not match!')
    dem_dr.close()

    # Clip topography to 0 m a.s.l.
    dem = dem.clip(0)

    # Smooth DEM?
    if cfg.PARAMS['smooth_window'] > 0.:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        smoothed_dem = gaussian_blur(dem, int(gsize))
    else:
        smoothed_dem = dem.copy()
    if not np.all(np.isfinite(smoothed_dem)):
        raise RuntimeError('({}) NaN in smoothed DEM'.format(gdir.rgi_id))

    with xr.open_rasterio(os.path.join(gdir.dir, 'g2ti_mask.tif')) as da:
        glacier_mask = np.squeeze(da.data)

    # Last sanity check based on the masked dem
    tmp_max = np.max(dem[np.where(glacier_mask == 1)])
    tmp_min = np.min(dem[np.where(glacier_mask == 1)])
    if tmp_max < (tmp_min + 1):
        raise RuntimeError('({}) min equal max in the masked DEM.'.format(
            gdir.rgi_id))

    # write out the grids in the netcdf file
    nc = gdir.create_gridded_ncdf_file('gridded_data')

    v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    v.long_name = 'DEM topography'
    v[:] = dem

    v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x', ), zlib=True)
    v.units = 'm'
    # Fix: '{:.1}' renders e.g. 251.0 as '3e+02'; '{:.1f}' was intended
    v.long_name = ('DEM topography smoothed'
                   ' with radius: {:.1f} m'.format(
                       cfg.PARAMS['smooth_window']))
    v[:] = smoothed_dem

    v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
    v.units = '-'
    v.long_name = 'Glacier mask'
    v[:] = glacier_mask

    # add some meta stats and close
    nc.max_h_dem = np.max(dem)
    nc.min_h_dem = np.min(dem)
    dem_on_g = dem[np.where(glacier_mask)]
    nc.max_h_glacier = np.max(dem_on_g)
    nc.min_h_glacier = np.min(dem_on_g)
    nc.close()
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero
        to suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)

    Returns
    -------
    ndarray of distributed ice thickness (NaN outside the glacier).
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')

    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate: zero on the outline (except ice divides),
    # NaN (unknown) elsewhere.  np.nan instead of np.NaN (alias removed
    # in NumPy 2.0).
    thick = glacier_ext * np.nan
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate the NaNs from the known points
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    # Cubic interpolation can overshoot below zero
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.nan
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def distribute_thickness_per_altitude(gdir, add_slope=True,
                                      smooth_radius=None,
                                      dis_from_border_exp=0.25,
                                      varname_suffix=''):
    """Compute a thickness map by redistributing mass along altitudinal
    bands.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero
        to suppress smoothing.
    dis_from_border_exp : float
        the exponent of the distance from border mask
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)

    Returns
    -------
    ndarray of distributed ice thickness (NaN outside the glacier).
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')

    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        topo_smoothed = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]
        dis_from_border = nc.variables['dis_from_border'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    hs, ts, vs, xs, ys = [], [], [], [], []
    for cl, fl in zip(cls, fls):
        hs = np.append(hs, fl.surface_h)
        ts = np.append(ts, cl['thick'])
        vs = np.append(vs, cl['volume'])
        x, y = fl.line.xy
        xs = np.append(xs, x)
        ys = np.append(ys, y)
    init_vol = np.sum(vs)

    # Assign a first order thickness to the points
    # very inefficient inverse distance stuff
    thick = glacier_mask * np.nan
    for y in range(thick.shape[0]):
        for x in range(thick.shape[1]):
            phgt = topo_smoothed[y, x]
            # take the ones in a 100m range, widening until we find some
            starth = 100.
            while True:
                starth += 10
                pok = np.nonzero(np.abs(phgt - hs) <= starth)[0]
                if len(pok) != 0:
                    break
            sqr = np.sqrt((xs[pok] - x)**2 + (ys[pok] - y)**2)
            pzero = np.where(sqr == 0)
            if len(pzero[0]) == 0:
                thick[y, x] = np.average(ts[pok], weights=1 / sqr)
            elif len(pzero[0]) == 1:
                # Index the scalar (assigning the length-1 array
                # ts[pzero] is an error in NumPy >= 1.25)
                thick[y, x] = ts[pok[pzero[0][0]]]
            else:
                raise RuntimeError('We should not be there')

    # Distance from border (normalized)
    dis_from_border = dis_from_border**dis_from_border_exp
    dis_from_border /= np.mean(dis_from_border[glacier_mask == 1])
    thick *= dis_from_border

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask (np.nan instead of np.NaN, alias removed in NumPy 2.0)
    thick = thick.clip(0)
    thick[glacier_mask == 0] = np.nan
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def glacier_masks_itmix(gdir):
    """Converts the glacier vector geometries to grids.

    Uses where possible the ITMIX DEM.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    """

    # open srtm tif-file:
    dem_ds = gdal.Open(gdir.get_filepath('dem'))
    dem = dem_ds.ReadAsArray().astype(float)

    # Correct the DEM (ASTER...)
    # Currently we just do a linear interp -- ASTER is totally shit anyway
    min_z = -999.
    if np.min(dem) <= min_z:
        xx, yy = gdir.grid.ij_coordinates
        pnan = np.nonzero(dem <= min_z)
        pok = np.nonzero(dem > min_z)
        if len(pok[0]) > 0:
            points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
            inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter)
            msg = gdir.rgi_id + ': DEM needed interpolation'
            msg += '({:.1f}% missing).'.format(
                len(pnan[0]) / len(dem.flatten()) * 100)
            log.warning(msg)
        else:
            # No valid pixel at all: invalidate the whole DEM and hope
            # the ITMIX DEM below replaces it.  np.nan instead of np.NaN
            # (alias removed in NumPy 2.0).
            dem = dem * np.nan

    # Replace DEM values with ITMIX ones where possible
    # Open DEM
    dem_f = None
    n_g = gdir.name.split(':')[-1]
    searchf = os.path.join(DATA_DIR, 'itmix', 'glaciers_sorted', '*')
    searchf = os.path.join(searchf, '02_surface_' + n_g + '_*.asc')
    # keep the last match (if any)
    for dem_f in glob.glob(searchf):
        pass
    if dem_f is None:
        # try synth
        n_g = gdir.rgi_id
        searchf = os.path.join(DATA_DIR, 'itmix', 'glaciers_synth', '*')
        searchf = os.path.join(searchf, '02_surface_' + n_g + '*.asc')
        for dem_f in glob.glob(searchf):
            pass
    if dem_f is not None:
        log.info('%s: ITMIX DEM file: %s', gdir.rgi_id, dem_f)
        it_dem_ds = EsriITMIX(dem_f)
        it_dem = it_dem_ds.get_vardata()
        it_dem = np.where(it_dem < -999., np.nan, it_dem)
        # for some glaciers, trick
        if n_g in ['Academy', 'Devon']:
            it_dem = np.where(it_dem <= 0, np.nan, it_dem)
            it_dem = np.where(np.isfinite(it_dem), it_dem,
                              np.nanmin(it_dem))
        if n_g in ['Brewster', 'Austfonna']:
            it_dem = np.where(it_dem <= 0, np.nan, it_dem)
        # Transform to local grid
        it_dem = gdir.grid.map_gridded_data(it_dem, it_dem_ds.grid,
                                            interp='linear')
        # And update values where possible
        if n_g in ['Synthetic2', 'Synthetic1']:
            dem = np.where(~it_dem.mask, it_dem, np.nanmin(it_dem))
        else:
            dem = np.where(~it_dem.mask, it_dem, dem)
    else:
        if 'Devon' in n_g:
            raise RuntimeError('Should have found DEM for Devon')

    # Disallow negative
    dem = dem.clip(0)

    # Grid
    nx = dem_ds.RasterXSize
    ny = dem_ds.RasterYSize
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny

    # Proj
    geot = dem_ds.GetGeoTransform()
    x0 = geot[0]  # UL corner
    y0 = geot[3]  # UL corner
    dx = geot[1]
    dy = geot[5]  # Negative
    # Tolerant float comparisons instead of exact `==` on doubles read
    # from the geotransform
    assert np.allclose(dx, -dy)
    assert np.allclose(dx, gdir.grid.dx)
    assert np.allclose(y0, gdir.grid.corner_grid.y0)
    assert np.allclose(x0, gdir.grid.corner_grid.x0)
    dem_ds = None  # to be sure...

    # Smooth SRTM?
    if cfg.PARAMS['smooth_window'] > 0.:
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        smoothed_dem = gaussian_blur(dem, int(gsize))
    else:
        smoothed_dem = dem.copy()

    # Make entity masks
    log.debug('%s: glacier mask, divide %d', gdir.rgi_id, 0)
    _mask_per_divide(gdir, 0, dem, smoothed_dem)

    # Glacier divides
    nd = gdir.n_divides
    if nd == 1:
        # Optim: just make links
        linkname = gdir.get_filepath('gridded_data', div_id=1)
        sourcename = gdir.get_filepath('gridded_data')
        # overwrite as default
        if os.path.exists(linkname):
            os.remove(linkname)
        # TODO: temporary suboptimal solution
        try:
            # we are on UNIX
            os.link(sourcename, linkname)
        except AttributeError:
            # we are on windows
            copyfile(sourcename, linkname)
        linkname = gdir.get_filepath('geometries', div_id=1)
        sourcename = gdir.get_filepath('geometries')
        # overwrite as default
        if os.path.exists(linkname):
            os.remove(linkname)
        # TODO: temporary suboptimal solution
        try:
            # we are on UNIX
            os.link(sourcename, linkname)
        except AttributeError:
            # we are on windows
            copyfile(sourcename, linkname)
    else:
        # Loop over divides
        for i in gdir.divide_ids:
            log.debug('%s: glacier mask, divide %d', gdir.rgi_id, i)
            _mask_per_divide(gdir, i, dem, smoothed_dem)
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    NOTE(review): this is a second definition of the same name in this
    file and shadows the earlier one — confirm which copy should be kept.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero
        to suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)

    Returns
    -------
    ndarray of distributed ice thickness (NaN outside the glacier).
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')

    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate: zero on the outline (except ice divides),
    # NaN (unknown) elsewhere.  np.nan instead of np.NaN (alias removed
    # in NumPy 2.0).
    thick = glacier_ext * np.nan
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate the NaNs from the known points
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    # Cubic interpolation can overshoot below zero
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.nan
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def distribute_thickness_per_altitude(gdir, add_slope=True,
                                      smooth_radius=None,
                                      dis_from_border_exp=0.25,
                                      varname_suffix=''):
    """Compute a thickness map by redistributing mass along altitudinal
    bands.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    NOTE(review): this is a second definition of the same name in this
    file and shadows the earlier one — confirm which copy should be kept.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero
        to suppress smoothing.
    dis_from_border_exp : float
        the exponent of the distance from border mask
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)

    Returns
    -------
    ndarray of distributed ice thickness (NaN outside the glacier).
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')

    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        topo_smoothed = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]
        dis_from_border = nc.variables['dis_from_border'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    hs, ts, vs, xs, ys = [], [], [], [], []
    for cl, fl in zip(cls, fls):
        hs = np.append(hs, fl.surface_h)
        ts = np.append(ts, cl['thick'])
        vs = np.append(vs, cl['volume'])
        x, y = fl.line.xy
        xs = np.append(xs, x)
        ys = np.append(ys, y)
    init_vol = np.sum(vs)

    # Assign a first order thickness to the points
    # very inefficient inverse distance stuff
    thick = glacier_mask * np.nan
    for y in range(thick.shape[0]):
        for x in range(thick.shape[1]):
            phgt = topo_smoothed[y, x]
            # take the ones in a 100m range, widening until we find some
            starth = 100.
            while True:
                starth += 10
                pok = np.nonzero(np.abs(phgt - hs) <= starth)[0]
                if len(pok) != 0:
                    break
            sqr = np.sqrt((xs[pok] - x)**2 + (ys[pok] - y)**2)
            pzero = np.where(sqr == 0)
            if len(pzero[0]) == 0:
                thick[y, x] = np.average(ts[pok], weights=1 / sqr)
            elif len(pzero[0]) == 1:
                # Index the scalar (assigning the length-1 array
                # ts[pzero] is an error in NumPy >= 1.25)
                thick[y, x] = ts[pok[pzero[0][0]]]
            else:
                raise RuntimeError('We should not be there')

    # Distance from border (normalized)
    dis_from_border = dis_from_border**dis_from_border_exp
    dis_from_border /= np.mean(dis_from_border[glacier_mask == 1])
    thick *= dis_from_border

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask (np.nan instead of np.NaN, alias removed in NumPy 2.0)
    thick = thick.clip(0)
    thick[glacier_mask == 0] = np.nan
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
def _distribute_thickness_per_altitude(glacier_mask, topo, cls, fls, grid,
                                       add_slope=True, smooth=True):
    """Distribute ice thickness per altitudinal band (the actual worker).

    Assigns a first-order thickness to each glacier pixel by
    inverse-distance averaging the flowline thicknesses at similar
    elevations, then modulates by distance-from-border and slope and
    rescales to conserve the inverted volume.

    Parameters
    ----------
    glacier_mask : ndarray
        0/1 (or boolean) glacier mask on the local grid
    topo : ndarray
        topography on the local grid
    cls : list of dict
        inversion output per flowline, with keys 'thick' and 'volume'
    fls : list
        the inversion flowlines matching ``cls``
    grid : salem.Grid
        the local glacier grid
    add_slope : bool
        whether a corrective slope factor should be applied
    smooth : bool
        whether to gaussian-smooth the first-order thickness

    Returns
    -------
    ndarray of distributed ice thickness (0 outside the glacier).
    """

    # Along the lines
    dx = grid.dx
    hs, ts, vs, xs, ys = [], [], [], [], []
    for cl, fl in zip(cls, fls):
        # TODO: here one should see if parabola is always the best choice
        hs = np.append(hs, fl.surface_h)
        ts = np.append(ts, cl['thick'])
        vs = np.append(vs, cl['volume'])
        x, y = fl.line.xy
        xs = np.append(xs, x)
        ys = np.append(ys, y)
    vol = np.sum(vs)

    # very inefficient inverse distance stuff
    to_compute = np.nonzero(glacier_mask)
    # np.nan instead of np.NaN (alias removed in NumPy 2.0)
    thick = topo * np.nan
    for (y, x) in np.asarray(to_compute).T:
        assert glacier_mask[y, x] == 1
        phgt = topo[y, x]
        # take the ones in a 100m range, widening until we find some
        starth = 100.
        while True:
            starth += 10
            pok = np.nonzero(np.abs(phgt - hs) <= starth)[0]
            if len(pok) != 0:
                break
        sqr = np.sqrt((xs[pok] - x)**2 + (ys[pok] - y)**2)
        pzero = np.where(sqr == 0)
        if len(pzero[0]) == 0:
            thick[y, x] = np.average(ts[pok], weights=1 / sqr)
        elif len(pzero[0]) == 1:
            # Index the scalar (assigning the length-1 array ts[pzero]
            # is an error in NumPy >= 1.25)
            thick[y, x] = ts[pok[pzero[0][0]]]
        else:
            raise RuntimeError('We should not be there')

    # Smooth
    if smooth:
        thick = np.where(np.isfinite(thick), thick, 0.)
        gsize = np.rint(cfg.PARAMS['smooth_window'] / dx)
        # int() instead of np.int (alias removed in NumPy >= 1.24)
        thick = gaussian_blur(thick, int(gsize))
    thick = np.where(glacier_mask, thick, 0.)

    # Distance-from-border weighting (sqrt of the euclidean distance)
    dis = distance_transform_edt(glacier_mask)
    dis = np.where(glacier_mask, dis, np.nan)**0.5

    # Slope factor: thicker ice on flatter terrain (shallow-ice scaling)
    slope = 1.
    if add_slope:
        sy, sx = np.gradient(topo, dx, dx)
        slope = np.arctan(np.sqrt(sy**2 + sx**2))
        # Clip very flat areas to avoid unrealistically large factors
        slope = np.clip(slope, np.deg2rad(6.), np.pi / 2.)
        slope = 1 / slope**(cfg.N / (cfg.N + 2))

    # Conserve volume
    tmp_vol = np.nansum(thick * dis * slope * dx**2)
    final_t = thick * dis * slope * vol / tmp_vol

    # Done
    final_t = np.where(np.isfinite(final_t), final_t, 0.)
    assert np.allclose(np.sum(final_t * dx**2), vol)
    return final_t