def __enter__(self):
    if os.path.exists(self.fpath):
        # Already there - just append
        self.nc = ncDataset(self.fpath, 'a', format='NETCDF4')
        return self.nc

    # Create and fill
    nc = ncDataset(self.fpath, 'w', format='NETCDF4')

    nc.createDimension('x', self.grid.nx)
    nc.createDimension('y', self.grid.ny)

    nc.author = 'OGGM'
    nc.author_info = 'Open Global Glacier Model'
    nc.proj_srs = self.grid.proj.srs

    x = self.grid.x0 + np.arange(self.grid.nx) * self.grid.dx
    y = self.grid.y0 + np.arange(self.grid.ny) * self.grid.dy

    v = nc.createVariable('x', 'f4', ('x',), zlib=True)
    v.units = 'm'
    v.long_name = 'x coordinate of projection'
    v.standard_name = 'projection_x_coordinate'
    v[:] = x

    v = nc.createVariable('y', 'f4', ('y',), zlib=True)
    v.units = 'm'
    v.long_name = 'y coordinate of projection'
    v.standard_name = 'projection_y_coordinate'
    v[:] = y

    self.nc = nc
    return nc
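# Usage sketch for the context manager above. The class name
# `GriddedNcdfFile` and its constructor arguments are assumptions for
# illustration only - the class holding this __enter__ is not shown here.
# `grid` is assumed to be a salem-style grid (nx/ny/x0/y0/dx/dy/proj).
def _example_write_gridded_file(fpath, grid, topo_data):
    with GriddedNcdfFile(fpath=fpath, grid=grid) as nc:  # hypothetical class
        v = nc.createVariable('topo', 'f4', ('y', 'x',), zlib=True)
        v.units = 'm'
        v.long_name = 'DEM topography'
        v[:] = topo_data  # any (ny, nx) array matching the grid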
def plot_distributed_thickness(gdirs, ax=None, smap=None, varname_suffix=''):
    """Plots the result of the inversion out of a glacier directory.

    Method: 'alt' or 'interp'
    """
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
    smap.set_topography(topo)

    for gdir in gdirs:
        grids_file = gdir.get_filepath('gridded_data')
        with utils.ncDataset(grids_file) as nc:
            import warnings
            with warnings.catch_warnings():
                # https://github.com/Unidata/netcdf4-python/issues/766
                warnings.filterwarnings("ignore", category=RuntimeWarning)
                vn = 'distributed_thickness' + varname_suffix
                thick = nc.variables[vn][:]
            mask = nc.variables['glacier_mask'][:]

        thick = np.where(mask, thick, np.NaN)

        crs = gdir.grid.center_grid

        # Plot boundaries
        # Try to read geometries.pkl as the glacier boundary;
        # if it can't be found, we use the shapefile instead.
        try:
            geom = gdir.read_pickle('geometries')
            poly_pix = geom['polygon_pix']
            smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                              linewidth=.2)
            for l in poly_pix.interiors:
                smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)
        except FileNotFoundError:
            smap.set_shapefile(gdir.read_shapefile('outlines'), fc='none')

        smap.set_data(thick, crs=crs, overplot=True)

    smap.set_plot_params(cmap=OGGM_CMAPS['glacier_thickness'])
    smap.plot(ax)
    return dict(cbar_label='Glacier thickness [m]')
def __init__(self, gdir, mu_star=None, bias=None, y0=None, halfsize=15,
             filename='climate_historical', input_filesuffix='', **kwargs):
    """Initialize

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier directory
    mu_star : float, optional
        set to the alternative value of mu* you want to use
        (the default is to use the calibrated value)
    bias : float, optional
        set to the alternative value of the annual bias [mm we yr-1]
        you want to use (the default is to use the calibrated value)
    y0 : int, optional, default: tstar
        the year at the center of the period of interest. The default
        is to use tstar as center.
    halfsize : int, optional
        the half-size of the time window (window size = 2 * halfsize + 1)
    filename : str, optional
        set to a different BASENAME if you want to use alternative
        climate data.
    input_filesuffix : str
        the file suffix of the input climate file
    """
    super(ConstantMassBalance, self).__init__()
    self.mbmod = PastMassBalance(gdir, mu_star=mu_star, bias=bias,
                                 filename=filename,
                                 input_filesuffix=input_filesuffix,
                                 **kwargs)

    if y0 is None:
        df = gdir.read_json('local_mustar')
        y0 = df['t_star']

    # This is a quick'n dirty optimisation
    try:
        fls = gdir.read_pickle('model_flowlines')
        h = []
        for fl in fls:
            # We use bed because of overdeepenings
            h = np.append(h, fl.bed_h)
            h = np.append(h, fl.surface_h)
        zminmax = np.round([np.min(h) - 50, np.max(h) + 2000])
    except FileNotFoundError:
        # in case we don't have them
        with ncDataset(gdir.get_filepath('gridded_data')) as nc:
            if np.isfinite(nc.min_h_dem):
                # a bug sometimes led to non-finite
                zminmax = [nc.min_h_dem - 250, nc.max_h_dem + 1500]
            else:
                zminmax = [nc.min_h_glacier - 1250, nc.max_h_glacier + 1500]
    self.hbins = np.arange(*zminmax, step=10)
    self.valid_bounds = self.hbins[[0, -1]]
    self.y0 = y0
    self.halfsize = halfsize
    self.years = np.arange(y0 - halfsize, y0 + halfsize + 1)
    self.hemisphere = gdir.hemisphere
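# Hedged usage sketch for the constructor above: assumes `gdir` is a fully
# pre-processed oggm.GlacierDirectory with a calibrated t_star available in
# 'local_mustar'. The model averages climate over the 31 years around t_star.
def _example_constant_mb(gdir):
    import numpy as np
    mb = ConstantMassBalance(gdir, halfsize=15)
    heights = np.linspace(2000, 3600, 100)  # m a.s.l., illustrative only
    # get_annual_mb returns mass balance in m of ice per second
    # (standard OGGM MassBalanceModel convention)
    return mb.get_annual_mb(heights)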
def plot_inversion(gdirs, ax=None, smap=None, linewidth=3, vmax=None):
    """Plots the result of the inversion out of a glacier directory."""
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    toplot_th = np.array([])
    toplot_lines = []
    toplot_crs = []
    vol = []
    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')
        inv = gdir.read_pickle('inversion_output')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                          linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = gdir.read_pickle('inversion_flowlines')
        for l, c in zip(cls, inv):
            smap.set_geometry(l.line, crs=crs, color='gray',
                              linewidth=1.2, zorder=50)
            toplot_th = np.append(toplot_th, c['thick'])
            for wi, cur, (n1, n2) in zip(l.widths, l.line.coords, l.normals):
                line = shpg.LineString([shpg.Point(cur + wi / 2. * n1),
                                        shpg.Point(cur + wi / 2. * n2)])
                toplot_lines.append(line)
                toplot_crs.append(crs)
            vol.extend(c['volume'])

    cm = plt.cm.get_cmap('YlOrRd')
    dl = salem.DataLevels(cmap=cm, nlevels=256, data=toplot_th,
                          vmin=0, vmax=vmax)
    colors = dl.to_rgb()
    for l, c, crs in zip(toplot_lines, colors, toplot_crs):
        smap.set_geometry(l, crs=crs, color=c,
                          linewidth=linewidth, zorder=50)
    smap.plot(ax)
    return dict(cbar_label='Section thickness [m]',
                cbar_primitive=dl,
                title_comment=' ({:.2f} km3)'.format(np.nansum(vol) * 1e-9))
def plot_domain(gdirs, ax=None, smap=None):
    """Plot the glacier directory."""
    # Files
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    try:
        smap.set_data(topo)
    except ValueError:
        pass

    cm = truncate_colormap(ALTITUDE_CMAP, minval=0.25, maxval=1.0, n=256)
    smap.set_cmap(cm)
    smap.set_plot_params(nlevels=256)

    for gdir in gdirs:
        crs = gdir.grid.center_grid

        geom = gdir.read_pickle('geometries')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='white',
                          alpha=0.3, zorder=2, linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

    smap.plot(ax)
    return dict(cbar_label='Alt. [m]')
def plot_raster(gdirs, var_name=None, cmap='viridis', ax=None, smap=None):
    """Plot any raster from the gridded_data file."""
    # Files
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        var = nc.variables[var_name]
        data = var[:]
        description = var.long_name
        description += ' [{}]'.format(var.units)

    smap.set_data(data)
    smap.set_cmap(cmap)

    for gdir in gdirs:
        crs = gdir.grid.center_grid

        try:
            geom = gdir.read_pickle('geometries')

            # Plot boundaries
            poly_pix = geom['polygon_pix']

            smap.set_geometry(poly_pix, crs=crs, fc='none', alpha=0.3,
                              zorder=2, linewidth=.2)
            poly_pix = utils.tolist(poly_pix)
            for _poly in poly_pix:
                for l in _poly.interiors:
                    smap.set_geometry(l, crs=crs, color='black',
                                      linewidth=0.5)
        except FileNotFoundError:
            smap.set_shapefile(gdir.read_shapefile('outlines'))

    smap.plot(ax)
    return dict(cbar_label='\n'.join(textwrap.wrap(description, 30)))
def plot_centerlines(gdirs, ax=None, smap=None, use_flowlines=False,
                     add_downstream=False, lines_cmap='Set1'):
    """Plots the centerlines of a glacier directory."""
    if add_downstream and not use_flowlines:
        raise ValueError('Downstream lines can be plotted with flowlines '
                         'only')

    # Files
    filename = 'centerlines'
    if use_flowlines:
        filename = 'inversion_flowlines'

    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    cm = truncate_colormap(colormap.terrain, minval=0.25, maxval=1.0, n=256)
    smap.set_cmap(cm)
    smap.set_plot_params(nlevels=256)
    smap.set_data(topo)

    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='white',
                          alpha=0.3, zorder=2, linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = gdir.read_pickle(filename)

        # Go in reverse order for red always being the longest
        cls = cls[::-1]
        color = gencolor(len(cls) + 1, cmap=lines_cmap)
        for l, c in zip(cls, color):
            if add_downstream and not gdir.is_tidewater and l is cls[0]:
                line = gdir.read_pickle('downstream_line')['full_line']
            else:
                line = l.line

            smap.set_geometry(line, crs=crs, color=c,
                              linewidth=2.5, zorder=50)
            smap.set_geometry(l.head, crs=gdir.grid, marker='o',
                              markersize=60, alpha=0.8, color=c, zorder=99)

            for j in l.inflow_points:
                smap.set_geometry(j, crs=crs, marker='o',
                                  markersize=40, edgecolor='k', alpha=0.8,
                                  zorder=99, facecolor='none')

    smap.plot(ax)
    return dict(cbar_label='Alt. [m]')
def plot_distributed_thickness(gdirs, ax=None, smap=None, varname_suffix=''):
    """Plots the result of the inversion out of a glacier directory.

    Method: 'alt' or 'interp'
    """
    gdir = gdirs[0]
    if len(gdirs) > 1:
        raise NotImplementedError('Cannot plot a list of gdirs (yet)')

    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
        mask = nc.variables['glacier_mask'][:]

    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file) as nc:
        import warnings
        with warnings.catch_warnings():
            # https://github.com/Unidata/netcdf4-python/issues/766
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            vn = 'distributed_thickness' + varname_suffix
            thick = nc.variables[vn][:]

    thick = np.where(mask, thick, np.NaN)

    smap.set_topography(topo)

    crs = gdir.grid.center_grid

    geom = gdir.read_pickle('geometries')

    # Plot boundaries
    poly_pix = geom['polygon_pix']
    smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2)
    for l in poly_pix.interiors:
        smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

    smap.set_cmap(GLACIER_THICKNESS_CMAP)
    smap.set_plot_params(nlevels=256)
    smap.set_data(thick)

    smap.plot(ax)

    return dict(cbar_label='Glacier thickness [m]')
def plot_distributed_thickness(gdirs, ax=None, smap=None, varname_suffix=''):
    """Plots the result of the inversion out of a glacier directory.

    Method: 'alt' or 'interp'
    """
    gdir = gdirs[0]
    if len(gdirs) > 1:
        raise NotImplementedError('Cannot plot a list of gdirs (yet)')

    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
        mask = nc.variables['glacier_mask'][:]

    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file) as nc:
        import warnings
        with warnings.catch_warnings():
            # https://github.com/Unidata/netcdf4-python/issues/766
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            vn = 'distributed_thickness' + varname_suffix
            thick = nc.variables[vn][:]

    thick = np.where(mask, thick, np.NaN)

    smap.set_topography(topo)

    crs = gdir.grid.center_grid

    geom = gdir.read_pickle('geometries')

    # Plot boundaries
    poly_pix = geom['polygon_pix']
    smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2)
    for l in poly_pix.interiors:
        smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

    smap.set_cmap(OGGM_CMAPS['glacier_thickness'])
    smap.set_plot_params(nlevels=256)
    smap.set_data(thick)

    smap.plot(ax)

    return dict(cbar_label='Glacier thickness [m]')
def add_consensus_thickness(gdir, base_url=None):
    """Add the consensus thickness estimate to the gridded_data file.

    varname: consensus_ice_thickness

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    base_url : str
        where to find the thickness data. Default is
        https://cluster.klima.uni-bremen.de/~fmaussion/icevol/composite
    """
    if base_url is None:
        base_url = default_base_url
    if not base_url.endswith('/'):
        base_url += '/'

    rgi_str = gdir.rgi_id
    rgi_reg_str = rgi_str[:8]

    url = base_url + rgi_reg_str + '/' + rgi_str + '_thickness.tif'
    input_file = utils.file_downloader(url)

    dsb = salem.GeoTiff(input_file)
    thick = utils.clip_min(dsb.get_vardata(), 0)
    in_volume = thick.sum() * dsb.grid.dx ** 2
    thick = gdir.grid.map_gridded_data(thick, dsb.grid, interp='linear')

    # Correct for volume
    thick = utils.clip_min(thick.filled(0), 0)
    out_volume = thick.sum() * gdir.grid.dx ** 2
    if out_volume > 0:
        thick *= in_volume / out_volume

    # We mask zero ice as nodata
    thick = np.where(thick == 0, np.NaN, thick)

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'consensus_ice_thickness'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice thickness from the consensus estimate'
        v.long_name = ln
        v.base_url = base_url
        v[:] = thick
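# Hedged usage sketch for the task above: assumes `gdir` is an
# oggm.GlacierDirectory whose 'gridded_data' file already exists (i.e.
# gis.glacier_masks has run). The regridded, volume-conserving map is then
# available under the variable name written by the task.
def _example_add_consensus(gdir):
    add_consensus_thickness(gdir)
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        return nc.variables['consensus_ice_thickness'][:]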
def test_set_width(self):
    entity = gpd.read_file(self.rgi_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # Test that area and area-altitude elev is fine
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo_smoothed'][:]
    rhgt = topo[np.where(mask)][:]

    fls = gdir.read_pickle('inversion_flowlines')

    hgt, widths = gdir.get_inversion_flowline_hw()

    bs = 100
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)

    centerlines.terminus_width_correction(gdir, new_width=714)

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    # Check that the width is ok
    np.testing.assert_allclose(fls[-1].widths[-1] * gdir.grid.dx, 714)

    # Check for area distrib
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)
def test_set_width(self):
    entity = gpd.read_file(self.rgi_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # Test that area and area-altitude elev is fine
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo_smoothed'][:]
    rhgt = topo[np.where(mask)][:]

    fls = gdir.read_pickle('inversion_flowlines')

    hgt, widths = gdir.get_inversion_flowline_hw()

    bs = 100
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)

    centerlines.terminus_width_correction(gdir, new_width=714)

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    # Check that the width is ok
    np.testing.assert_allclose(fls[-1].widths[-1] * gdir.grid.dx, 714)

    # Check for area distrib
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)
def __init__(self, gdir, mu_star=None, bias=None, y0=None, halfsize=15,
             filename='climate_monthly', input_filesuffix=''):
    """Initialize

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier directory
    mu_star : float, optional
        set to the alternative value of mu* you want to use
        (the default is to use the calibrated value)
    bias : float, optional
        set to the alternative value of the annual bias [mm we yr-1]
        you want to use (the default is to use the calibrated value)
    y0 : int, optional, default: tstar
        the year at the center of the period of interest. The default
        is to use tstar as center.
    halfsize : int, optional
        the half-size of the time window (window size = 2 * halfsize + 1)
    filename : str, optional
        set to a different BASENAME if you want to use alternative
        climate data.
    input_filesuffix : str
        the file suffix of the input climate file
    """
    super(ConstantMassBalance, self).__init__()
    self.mbmod = PastMassBalance(gdir, mu_star=mu_star, bias=bias,
                                 filename=filename,
                                 input_filesuffix=input_filesuffix)

    if y0 is None:
        df = gdir.read_json('local_mustar')
        y0 = df['t_star']

    # This is a quick'n dirty optimisation
    try:
        fls = gdir.read_pickle('model_flowlines')
        h = []
        for fl in fls:
            # We use bed because of overdeepenings
            h = np.append(h, fl.bed_h)
            h = np.append(h, fl.surface_h)
        zminmax = np.round([np.min(h) - 50, np.max(h) + 2000])
    except FileNotFoundError:
        # in case we don't have them
        with ncDataset(gdir.get_filepath('gridded_data')) as nc:
            zminmax = [nc.min_h_dem - 250, nc.max_h_dem + 1500]
    self.hbins = np.arange(*zminmax, step=10)
    self.valid_bounds = self.hbins[[0, -1]]
    self.y0 = y0
    self.halfsize = halfsize
    self.years = np.arange(y0 - halfsize, y0 + halfsize + 1)
def plot_inversion(gdirs, ax=None, smap=None, linewidth=3, vmax=None):
    """Plots the result of the inversion out of a glacier directory."""
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    toplot_th = np.array([])
    toplot_lines = []
    toplot_crs = []
    vol = []
    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')
        inv = gdir.read_pickle('inversion_output')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                          linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = gdir.read_pickle('inversion_flowlines')
        for l, c in zip(cls, inv):
            smap.set_geometry(l.line, crs=crs, color='gray',
                              linewidth=1.2, zorder=50)
            toplot_th = np.append(toplot_th, c['thick'])
            for wi, cur, (n1, n2) in zip(l.widths, l.line.coords, l.normals):
                line = shpg.LineString([shpg.Point(cur + wi / 2. * n1),
                                        shpg.Point(cur + wi / 2. * n2)])
                toplot_lines.append(line)
                toplot_crs.append(crs)
            vol.extend(c['volume'])

    dl = salem.DataLevels(cmap=SECTION_THICKNESS_CMAP, nlevels=256,
                          data=toplot_th, vmin=0, vmax=vmax)
    colors = dl.to_rgb()
    for l, c, crs in zip(toplot_lines, colors, toplot_crs):
        smap.set_geometry(l, crs=crs, color=c,
                          linewidth=linewidth, zorder=50)
    smap.plot(ax)
    return dict(cbar_label='Section thickness [m]',
                cbar_primitive=dl,
                title_comment=' ({:.2f} km3)'.format(np.nansum(vol) * 1e-9))
def get_mean_temps_2k(rgi, return_prcp):
    from oggm import cfg, utils, workflow, tasks
    from oggm.core.massbalance import PastMassBalance

    # Initialize OGGM
    cfg.initialize()
    wd = utils.gettempdir(reset=True)
    cfg.PATHS['working_dir'] = wd
    utils.mkdir(wd, reset=True)
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # and set standard histalp values
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['prcp_scaling_factor'] = 1.75

    gdir = workflow.init_glacier_regions(rgidf=rgi.split('_')[0],
                                         from_prepro_level=3,
                                         prepro_border=10)[0]
    # run histalp climate on glacier!
    tasks.process_histalp_data(gdir)

    f = gdir.get_filepath('climate_historical')
    with utils.ncDataset(f) as nc:
        refhgt = nc.ref_hgt

    mb = PastMassBalance(gdir, check_calib_params=False)

    df = pd.DataFrame()
    df2 = pd.DataFrame()

    for y in np.arange(1870, 2015):
        for i in np.arange(9, 12):
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.get_monthly_climate([refhgt], flyear)[0]
            df.loc[y, i] = tmp.mean()

        if return_prcp:
            for i in np.arange(3, 6):
                flyear = utils.date_to_floatyear(y, i)
                pcp = mb.get_monthly_climate([refhgt], flyear)[3]
                df2.loc[y, i] = pcp.mean()

    t99 = df.loc[1984:2014, :].mean().mean()
    t85 = df.loc[1870:1900, :].mean().mean()
    t2k = df.loc[1900:2000, :].mean().mean()

    if return_prcp:
        p99 = df2.loc[1984:2014, :].mean().mean()
        p85 = df2.loc[1870:1900, :].mean().mean()
        p2k = df2.loc[1900:2000, :].mean().mean()
        return t85, t99, t2k, p85, p99, p2k

    return t85, t99, t2k
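# Hedged usage sketch for the helper above: `rgi` is an RGI id string (the
# '_' handling mirrors rgi.split('_')[0] in the function). The id below is
# illustrative - HISTALP only covers glaciers in the European Alps.
def _example_mean_temps():
    t85, t99, t2k = get_mean_temps_2k('RGI60-11.00897', return_prcp=False)
    return t2k - t85  # approximate warming since the late 19th century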
def plot_domain(gdirs, ax=None, smap=None, use_netcdf=False):
    """Plot the glacier directory.

    Parameters
    ----------
    gdirs
    ax
    smap
    use_netcdf : bool
        use output of glacier_masks instead of geotiff DEM
    """
    # Files
    gdir = gdirs[0]
    if use_netcdf:
        with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
            topo = nc.variables['topo'][:]
    else:
        topo = gis.read_geotiff_dem(gdir)
    try:
        smap.set_data(topo)
    except ValueError:
        pass

    cm = truncate_colormap(OGGM_CMAPS['terrain'], minval=0.25, maxval=1.0)
    smap.set_cmap(cm)
    smap.set_plot_params(nlevels=256)

    for gdir in gdirs:
        crs = gdir.grid.center_grid

        try:
            geom = gdir.read_pickle('geometries')

            # Plot boundaries
            poly_pix = geom['polygon_pix']
            smap.set_geometry(poly_pix, crs=crs, fc='white',
                              alpha=0.3, zorder=2, linewidth=.2)
            poly_pix = utils.tolist(poly_pix)
            for _poly in poly_pix:
                for l in _poly.interiors:
                    smap.set_geometry(l, crs=crs, color='black',
                                      linewidth=0.5)
        except FileNotFoundError:
            smap.set_shapefile(gdir.read_shapefile('outlines'))

    smap.plot(ax)
    return dict(cbar_label='Alt. [m]')
def plot_catchment_areas(gdirs, ax=None, smap=None, lines_cmap='Set1',
                         mask_cmap='Set2'):
    """Plots the catchments out of a glacier directory."""
    gdir = gdirs[0]
    if len(gdirs) > 1:
        raise NotImplementedError('Cannot plot a list of gdirs (yet)')

    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
        mask = nc.variables['glacier_mask'][:] * np.NaN

    smap.set_topography(topo)

    crs = gdir.grid.center_grid
    geom = gdir.read_pickle('geometries')

    # Plot boundaries
    poly_pix = geom['polygon_pix']
    smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2)
    for l in poly_pix.interiors:
        smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

    # plot Centerlines
    cls = gdir.read_pickle('centerlines')[::-1]
    color = gencolor(len(cls) + 1, cmap=lines_cmap)
    for l, c in zip(cls, color):
        smap.set_geometry(l.line, crs=crs, color=c,
                          linewidth=2.5, zorder=50)

    # catchment areas
    cis = gdir.read_pickle('geometries')['catchment_indices']
    for j, ci in enumerate(cis[::-1]):
        mask[tuple(ci.T)] = j + 1
    smap.set_cmap(mask_cmap)
    smap.set_data(mask)

    smap.plot(ax)
    return {}
def __init__(self, gdir, mu_star=None, bias=None,
             filename='climate_monthly', input_filesuffix='',
             repeat=False, ys=None, ye=None, check_calib_params=True):
    """Initialize.

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier directory
    mu_star : float, optional
        set to the alternative value of mu* you want to use
        (the default is to use the calibrated value).
    bias : float, optional
        set to the alternative value of the calibration bias [mm we yr-1]
        you want to use (the default is to use the calibrated value)
        Note that this bias is *subtracted* from the computed MB. Indeed:
        BIAS = MODEL_MB - REFERENCE_MB.
    filename : str, optional
        set to a different BASENAME if you want to use alternative
        climate data.
    input_filesuffix : str
        the file suffix of the input climate file
    repeat : bool
        Whether the climate period given by [ys, ye] should be repeated
        indefinitely in a circular way
    ys : int
        The start of the climate period where the MB model is valid
        (default: the period with available data)
    ye : int
        The end of the climate period where the MB model is valid
        (default: the period with available data)
    check_calib_params : bool
        OGGM will try hard not to use wrongly calibrated mu* by checking
        the parameters used during calibration and the ones you are using
        at run time. If they don't match, it will raise an error.
        Set to False to suppress this check.

    Attributes
    ----------
    temp_bias : float, default 0
        Add a temperature bias to the time series
    prcp_bias : float, default 1
        Precipitation factor to the time series (called bias for
        consistency with `temp_bias`)
    """
    super(PastMassBalance, self).__init__()
    self.valid_bounds = [-1e4, 2e4]  # in m
    if mu_star is None:
        df = gdir.read_json('local_mustar')
        mu_star = df['mu_star_glacierwide']
        if check_calib_params:
            if not df['mu_star_allsame']:
                raise RuntimeError('You seem to use the glacier-wide mu* '
                                   'to compute the mass-balance although '
                                   'this glacier has different mu* for '
                                   'its flowlines. '
                                   'Set `check_calib_params=False` '
                                   'to ignore this warning.')

    if bias is None:
        if cfg.PARAMS['use_bias_for_run']:
            df = gdir.read_json('local_mustar')
            bias = df['bias']
        else:
            bias = 0.

    self.mu_star = mu_star
    self.bias = bias

    # Parameters
    self.t_solid = cfg.PARAMS['temp_all_solid']
    self.t_liq = cfg.PARAMS['temp_all_liq']
    self.t_melt = cfg.PARAMS['temp_melt']
    prcp_fac = cfg.PARAMS['prcp_scaling_factor']
    default_grad = cfg.PARAMS['temp_default_gradient']

    # Check the climate related params to the GlacierDir to make sure
    if check_calib_params:
        mb_calib = gdir.read_pickle('climate_info')['mb_calib_params']
        for k, v in mb_calib.items():
            if v != cfg.PARAMS[k]:
                raise RuntimeError('You seem to use different mass-'
                                   'balance parameters than used for the '
                                   'calibration. '
                                   'Set `check_calib_params=False` '
                                   'to ignore this warning.')

    # Public attrs
    self.temp_bias = 0.
    self.prcp_bias = 1.
    self.repeat = repeat

    # Read file
    fpath = gdir.get_filepath(filename, filesuffix=input_filesuffix)
    with ncDataset(fpath, mode='r') as nc:
        # time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        ny, r = divmod(len(time), 12)
        if r != 0:
            raise ValueError('Climate data should be N full years')
        # This is where we switch to hydro float year format
        # Last year gives the tone of the hydro year
        self.years = np.repeat(np.arange(time[-1].year - ny + 1,
                                         time[-1].year + 1), 12)
        self.months = np.tile(np.arange(1, 13), ny)
        # Read timeseries
        self.temp = nc.variables['temp'][:]
        self.prcp = nc.variables['prcp'][:] * prcp_fac
        if 'gradient' in nc.variables:
            grad = nc.variables['gradient'][:]
            # Security for stuff that can happen with local gradients
            g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
            grad = np.where(~np.isfinite(grad), default_grad, grad)
            grad = np.clip(grad, g_minmax[0], g_minmax[1])
        else:
            grad = self.prcp * 0 + default_grad
        self.grad = grad
        self.ref_hgt = nc.ref_hgt
    self.ys = self.years[0] if ys is None else ys
    self.ye = self.years[-1] if ye is None else ye
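# Worked numeric sketch of the hydro float-year indexing above (plain
# numpy, not OGGM API): a monthly series ending in December 2002 with
# ny = 2 full years is labelled [2001]*12 + [2002]*12 with months cycling
# 1..12, so the last calendar year "gives the tone" of the hydro year.
def _example_hydro_years():
    import numpy as np
    last_year, ny = 2002, 2
    years = np.repeat(np.arange(last_year - ny + 1, last_year + 1), 12)
    months = np.tile(np.arange(1, 13), ny)
    return years, months  # shape (24,) each: 2001 x 12 then 2002 x 12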
def plot_catchment_width(gdirs, ax=None, smap=None, corrected=False,
                         add_intersects=False, add_touches=False,
                         lines_cmap='Set1'):
    """Plots the catchment widths out of a glacier directory."""
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]
    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    # Maybe plot touches
    xis, yis, cis = [], [], []
    ogrid = smap.grid

    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                          linewidth=.2)
        for l in poly_pix.interiors:
            smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # Plot intersects
        if add_intersects and gdir.has_file('intersects'):
            gdf = gdir.read_shapefile('intersects')
            smap.set_shapefile(gdf, color='k', linewidth=3.5, zorder=3)

        # plot Centerlines
        cls = gdir.read_pickle('inversion_flowlines')[::-1]
        color = gencolor(len(cls) + 1, cmap=lines_cmap)
        for l, c in zip(cls, color):
            smap.set_geometry(l.line, crs=crs, color=c,
                              linewidth=2.5, zorder=50)
            if corrected:
                for wi, cur, (n1, n2) in zip(l.widths, l.line.coords,
                                             l.normals):
                    _l = shpg.LineString([shpg.Point(cur + wi / 2. * n1),
                                          shpg.Point(cur + wi / 2. * n2)])
                    smap.set_geometry(_l, crs=crs, color=c,
                                      linewidth=0.6, zorder=50)
            else:
                for wl, wi in zip(l.geometrical_widths, l.widths):
                    col = c if np.isfinite(wi) else 'grey'
                    for w in wl:
                        smap.set_geometry(w, crs=crs, color=col,
                                          linewidth=0.6, zorder=50)

            if add_touches:
                pok = np.where(l.is_rectangular)
                xi, yi = l.line.xy
                xi, yi = ogrid.transform(np.asarray(xi)[pok],
                                         np.asarray(yi)[pok], crs=crs)
                xis.append(xi)
                yis.append(yi)
                cis.append(c)

    smap.plot(ax)
    for xi, yi, c in zip(xis, yis, cis):
        ax.scatter(xi, yi, color=c, s=20, zorder=51)

    return {}
def _reproject_and_scale(gdir, do_error=False):
    """Reproject and scale itslive data, avoid code duplication for error"""
    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    vnx = 'vx'
    vny = 'vy'
    if do_error:
        vnx += '_err'
        vny += '_err'

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg][vnx])
        fy = utils.file_downloader(region_files[reg][vny])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)
            # Error files are wrong
            if nodata == 0:
                nodata = -32767.0

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    orig_vel = np.sqrt(xx1 ** 2 + yy1 ** 2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx ** 2 + vy ** 2)
    p_ok = new_vel > 1e-5  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in x map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vx.filled(np.nan)

        vn = 'obs_icevel_y'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in y map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vy.filled(np.nan)
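# Minimal numeric sketch of the "scale back" step above (plain numpy, not
# OGGM API): after re-projection the direction of (vx, vy) is kept but the
# magnitude is renormalised to the original ground-unit speed, because map
# projections do not always preserve distances.
def _example_scale_back():
    import numpy as np
    vx, vy = np.array([3.0]), np.array([4.0])  # reprojected components
    orig_vel = np.array([10.0])                # original speed, m yr-1
    new_vel = np.sqrt(vx ** 2 + vy ** 2)       # 5.0 after reprojection
    vx, vy = vx * orig_vel / new_vel, vy * orig_vel / new_vel
    return vx, vy  # (6.0, 8.0): same direction, original magnitude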
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero
        to suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)
    """
    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate
    thick = glacier_ext * np.NaN
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.NaN
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx ** 2)
    thick *= init_vol / tmp_vol

    # write
    grids_file = gdir.get_filepath('gridded_data')
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
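# Hedged usage sketch for the task above: assumes the inversion tasks have
# already run on `gdir` (the 'inversion_output' and 'inversion_flowlines'
# pickles exist). The returned map conserves the inverted total volume.
def _example_distribute(gdir):
    import numpy as np
    thick = distribute_thickness_interp(gdir, add_slope=True)
    # Volume check mirrors the conservation step inside the task:
    return np.nansum(thick * gdir.grid.dx ** 2)  # total volume in m3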
def its_live_to_gdir(gdir, dsx, dsy, dex, dey, fx):
    """Re-project its_live files to a given glacier directory.

    Based on the function from oggm_shop:
    https://github.com/OGGM/oggm/blob/master/oggm/shop/its_live.py#L79

    Variables are added to the gridded_data nc file.

    Re-projecting velocities from one map proj to another is done by
    re-projecting the vector distances. In this process, absolute
    velocities might change as well because map projections do not always
    preserve distances -> we scale them back to the original velocities,
    as per the ITS_LIVE documentation, which states that velocities are
    given in ground units, i.e. absolute velocities.

    We use bi-linear interpolation to re-project the velocities to the
    local glacier map.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    dsx : salem.GeoTiff
        velocity in the x direction for Greenland
    dsy : salem.GeoTiff
        velocity in the y direction for Greenland
    dex : salem.GeoTiff
        velocity error in the x direction Greenland
    dey : salem.GeoTiff
        velocity error in the y direction Greenland
    fx : path
        directory to original velocity data (x direction)
    """
    # subset its live data to our glacier map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)

    # Same projection for all the itslive data
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dex.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dey.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)

    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    ex1 = dex.get_vardata()
    ey1 = dey.get_vardata()

    non_valid = (xx1 == nodata) | (yy1 == nodata)
    non_valid_e = (ex1 == nodata) | (ey1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    ex1[non_valid_e] = np.NaN
    ey1[non_valid_e] = np.NaN

    orig_vel = np.sqrt(xx1 ** 2 + yy1 ** 2)
    orig_vel_e = np.sqrt(ex1 ** 2 + ey1 ** 2)

    xx1 += xx0
    yy1 += yy0
    ex1 += xx0
    ey1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)
    ex1, ey1 = salem.transform_proj(proj_vel, grid_gla.proj, ex1, ey1)
    # The error vectors share the same (already transformed) origin
    # coordinates as the velocities
    ex0, ey0 = xx0, yy0

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    ex1[non_valid_e] = np.NaN
    ey1[non_valid_e] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0
    ex = ex1 - ex0
    ey = ey1 - ey0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx ** 2 + vy ** 2)
    new_vel_e = np.sqrt(ex ** 2 + ey ** 2)
    p_ok = new_vel > 1e-5  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    p_ok_e = new_vel_e > 1e-5  # avoid div by zero
    ex[p_ok_e] = ex[p_ok_e] * orig_vel_e[p_ok_e] / new_vel_e[p_ok_e]
    ey[p_ok_e] = ey[p_ok_e] * orig_vel_e[p_ok_e] / new_vel_e[p_ok_e]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')
    ex = grid_gla.map_gridded_data(ex, grid=grid_vel, interp='linear')
    ey = grid_gla.map_gridded_data(ey, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in x map direction'
        v[:] = vx

        vn = 'obs_icevel_y'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in y map direction'
        v[:] = vy

        vn = 'obs_icevel_x_error'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE error velocity data in x map direction'
        v[:] = ex

        vn = 'obs_icevel_y_error'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE error velocity data in y map direction'
        v[:] = ey
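# Hedged usage sketch for the function above: the four GeoTiffs and the
# path `fx` mirror the signature; the file names here are illustrative
# placeholders, not real ITS_LIVE products.
def _example_its_live(gdir):
    fx, fy = 'greenland_vx.tif', 'greenland_vy.tif'
    fex, fey = 'greenland_vx_err.tif', 'greenland_vy_err.tif'
    its_live_to_gdir(gdir,
                     dsx=salem.GeoTiff(fx), dsy=salem.GeoTiff(fy),
                     dex=salem.GeoTiff(fex), dey=salem.GeoTiff(fey),
                     fx=fx)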
def mb_climate_on_height(gdir, heights, prcp_fac,
                         time_range=None, year_range=None):
    """Mass-balance climate of the glacier at a specific height

    Reads the glacier's monthly climate data file and computes the
    temperature "energies" (temp above 0) and solid precipitation at the
    required height.

    Parameters
    ----------
    gdir : the glacier directory
    heights : a 1D array of the heights (in meter) where you want the data
    prcp_fac : the correction factor for precipitation
    time_range : optional
        default is to read all data but with this you can provide a
        [datetime, datetime] bounds (inclusive).
    year_range : optional
        maybe more useful than the time bounds above. Provide a [y0, y1]
        year range to get the data for specific (hydrological) years only

    Returns
    -------
    (time, tempformelt, prcpsol)
        - time: array of shape (nt,)
        - tempformelt: array of shape (len(heights), nt)
        - prcpsol: array of shape (len(heights), nt)
    """
    if year_range is not None:
        sm = cfg.PARAMS['hydro_month_' + gdir.hemisphere]
        em = sm - 1 if (sm > 1) else 12
        t0 = datetime.datetime(year_range[0] - 1, sm, 1)
        t1 = datetime.datetime(year_range[1], em, 1)
        return mb_climate_on_height(gdir, heights, prcp_fac,
                                    time_range=[t0, t1])

    # Parameters
    temp_all_solid = cfg.PARAMS['temp_all_solid']
    temp_all_liq = cfg.PARAMS['temp_all_liq']
    temp_melt = cfg.PARAMS['temp_melt']

    # Read file
    with utils.ncDataset(gdir.get_filepath('climate_monthly'),
                         mode='r') as nc:
        # time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        if time_range is not None:
            p0 = np.where(time == time_range[0])[0]
            try:
                p0 = p0[0]
            except IndexError:
                raise RuntimeError('time_range[0] not found in file')
            p1 = np.where(time == time_range[1])[0]
            try:
                p1 = p1[0]
            except IndexError:
                raise RuntimeError('time_range[1] not found in file')
        else:
            p0 = 0
            p1 = len(time) - 1

        time = time[p0:p1 + 1]

        # Read timeseries
        itemp = nc.variables['temp'][p0:p1 + 1]
        iprcp = nc.variables['prcp'][p0:p1 + 1]
        igrad = nc.variables['grad'][p0:p1 + 1]
        ref_hgt = nc.ref_hgt

    # Correct precipitation
    iprcp *= prcp_fac

    # For each height pixel:
    # Compute temp and tempformelt (temperature above melting threshold)
    npix = len(heights)
    grad_temp = np.atleast_2d(igrad).repeat(npix, 0)
    grad_temp *= (heights.repeat(len(time)).reshape(grad_temp.shape) -
                  ref_hgt)
    temp2d = np.atleast_2d(itemp).repeat(npix, 0) + grad_temp
    temp2dformelt = temp2d - temp_melt
    temp2dformelt = np.clip(temp2dformelt, 0, temp2dformelt.max())

    # Compute solid precipitation from total precipitation
    prcpsol = np.atleast_2d(iprcp).repeat(npix, 0)
    fac = 1 - (temp2d - temp_all_solid) / (temp_all_liq - temp_all_solid)
    fac = np.clip(fac, 0, 1)
    prcpsol = prcpsol * fac

    return time, temp2dformelt, prcpsol
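# Worked numeric sketch of the solid-precipitation factor above (plain
# numpy, not OGGM API): with temp_all_solid = 0 degC and temp_all_liq =
# 2 degC, precipitation falling at 1 degC counts as 50% solid, ramping
# linearly from fully solid at 0 degC to fully liquid at 2 degC.
def _example_solid_fraction():
    import numpy as np
    temp_all_solid, temp_all_liq = 0., 2.
    temp2d = np.array([-1., 0., 1., 2., 3.])
    fac = 1 - (temp2d - temp_all_solid) / (temp_all_liq - temp_all_solid)
    return np.clip(fac, 0, 1)  # -> [1., 1., 0.5, 0., 0.]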
def plot_centerlines(gdirs, ax=None, smap=None, use_flowlines=False,
                     add_downstream=False, lines_cmap='Set1',
                     add_line_index=False, use_model_flowlines=False):
    """Plots the centerlines of a glacier directory."""
    if add_downstream and not use_flowlines:
        raise ValueError('Downstream lines can be plotted with flowlines '
                         'only')

    # Files
    filename = 'centerlines'
    if use_model_flowlines:
        filename = 'model_flowlines'
    elif use_flowlines:
        filename = 'inversion_flowlines'

    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    cm = truncate_colormap(ALTITUDE_CMAP, minval=0.25, maxval=1.0, n=256)
    smap.set_cmap(cm)
    smap.set_plot_params(nlevels=256)
    smap.set_data(topo)

    for gdir in gdirs:
        crs = gdir.grid.center_grid
        geom = gdir.read_pickle('geometries')

        # Plot boundaries
        poly_pix = geom['polygon_pix']
        smap.set_geometry(poly_pix, crs=crs, fc='white',
                          alpha=0.3, zorder=2, linewidth=.2)
        poly_pix = utils.tolist(poly_pix)
        for _poly in poly_pix:
            for l in _poly.interiors:
                smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = gdir.read_pickle(filename)

        # Go in reverse order for red always being the longest
        cls = cls[::-1]
        nl = len(cls)
        color = gencolor(len(cls) + 1, cmap=lines_cmap)
        for i, (l, c) in enumerate(zip(cls, color)):
            if add_downstream and not gdir.is_tidewater and l is cls[0]:
                line = gdir.read_pickle('downstream_line')['full_line']
            else:
                line = l.line

            smap.set_geometry(line, crs=crs, color=c,
                              linewidth=2.5, zorder=50)

            text = '{}'.format(nl - i - 1) if add_line_index else None
            smap.set_geometry(l.head, crs=gdir.grid, marker='o',
                              markersize=60, alpha=0.8, color=c,
                              zorder=99, text=text)

            for j in l.inflow_points:
                smap.set_geometry(j, crs=crs, marker='o',
                                  markersize=40, edgecolor='k', alpha=0.8,
                                  zorder=99, facecolor='none')

    smap.plot(ax)
    return dict(cbar_label='Alt. [m]')
def test_solid_prcp(self):
    """Tests the subroutine which computes solid precipitation amount from
    given total precipitation and temperature.
    """
    # read the Hintereisferner DEM
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid
    gis.define_glacier_region(gdir, entity=entity)
    # process the given climate file
    climate.process_custom_climate_data(gdir)

    # read the following variables from the center pixel (46.83N 10.75E)
    # of the Hintereisferner HistAlp climate file for the
    # entire time period from October 1801 until September 2003
    # - surface height in m asl.
    # - total precipitation amount in kg/m2
    # - 2m air temperature in °C
    with utils.ncDataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
        ref_h = nc_r.variables['hgt'][1, 1]
        ref_p = nc_r.variables['prcp'][:, 1, 1]
        ref_t = nc_r.variables['temp'][:, 1, 1]

    # define needed parameters
    prcp_factor = 1
    temp_all_solid = 0
    temp_grad = -0.0065

    # define elevation levels
    ref_hgt = ref_h
    min_hgt = ref_h - 100
    max_hgt = ref_h + 100

    # if the terminus temperature is below the threshold for
    # solid precipitation all fallen precipitation must be solid
    temp_terminus = ref_t * 0 + temp_all_solid
    solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                               min_hgt, max_hgt,
                                               temp_terminus,
                                               temp_all_solid, temp_grad,
                                               prcp_grad=0, prcp_anomaly=0)
    np.testing.assert_allclose(solid_prcp, ref_p)

    # if the temperature at the maximal elevation is above the threshold
    # for solid precipitation all fallen precipitation must be liquid
    temp_terminus = ref_t + 100
    solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                               min_hgt, max_hgt,
                                               temp_terminus,
                                               temp_all_solid, temp_grad,
                                               prcp_grad=0, prcp_anomaly=0)
    np.testing.assert_allclose(solid_prcp, 0)

    # test extreme case if max_hgt equals min_hgt
    test_p = ref_p * (ref_t <= temp_all_solid).astype(int)
    solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                               ref_hgt, ref_hgt,
                                               ref_t, temp_all_solid,
                                               temp_grad, prcp_grad=0,
                                               prcp_anomaly=0)
    np.testing.assert_allclose(solid_prcp, test_p)
def plot_modeloutput_map(gdirs, ax=None, smap=None, model=None,
                         vmax=None, linewidth=3, filesuffix='',
                         modelyr=None):
    """Plots the result of the model output."""
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    toplot_th = np.array([])
    toplot_lines = []
    toplot_crs = []

    if model is None:
        models = []
        for gdir in gdirs:
            model = FileModel(gdir.get_filepath('model_run',
                                                filesuffix=filesuffix))
            model.run_until(modelyr)
            models.append(model)
    else:
        models = utils.tolist(model)

    for gdir, model in zip(gdirs, models):
        geom = gdir.read_pickle('geometries')
        poly_pix = geom['polygon_pix']

        crs = gdir.grid.center_grid
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                          linewidth=.2)

        poly_pix = utils.tolist(poly_pix)
        for _poly in poly_pix:
            for l in _poly.interiors:
                smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = model.fls
        for l in cls:
            smap.set_geometry(l.line, crs=crs, color='gray',
                              linewidth=1.2, zorder=50)
            toplot_th = np.append(toplot_th, l.thick)
            widths = l.widths.copy()
            widths = np.where(l.thick > 0, widths, 0.)
            for wi, cur, (n1, n2) in zip(widths, l.line.coords, l.normals):
                line = shpg.LineString([shpg.Point(cur + wi / 2. * n1),
                                        shpg.Point(cur + wi / 2. * n2)])
                toplot_lines.append(line)
                toplot_crs.append(crs)

    dl = salem.DataLevels(cmap=SECTION_THICKNESS_CMAP, nlevels=256,
                          data=toplot_th, vmin=0, vmax=vmax)
    colors = dl.to_rgb()
    for l, c, crs in zip(toplot_lines, colors, toplot_crs):
        smap.set_geometry(l, crs=crs, color=c,
                          linewidth=linewidth, zorder=50)
    smap.plot(ax)
    return dict(cbar_label='Section thickness [m]',
                cbar_primitive=dl,
                title_comment=' -- year: {:d}'.format(np.int64(model.yr)))
def process_cesm_data(gdir, filesuffix='', fpath_temp=None,
                      fpath_precc=None, fpath_precl=None, **kwargs):
    """Processes and writes CESM climate data for this glacier.

    This function is made for interpolating the Community Earth System
    Model Last Millennium Ensemble (CESM-LME) climate simulations, from
    Otto-Bliesner et al. (2016), to the high-resolution CL2 climatologies
    (provided with OGGM) and writes everything to a NetCDF file.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    filesuffix : str
        append a suffix to the filename (useful for ensemble experiments).
    fpath_temp : str
        path to the temp file (default: cfg.PATHS['cesm_temp_file'])
    fpath_precc : str
        path to the precc file (default: cfg.PATHS['cesm_precc_file'])
    fpath_precl : str
        path to the precl file (default: cfg.PATHS['cesm_precl_file'])
    **kwargs : any kwarg to be passed to ref:`process_gcm_data`
    """
    # CESM temperature and precipitation data
    if fpath_temp is None:
        if 'cesm_temp_file' not in cfg.PATHS:
            raise ValueError("Need to set cfg.PATHS['cesm_temp_file']")
        fpath_temp = cfg.PATHS['cesm_temp_file']
    if fpath_precc is None:
        if 'cesm_precc_file' not in cfg.PATHS:
            raise ValueError("Need to set cfg.PATHS['cesm_precc_file']")
        fpath_precc = cfg.PATHS['cesm_precc_file']
    if fpath_precl is None:
        if 'cesm_precl_file' not in cfg.PATHS:
            raise ValueError("Need to set cfg.PATHS['cesm_precl_file']")
        fpath_precl = cfg.PATHS['cesm_precl_file']

    # read the files
    if LooseVersion(xr.__version__) < LooseVersion('0.11'):
        raise ImportError('This task needs xarray v0.11 or newer to run.')

    tempds = xr.open_dataset(fpath_temp)
    precpcds = xr.open_dataset(fpath_precc)
    preclpds = xr.open_dataset(fpath_precl)

    # Get the time right - i.e. from time bounds
    # Fix for https://github.com/pydata/xarray/issues/2565
    with utils.ncDataset(fpath_temp, mode='r') as nc:
        time_unit = nc.variables['time'].units
        calendar = nc.variables['time'].calendar

    try:
        # xarray v0.11
        time = netCDF4.num2date(tempds.time_bnds[:, 0], time_unit,
                                calendar=calendar)
    except TypeError:
        # xarray > v0.11
        time = tempds.time_bnds[:, 0].values

    # select for location
    lon = gdir.cenlon
    lat = gdir.cenlat

    # CESM files are in 0-360
    if lon <= 0:
        lon += 360

    # take the closest
    # Should we consider GCM interpolation?
    temp = tempds.TREFHT.sel(lat=lat, lon=lon, method='nearest')
    prcp = (precpcds.PRECC.sel(lat=lat, lon=lon, method='nearest') +
            preclpds.PRECL.sel(lat=lat, lon=lon, method='nearest'))
    temp['time'] = time
    prcp['time'] = time

    temp.lon.values = temp.lon if temp.lon <= 180 else temp.lon - 360
    prcp.lon.values = prcp.lon if prcp.lon <= 180 else prcp.lon - 360

    # Convert m s-1 to mm mth-1
    if time[0].month != 1:
        raise ValueError('We expect the files to start in January!')
    ny, r = divmod(len(time), 12)
    assert r == 0
    ndays = np.tile(cfg.DAYS_IN_MONTH, ny)
    prcp = prcp * ndays * (60 * 60 * 24 * 1000)

    tempds.close()
    precpcds.close()
    preclpds.close()

    # Here:
    # - time_unit='days since 0850-01-01 00:00:00'
    # - calendar='noleap'
    process_gcm_data(gdir, filesuffix=filesuffix, prcp=prcp, temp=temp,
                     time_unit=time_unit, calendar=calendar, **kwargs)
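# Worked numeric sketch of the precipitation unit conversion above: CESM
# provides rates in m s-1, while OGGM expects monthly totals in mm. For a
# 31-day month: rate * 31 days * 86400 s/day * 1000 mm/m.
def _example_prcp_conversion():
    rate_m_per_s = 1e-8  # illustrative CESM PRECC + PRECL value
    ndays = 31
    return rate_m_per_s * ndays * (60 * 60 * 24 * 1000)  # ~26.8 mm month-1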
def plot_modeloutput_map(gdirs, ax=None, smap=None, model=None,
                         vmax=None, linewidth=3, filesuffix='',
                         modelyr=None):
    """Plots the result of the model output."""
    gdir = gdirs[0]
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        topo = nc.variables['topo'][:]

    # Dirty optim
    try:
        smap.set_topography(topo)
    except ValueError:
        pass

    toplot_th = np.array([])
    toplot_lines = []
    toplot_crs = []

    if model is None:
        models = []
        for gdir in gdirs:
            model = FileModel(gdir.get_filepath('model_run',
                                                filesuffix=filesuffix))
            model.run_until(modelyr)
            models.append(model)
    else:
        models = utils.tolist(model)

    for gdir, model in zip(gdirs, models):
        geom = gdir.read_pickle('geometries')
        poly_pix = geom['polygon_pix']

        crs = gdir.grid.center_grid
        smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2,
                          linewidth=.2)

        poly_pix = utils.tolist(poly_pix)
        for _poly in poly_pix:
            for l in _poly.interiors:
                smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)

        # plot Centerlines
        cls = model.fls
        for l in cls:
            smap.set_geometry(l.line, crs=crs, color='gray',
                              linewidth=1.2, zorder=50)
            toplot_th = np.append(toplot_th, l.thick)
            widths = l.widths.copy()
            widths = np.where(l.thick > 0, widths, 0.)
            for wi, cur, (n1, n2) in zip(widths, l.line.coords, l.normals):
                line = shpg.LineString([shpg.Point(cur + wi / 2. * n1),
                                        shpg.Point(cur + wi / 2. * n2)])
                toplot_lines.append(line)
                toplot_crs.append(crs)

    dl = salem.DataLevels(cmap=OGGM_CMAPS['section_thickness'],
                          data=toplot_th, vmin=0, vmax=vmax)
    colors = dl.to_rgb()
    for l, c, crs in zip(toplot_lines, colors, toplot_crs):
        smap.set_geometry(l, crs=crs, color=c,
                          linewidth=linewidth, zorder=50)
    smap.plot(ax)
    return dict(cbar_label='Section thickness [m]',
                cbar_primitive=dl,
                title_comment=' -- year: {:d}'.format(np.int64(model.yr)))
def test_yearly_mb_temp_prcp(self):
    """Test the routine which returns the yearly mass balance relevant
    climate parameters, i.e. positive melting temperature and solid
    precipitation. The testing target is the output of the corresponding
    OGGM routine `get_yearly_mb_climate_on_glacier(gdir)`.
    """
    # read the Hintereisferner DEM
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid and glacier mask
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    # run centerline prepro tasks
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    # process the given climate file
    climate.process_custom_climate_data(gdir)

    # get yearly sums of terminus temperature and solid precipitation
    years, temp, prcp = vascaling.get_yearly_mb_temp_prcp(gdir)

    # use the OGGM method to get the mass balance
    # relevant climate parameters
    years_oggm, temp_oggm, prcp_oggm = \
        climate.mb_yearly_climate_on_glacier(gdir)

    # the energy input at the glacier terminus must be greater than (or
    # equal to) the glacier wide average, since the air temperature drops
    # with elevation, i.e. the mean deviation must be positive, using the
    # OGGM data as reference
    assert md(temp_oggm, temp) >= 0
    # consequently, the average mass input must be less than (or equal
    # to) the mass input integrated over the whole glacier surface, i.e.
    # the mean deviation must be negative, using the OGGM data as reference
    # TODO: does it actually?! And if so, why?! @ASK
    assert md(prcp_oggm, prcp) <= 0

    # correlation must be higher than set threshold
    assert corrcoef(temp, temp_oggm) >= 0.94
    assert corrcoef(prcp, prcp_oggm) >= 0.98

    # get terminus temperature using the OGGM routine
    fpath = gdir.get_filepath('gridded_data')
    with ncDataset(fpath) as nc:
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo'][:]
    heights = np.array([np.min(topo[np.where(mask == 1)])])
    years_height, temp_height, _ = \
        climate.mb_yearly_climate_on_height(gdir, heights, flatten=False)
    temp_height = temp_height[0]
    # both time series must be equal
    np.testing.assert_array_equal(temp, temp_height)

    # get solid precipitation averaged over the glacier
    # (not weighted with widths)
    fls = gdir.read_pickle('inversion_flowlines')
    heights = np.array([])
    for fl in fls:
        heights = np.append(heights, fl.surface_h)
    years_height, _, prcp_height = \
        climate.mb_yearly_climate_on_height(gdir, heights, flatten=True)
    # correlation must be higher than set threshold
    assert corrcoef(prcp, prcp_height) >= 0.99
    # TODO: assert absolute values (or differences) of precipitation @ASK

    # test exception handling of out of bounds time/year range
    with self.assertRaises(climate.MassBalanceCalibrationError):
        # start year out of bounds
        year_range = [1500, 1980]
        _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                    year_range=year_range)
    with self.assertRaises(climate.MassBalanceCalibrationError):
        # end year out of bounds
        year_range = [1980, 3000]
        _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                    year_range=year_range)
    with self.assertRaises(ValueError):
        # get not N full years
        t0 = datetime.datetime(1980, 1, 1)
        t1 = datetime.datetime(1980, 3, 1)
        time_range = [t0, t1]
        _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                    time_range=time_range)

    # TODO: assert gradient in climate file?!
    pass
def test_terminus_temp(self):
    """Testing the subroutine which computes the terminus temperature
    from the given climate file and glacier DEM. Pretty straightforward
    and somewhat useless, but a nice finger exercise.
    """
    # read the Hintereisferner DEM
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid
    gis.define_glacier_region(gdir, entity=entity)
    # process the given climate file
    climate.process_custom_climate_data(gdir)

    # read the following variables from the center pixel (46.83N 10.75E)
    # of the Hintereisferner HistAlp climate file for the
    # entire time period from October 1801 until September 2003
    # - surface height in m asl.
    # - total precipitation amount in kg/m2
    # - 2m air temperature in °C
    with utils.ncDataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
        ref_h = nc_r.variables['hgt'][1, 1]
        ref_t = nc_r.variables['temp'][:, 1, 1]

    # define a temperature anomaly
    temp_anomaly = 0
    # specify temperature gradient
    temp_grad = -0.0065

    # the terminus temperature must equal the input temperature
    # if terminus elevation equals reference elevation
    temp_terminus = \
        vascaling._compute_temp_terminus(ref_t, temp_grad, ref_hgt=ref_h,
                                         terminus_hgt=ref_h,
                                         temp_anomaly=temp_anomaly)
    np.testing.assert_allclose(temp_terminus, ref_t + temp_anomaly)

    # the terminus temperature must equal the input temperature
    # if the gradient is zero
    for term_h in np.array([-100, 0, 100]) + ref_h:
        temp_terminus = \
            vascaling._compute_temp_terminus(ref_t, temp_grad=0,
                                             ref_hgt=ref_h,
                                             terminus_hgt=term_h,
                                             temp_anomaly=temp_anomaly)
        np.testing.assert_allclose(temp_terminus, ref_t + temp_anomaly)

    # now test the routine with actual elevation differences
    # and a non-zero temperature gradient
    for h_diff in np.array([-100, 0, 100]):
        term_h = ref_h + h_diff
        temp_diff = temp_grad * h_diff
        temp_terminus = \
            vascaling._compute_temp_terminus(ref_t, temp_grad,
                                             ref_hgt=ref_h,
                                             terminus_hgt=term_h,
                                             temp_anomaly=temp_anomaly)
        np.testing.assert_allclose(temp_terminus,
                                   ref_t + temp_anomaly + temp_diff)
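# The relation exercised by the test above is a plain linear lapse-rate
# correction. A sketch of what `_compute_temp_terminus` is expected to do
# (an illustration of the formula, not the vascaling implementation):
#
#     def compute_temp_terminus(ref_t, temp_grad, ref_hgt,
#                               terminus_hgt, temp_anomaly=0.):
#         # T_terminus = T_ref + grad * (z_terminus - z_ref) + anomaly
#         return ref_t + temp_grad * (terminus_hgt - ref_hgt) + temp_anomaly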
def interpolation_masks(gdir): """Computes the glacier exterior masks taking ice divides into account. This is useful for distributed ice thickness. The masks are added to the gridded data file. For convenience we also add a slope mask. Parameters ---------- gdir : :py:class:`oggm.GlacierDirectory` where to write the data """ # Variables grids_file = gdir.get_filepath('gridded_data') with ncDataset(grids_file) as nc: topo_smoothed = nc.variables['topo_smoothed'][:] glacier_mask = nc.variables['glacier_mask'][:] # Glacier exterior including nunataks erode = binary_erosion(glacier_mask) glacier_ext = glacier_mask ^ erode glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0) # Intersects between glaciers gdfi = gpd.GeoDataFrame(columns=['geometry']) if gdir.has_file('intersects'): # read and transform to grid gdf = gdir.read_shapefile('intersects') salem.transform_geopandas(gdf, gdir.grid, inplace=True) gdfi = pd.concat([gdfi, gdf[['geometry']]]) # Ice divide mask # Probably not the fastest way to do this, but it works dist = np.array([]) jj, ii = np.where(glacier_ext) for j, i in zip(jj, ii): dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j)))) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=RuntimeWarning) pok = np.where(dist <= 1) glacier_ext_intersect = glacier_ext * 0 glacier_ext_intersect[jj[pok], ii[pok]] = 1 # Distance from border mask - Scipy does the job dx = gdir.grid.dx dis_from_border = 1 + glacier_ext_intersect - glacier_ext dis_from_border = distance_transform_edt(dis_from_border) * dx # Slope glen_n = cfg.PARAMS['glen_n'] sy, sx = np.gradient(topo_smoothed, dx, dx) slope = np.arctan(np.sqrt(sy**2 + sx**2)) slope = np.clip(slope, np.deg2rad(cfg.PARAMS['min_slope']*4), np.pi/2.) slope = 1 / slope**(glen_n / (glen_n+2)) with ncDataset(grids_file, 'a') as nc: vn = 'glacier_ext_erosion' if vn in nc.variables: v = nc.variables[vn] else: v = nc.createVariable(vn, 'i1', ('y', 'x', )) v.units = '-' v.long_name = 'Glacier exterior with binary erosion method' v[:] = glacier_ext vn = 'ice_divides' if vn in nc.variables: v = nc.variables[vn] else: v = nc.createVariable(vn, 'i1', ('y', 'x', )) v.units = '-' v.long_name = 'Glacier ice divides' v[:] = glacier_ext_intersect vn = 'slope_factor' if vn in nc.variables: v = nc.variables[vn] else: v = nc.createVariable(vn, 'f4', ('y', 'x', )) v.units = '-' v.long_name = 'Slope factor as defined in Farinotti et al 2009' v[:] = slope vn = 'dis_from_border' if vn in nc.variables: v = nc.variables[vn] else: v = nc.createVariable(vn, 'f4', ('y', 'x', )) v.units = 'm' v.long_name = 'Distance from border' v[:] = dis_from_border
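# `interpolation_masks` is normally triggered on demand by the distribution
# tasks below, but it can also be called directly. Sketch (assumption:
# `gdir` already has a gridded_data file with topo and glacier mask):
#
#     interpolation_masks(gdir)
#     with ncDataset(gdir.get_filepath('gridded_data')) as nc:
#         dis_from_border = nc.variables['dis_from_border'][:]
#         slope_factor = nc.variables['slope_factor'][:]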
def distribute_thickness_per_altitude(gdir, add_slope=True,
                                      smooth_radius=None,
                                      dis_from_border_exp=0.25,
                                      varname_suffix=''):
    """Compute a thickness map by redistributing mass along altitudinal bands.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero to
        suppress smoothing.
    dis_from_border_exp : float
        the exponent of the distance from border mask
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        topo_smoothed = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]
        dis_from_border = nc.variables['dis_from_border'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    hs, ts, vs, xs, ys = [], [], [], [], []
    for cl, fl in zip(cls, fls):
        hs = np.append(hs, fl.surface_h)
        ts = np.append(ts, cl['thick'])
        vs = np.append(vs, cl['volume'])
        x, y = fl.line.xy
        xs = np.append(xs, x)
        ys = np.append(ys, y)
    init_vol = np.sum(vs)

    # Assign a first order thickness to the points
    # very inefficient inverse distance stuff
    thick = glacier_mask * np.NaN
    for y in range(thick.shape[0]):
        for x in range(thick.shape[1]):
            phgt = topo_smoothed[y, x]
            # take the ones in a 100m range
            starth = 100.
            while True:
                starth += 10
                pok = np.nonzero(np.abs(phgt - hs) <= starth)[0]
                if len(pok) != 0:
                    break
            sqr = np.sqrt((xs[pok] - x)**2 + (ys[pok] - y)**2)
            pzero = np.where(sqr == 0)
            if len(pzero[0]) == 0:
                thick[y, x] = np.average(ts[pok], weights=1 / sqr)
            elif len(pzero[0]) == 1:
                thick[y, x] = ts[pzero]
            else:
                raise RuntimeError('We should not be there')

    # Distance from border (normalized)
    dis_from_border = dis_from_border**dis_from_border_exp
    dis_from_border /= np.mean(dis_from_border[glacier_mask == 1])
    thick *= dis_from_border

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick = thick.clip(0)
    thick[glacier_mask == 0] = np.NaN
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
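# Typical call, after the inversion tasks have written 'inversion_output'
# and 'inversion_flowlines' (a sketch; the suffix is optional and only
# useful to keep several experiments side by side):
#
#     thick = distribute_thickness_per_altitude(gdir, add_slope=True,
#                                               varname_suffix='_alt')
#     # also stored in 'gridded_data' as 'distributed_thickness_alt'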
def process_histalp_data(gdir, y0=None, y1=None, output_filesuffix=None):
    """Processes and writes the HISTALP baseline climate data for this
    glacier.

    Extracts the nearest timeseries and writes everything to a NetCDF file.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    y0 : int
        the starting year of the timeseries to write. The default is to take
        1850 (because the data is quite bad before that)
    y1 : int
        the ending year of the timeseries to write. The default is to take
        the entire time period available in the file, but with this kwarg
        you can shorten it (to save space or to crop bad data)
    output_filesuffix : str
        this adds a suffix to the output file (useful to avoid overwriting
        previous experiments)
    """

    if cfg.PATHS.get('climate_file', None):
        warnings.warn("You seem to have set a custom climate file for this "
                      "run, but are using the default HISTALP climate file "
                      "instead.")

    if cfg.PARAMS['baseline_climate'] != 'HISTALP':
        raise InvalidParamsError("cfg.PARAMS['baseline_climate'] should be "
                                 "set to HISTALP.")

    # read the time out of the pure netcdf file
    ft = get_histalp_file('tmp')
    fp = get_histalp_file('pre')
    with utils.ncDataset(ft) as nc:
        vt = nc.variables['time']
        assert vt[0] == 0
        assert vt[-1] == vt.shape[0] - 1
        t0 = vt.units.split(' since ')[1][:7]
        time_t = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
    with utils.ncDataset(fp) as nc:
        vt = nc.variables['time']
        assert vt[0] == 0.5
        assert vt[-1] == vt.shape[0] - .5
        t0 = vt.units.split(' since ')[1][:7]
        time_p = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')

    # Now open with salem
    nc_ts_tmp = salem.GeoNetcdf(ft, time=time_t)
    nc_ts_pre = salem.GeoNetcdf(fp, time=time_p)

    # set temporal subset for the ts data (hydro years)
    # the reference time is given by precip, which is shorter
    sm = cfg.PARAMS['hydro_month_' + gdir.hemisphere]
    em = sm - 1 if (sm > 1) else 12
    yrs = nc_ts_pre.time.year
    # default is 1850 because the data is quite bad before that
    y0 = 1850 if y0 is None else y0
    y1 = yrs[-1] if y1 is None else y1

    nc_ts_tmp.set_period(t0='{}-{:02d}-01'.format(y0, sm),
                         t1='{}-{:02d}-01'.format(y1, em))
    nc_ts_pre.set_period(t0='{}-{:02d}-01'.format(y0, sm),
                         t1='{}-{:02d}-01'.format(y1, em))
    time = nc_ts_pre.time
    ny, r = divmod(len(time), 12)
    assert r == 0

    # Units
    assert nc_ts_tmp._nc.variables['HSURF'].units.lower() in \
        ['m', 'meters', 'meter', 'metres', 'metre']
    assert nc_ts_tmp._nc.variables['T_2M'].units.lower() in \
        ['degc', 'degrees', 'degrees celcius', 'degree', 'c']
    assert nc_ts_pre._nc.variables['TOT_PREC'].units.lower() in \
        ['kg m-2', 'l m-2', 'mm', 'millimeters', 'millimeter']

    # geoloc
    lon = gdir.cenlon
    lat = gdir.cenlat
    nc_ts_tmp.set_subset(corners=((lon, lat), (lon, lat)), margin=1)
    nc_ts_pre.set_subset(corners=((lon, lat), (lon, lat)), margin=1)

    # read the data
    temp = nc_ts_tmp.get_vardata('T_2M')
    prcp = nc_ts_pre.get_vardata('TOT_PREC')
    hgt = nc_ts_tmp.get_vardata('HSURF')
    ref_lon = nc_ts_tmp.get_vardata('lon')
    ref_lat = nc_ts_tmp.get_vardata('lat')
    source = nc_ts_tmp._nc.title[:7]
    nc_ts_tmp._nc.close()
    nc_ts_pre._nc.close()

    # Should we compute the gradient?
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    igrad = None
    if use_grad:
        igrad = np.zeros(len(time)) * np.NaN
        for t, loct in enumerate(temp):
            slope, _, _, p_val, _ = stats.linregress(hgt.flatten(),
                                                     loct.flatten())
            igrad[t] = slope if (p_val < 0.01) else np.NaN

    gdir.write_monthly_climate_file(time, prcp[:, 1, 1], temp[:, 1, 1],
                                    hgt[1, 1], ref_lon[1], ref_lat[1],
                                    gradient=igrad,
                                    filesuffix=output_filesuffix,
                                    source=source)
def plot_catchment_width(gdirs, ax=None, smap=None, corrected=False, add_intersects=False, add_touches=False, lines_cmap='Set1'): """Plots the catchment widths out of a glacier directory. """ gdir = gdirs[0] with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc: topo = nc.variables['topo'][:] # Dirty optim try: smap.set_topography(topo) except ValueError: pass # Maybe plot touches xis, yis, cis = [], [], [] ogrid = smap.grid for gdir in gdirs: crs = gdir.grid.center_grid geom = gdir.read_pickle('geometries') # Plot boundaries poly_pix = geom['polygon_pix'] smap.set_geometry(poly_pix, crs=crs, fc='none', zorder=2, linewidth=.2) for l in poly_pix.interiors: smap.set_geometry(l, crs=crs, color='black', linewidth=0.5) # Plot intersects if add_intersects and gdir.has_file('intersects'): gdf = gdir.read_shapefile('intersects') smap.set_shapefile(gdf, color='k', linewidth=3.5, zorder=3) # plot Centerlines cls = gdir.read_pickle('inversion_flowlines')[::-1] color = gencolor(len(cls) + 1, cmap=lines_cmap) for l, c in zip(cls, color): smap.set_geometry(l.line, crs=crs, color=c, linewidth=2.5, zorder=50) if corrected: for wi, cur, (n1, n2) in zip(l.widths, l.line.coords, l.normals): _l = shpg.LineString([ shpg.Point(cur + wi / 2. * n1), shpg.Point(cur + wi / 2. * n2) ]) smap.set_geometry(_l, crs=crs, color=c, linewidth=0.6, zorder=50) else: for wl, wi in zip(l.geometrical_widths, l.widths): col = c if np.isfinite(wi) else 'grey' for w in wl: smap.set_geometry(w, crs=crs, color=col, linewidth=0.6, zorder=50) if add_touches: pok = np.where(l.is_rectangular) xi, yi = l.line.xy xi, yi = ogrid.transform(np.asarray(xi)[pok], np.asarray(yi)[pok], crs=crs) xis.append(xi) yis.append(yi) cis.append(c) smap.plot(ax) for xi, yi, c in zip(xis, yis, cis): ax.scatter(xi, yi, color=c, s=20, zorder=51) return {}
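# Plotting sketch (assumption: `smap` is a salem.Map defined on the glacier
# grid; for multiple gdirs it should cover their union):
#
#     import matplotlib.pyplot as plt
#     fig, ax = plt.subplots()
#     smap = salem.Map(gdir.grid, countries=False)
#     plot_catchment_width([gdir], ax=ax, smap=smap, corrected=True)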
def distribute_thickness_interp(gdir, add_slope=True, smooth_radius=None,
                                varname_suffix=''):
    """Compute a thickness map by interpolating between centerlines and
    border.

    This is a rather cosmetic task, not relevant for OGGM but for ITMIX.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_slope : bool
        whether a corrective slope factor should be used or not
    smooth_radius : int
        pixel size of the gaussian smoothing. Default is to use
        cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero to
        suppress smoothing.
    varname_suffix : str
        add a suffix to the variable written in the file (for experiments)
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    # See if we have the masks, else compute them
    with utils.ncDataset(grids_file) as nc:
        has_masks = 'glacier_ext_erosion' in nc.variables
    if not has_masks:
        from oggm.core.gis import interpolation_masks
        interpolation_masks(gdir)

    with utils.ncDataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]
        glacier_ext = nc.variables['glacier_ext_erosion'][:]
        ice_divides = nc.variables['ice_divides'][:]
        if add_slope:
            slope_factor = nc.variables['slope_factor'][:]
        else:
            slope_factor = 1.

    # Thickness to interpolate
    thick = glacier_ext * np.NaN
    thick[(glacier_ext - ice_divides) == 1] = 0.
    # TODO: domain border too, for convenience for a start
    thick[0, :] = 0.
    thick[-1, :] = 0.
    thick[:, 0] = 0.
    thick[:, -1] = 0.

    # Along the lines
    cls = gdir.read_pickle('inversion_output')
    fls = gdir.read_pickle('inversion_flowlines')
    vs = []
    for cl, fl in zip(cls, fls):
        vs.extend(cl['volume'])
        x, y = utils.tuple2int(fl.line.xy)
        thick[y, x] = cl['thick']
    init_vol = np.sum(vs)

    # Interpolate
    xx, yy = gdir.grid.ij_coordinates
    pnan = np.nonzero(~np.isfinite(thick))
    pok = np.nonzero(np.isfinite(thick))
    points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
    inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
    thick[pnan] = griddata(points, np.ravel(thick[pok]), inter,
                           method='cubic')
    thick = thick.clip(0)

    # Slope
    thick *= slope_factor

    # Smooth
    dx = gdir.grid.dx
    if smooth_radius != 0:
        if smooth_radius is None:
            smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
        thick = gaussian_blur(thick, int(smooth_radius))
        thick = np.where(glacier_mask, thick, 0.)

    # Re-mask
    thick[glacier_mask == 0] = np.NaN
    assert np.all(np.isfinite(thick[glacier_mask == 1]))

    # Conserve volume
    tmp_vol = np.nansum(thick * dx**2)
    thick *= init_vol / tmp_vol

    # write
    with utils.ncDataset(grids_file, 'a') as nc:
        vn = 'distributed_thickness' + varname_suffix
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Distributed ice thickness'
        v[:] = thick

    return thick
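# The two distribution methods write to the same file, so they can be
# compared by using different suffixes (sketch):
#
#     distribute_thickness_per_altitude(gdir, varname_suffix='_alt')
#     distribute_thickness_interp(gdir, varname_suffix='_interp')
#     with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
#         diff = (nc.variables['distributed_thickness_alt'][:] -
#                 nc.variables['distributed_thickness_interp'][:])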
def __init__(self, gdir, mu_star=None, bias=None, prcp_fac=None,
             filename='climate_monthly', input_filesuffix='',
             repeat=False, ys=None, ye=None):
    """Initialize.

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier directory
    mu_star : float, optional
        set to the alternative value of mu* you want to use
        (the default is to use the calibrated value)
    bias : float, optional
        set to the alternative value of the calibration bias [mm we yr-1]
        you want to use (the default is to use the calibrated value)
        Note that this bias is *subtracted* from the computed MB. Indeed:
        BIAS = MODEL_MB - REFERENCE_MB.
    prcp_fac : float, optional
        set to the alternative value of the precipitation factor
        you want to use (the default is to use the calibrated value)
    filename : str, optional
        set to a different BASENAME if you want to use alternative climate
        data.
    input_filesuffix : str
        the file suffix of the input climate file
    repeat : bool
        Whether the climate period given by [ys, ye] should be repeated
        indefinitely in a circular way
    ys : int
        The start of the climate period where the MB model is valid
        (default: the period with available data)
    ye : int
        The end of the climate period where the MB model is valid
        (default: the period with available data)

    Attributes
    ----------
    temp_bias : float, default 0
        Add a temperature bias to the time series
    prcp_bias : float, default 1
        Precipitation factor to the time series (called bias for
        consistency with `temp_bias`)
    """
    super(PastMassBalance, self).__init__()
    self.valid_bounds = [-1e4, 2e4]  # in m
    if mu_star is None:
        df = pd.read_csv(gdir.get_filepath('local_mustar'))
        mu_star = df['mu_star'][0]
    if bias is None:
        if cfg.PARAMS['use_bias_for_run']:
            df = pd.read_csv(gdir.get_filepath('local_mustar'))
            bias = df['bias'][0]
        else:
            bias = 0.
    if prcp_fac is None:
        df = pd.read_csv(gdir.get_filepath('local_mustar'))
        prcp_fac = df['prcp_fac'][0]
    self.mu_star = mu_star
    self.bias = bias

    # Parameters
    self.t_solid = cfg.PARAMS['temp_all_solid']
    self.t_liq = cfg.PARAMS['temp_all_liq']
    self.t_melt = cfg.PARAMS['temp_melt']

    # Public attrs
    self.temp_bias = 0.
    self.prcp_bias = 1.
    self.repeat = repeat

    # Read file
    fpath = gdir.get_filepath(filename, filesuffix=input_filesuffix)
    with ncDataset(fpath, mode='r') as nc:
        # time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        ny, r = divmod(len(time), 12)
        if r != 0:
            raise ValueError('Climate data should be N full years')
        # This is where we switch to the hydro float year format
        # The last year of the series defines the hydro year labels
        self.years = np.repeat(np.arange(time[-1].year - ny + 1,
                                         time[-1].year + 1), 12)
        self.months = np.tile(np.arange(1, 13), ny)
        # Read timeseries
        self.temp = nc.variables['temp'][:]
        self.prcp = nc.variables['prcp'][:] * prcp_fac
        self.grad = nc.variables['grad'][:]
        self.ref_hgt = nc.ref_hgt
    self.ys = self.years[0] if ys is None else ys
    self.ye = self.years[-1] if ye is None else ye
def debris_to_gdir(gdir, debris_dir=pygem_prms.debris_fp, add_to_gridded=True,
                   hd_max=5, hd_min=0, ed_max=10, ed_min=0):
    """Reproject the debris thickness and enhancement factor files to the
    given glacier directory.

    Variables are exported as new files in the glacier directory. The debris
    data is reprojected from its original map projection to the local glacier
    map using bilinear interpolation.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """
    assert os.path.exists(debris_dir), \
        "Error: debris directory does not exist."

    hd_dir = debris_dir + 'hd_tifs/' + gdir.rgi_region + '/'
    ed_dir = debris_dir + 'ed_tifs/' + gdir.rgi_region + '/'

    glac_str_nolead = str(int(gdir.rgi_region)) + '.' + \
        gdir.rgi_id.split('-')[1].split('.')[1]

    # If debris thickness data exists, then write to glacier directory
    if os.path.exists(hd_dir + glac_str_nolead + '_hdts_m.tif'):
        hd_fn = hd_dir + glac_str_nolead + '_hdts_m.tif'
    elif os.path.exists(hd_dir + glac_str_nolead + '_hdts_m_extrap.tif'):
        hd_fn = hd_dir + glac_str_nolead + '_hdts_m_extrap.tif'
    else:
        hd_fn = None

    if hd_fn is not None:
        rasterio_to_gdir(gdir, hd_fn, 'debris_hd', resampling='bilinear')
    if add_to_gridded and hd_fn is not None:
        output_fn = gdir.get_filepath('debris_hd')

        # append the debris data to the gridded dataset
        with rasterio.open(output_fn) as src:
            grids_file = gdir.get_filepath('gridded_data')
            with ncDataset(grids_file, 'a') as nc:
                # Mask values
                glacier_mask = nc['glacier_mask'][:]
                data = src.read(1) * glacier_mask
                data[data > hd_max] = 0
                data[data < hd_min] = 0

                # Write data
                vn = 'debris_hd'
                if vn in nc.variables:
                    v = nc.variables[vn]
                else:
                    v = nc.createVariable(vn, 'f8', ('y', 'x', ), zlib=True)
                v.units = 'm'
                v.long_name = 'Debris thickness'
                v[:] = data

    # If debris enhancement factor data exists, then write to glacier
    # directory
    if os.path.exists(ed_dir + glac_str_nolead + '_meltfactor.tif'):
        ed_fn = ed_dir + glac_str_nolead + '_meltfactor.tif'
    elif os.path.exists(ed_dir + glac_str_nolead + '_meltfactor_extrap.tif'):
        ed_fn = ed_dir + glac_str_nolead + '_meltfactor_extrap.tif'
    else:
        ed_fn = None

    if ed_fn is not None:
        rasterio_to_gdir(gdir, ed_fn, 'debris_ed', resampling='bilinear')
    if add_to_gridded and ed_fn is not None:
        output_fn = gdir.get_filepath('debris_ed')

        # append the debris data to the gridded dataset
        with rasterio.open(output_fn) as src:
            grids_file = gdir.get_filepath('gridded_data')
            with ncDataset(grids_file, 'a') as nc:
                # Mask values
                glacier_mask = nc['glacier_mask'][:]
                data = src.read(1) * glacier_mask
                data[data > ed_max] = 1
                data[data < ed_min] = 1

                # Write data
                vn = 'debris_ed'
                if vn in nc.variables:
                    v = nc.variables[vn]
                else:
                    v = nc.createVariable(vn, 'f8', ('y', 'x', ), zlib=True)
                v.units = '-'
                v.long_name = 'Debris enhancement factor'
                v[:] = data
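# Usage sketch (assumptions: the PyGEM debris directory layout described
# above, and a `rasterio_to_gdir` helper in scope):
#
#     debris_to_gdir(gdir, hd_max=5, ed_max=10)
#     with ncDataset(gdir.get_filepath('gridded_data')) as nc:
#         hd = nc.variables['debris_hd'][:]   # thickness [m]
#         ed = nc.variables['debris_ed'][:]   # melt enhancement factor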
def process_cmip5_data(gdir, filesuffix='', fpath_temp=None,
                       fpath_precip=None, **kwargs):
    """Read, process and store the CMIP5 climate data for this glacier.

    It stores the data in a format that can be used by the OGGM mass balance
    model and in the glacier directory.

    Currently, this function is built for the CMIP5 projection simulations
    (https://pcmdi.llnl.gov/mips/cmip5/) from Taylor et al. (2012).

    Parameters
    ----------
    filesuffix : str
        append a suffix to the filename (useful for ensemble experiments).
    fpath_temp : str
        path to the temp file (default: cfg.PATHS['cmip5_temp_file'])
    fpath_precip : str
        path to the precip file (default: cfg.PATHS['cmip5_precip_file'])
    **kwargs: any kwarg to be passed to :py:func:`process_gcm_data`
    """

    # Get the path of GCM temperature & precipitation data
    if fpath_temp is None:
        if 'cmip5_temp_file' not in cfg.PATHS:
            raise ValueError("Need to set cfg.PATHS['cmip5_temp_file']")
        fpath_temp = cfg.PATHS['cmip5_temp_file']
    if fpath_precip is None:
        if 'cmip5_precip_file' not in cfg.PATHS:
            raise ValueError("Need to set cfg.PATHS['cmip5_precip_file']")
        fpath_precip = cfg.PATHS['cmip5_precip_file']

    # Read the GCM files
    tempds = xr.open_dataset(fpath_temp, decode_times=False)
    precipds = xr.open_dataset(fpath_precip, decode_times=False)

    with utils.ncDataset(fpath_temp, mode='r') as nc:
        time_units = nc.variables['time'].units
        calendar = nc.variables['time'].calendar
        time = netCDF4.num2date(nc.variables['time'][:], time_units)

    # Select for location
    lon = gdir.cenlon
    lat = gdir.cenlat

    # Conversion of the longitude
    if lon <= 0:
        lon += 360

    # Take the closest to the glacier
    # Should we consider GCM interpolation?
    temp = tempds.tas.sel(lat=lat, lon=lon, method='nearest')
    precip = precipds.pr.sel(lat=lat, lon=lon, method='nearest')

    # Time needs to be set to the start of the month
    time = [datetime(t.year, t.month, 1) for t in time]
    temp['time'] = time
    precip['time'] = time

    temp.lon.values = temp.lon if temp.lon <= 180 else temp.lon - 360
    precip.lon.values = precip.lon if precip.lon <= 180 else precip.lon - 360

    # Convert kg m-2 s-1 to mm mth-1 => 1 kg m-2 = 1 mm !!!
    if temp.time[0].dt.month != 1:
        raise ValueError('We expect the files to start in January!')
    ny, r = divmod(len(temp), 12)
    assert r == 0
    precip = precip * precip.time.dt.days_in_month * (60 * 60 * 24)

    tempds.close()
    precipds.close()

    # Here:
    # - time_unit='days since 1870-01-15 12:00:00'
    # - calendar='standard'
    process_gcm_data(gdir, filesuffix=filesuffix, prcp=precip, temp=temp,
                     time_unit=time_units, calendar=calendar, **kwargs)
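# Sketch: point cfg.PATHS at the GCM files first (file names below are
# placeholders), then process each glacier:
#
#     cfg.PATHS['cmip5_temp_file'] = '/path/to/tas_mon_CCSM4_rcp26.nc'
#     cfg.PATHS['cmip5_precip_file'] = '/path/to/pr_mon_CCSM4_rcp26.nc'
#     process_cmip5_data(gdir, filesuffix='_CCSM4_rcp26')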
def process_histalp_data(gdir): """Processes and writes the climate data for this glacier. Extracts the nearest timeseries and writes everything to a NetCDF file. """ if cfg.PARAMS['baseline_climate'] != 'HISTALP': raise ValueError("cfg.PARAMS['baseline_climate'] should be set to " "HISTALP.") # read the time out of the pure netcdf file ft = utils.get_histalp_file('tmp') fp = utils.get_histalp_file('pre') with utils.ncDataset(ft) as nc: vt = nc.variables['time'] assert vt[0] == 0 assert vt[-1] == vt.shape[0] - 1 t0 = vt.units.split(' since ')[1][:7] time_t = pd.date_range(start=t0, periods=vt.shape[0], freq='MS') with utils.ncDataset(fp) as nc: vt = nc.variables['time'] assert vt[0] == 0.5 assert vt[-1] == vt.shape[0] - .5 t0 = vt.units.split(' since ')[1][:7] time_p = pd.date_range(start=t0, periods=vt.shape[0], freq='MS') # Now open with salem nc_ts_tmp = salem.GeoNetcdf(ft, time=time_t) nc_ts_pre = salem.GeoNetcdf(fp, time=time_p) # set temporal subset for the ts data (hydro years) # the reference time is given by precip, which is shorter sm = cfg.PARAMS['hydro_month_nh'] em = sm - 1 if (sm > 1) else 12 yrs = nc_ts_pre.time.year y0, y1 = yrs[0], yrs[-1] if cfg.PARAMS['baseline_y0'] != 0: y0 = cfg.PARAMS['baseline_y0'] if cfg.PARAMS['baseline_y1'] != 0: y1 = cfg.PARAMS['baseline_y1'] nc_ts_tmp.set_period(t0='{}-{:02d}-01'.format(y0, sm), t1='{}-{:02d}-01'.format(y1, em)) nc_ts_pre.set_period(t0='{}-{:02d}-01'.format(y0, sm), t1='{}-{:02d}-01'.format(y1, em)) time = nc_ts_pre.time ny, r = divmod(len(time), 12) assert r == 0 # Units assert nc_ts_tmp._nc.variables['HSURF'].units.lower() in ['m', 'meters', 'meter', 'metres', 'metre'] assert nc_ts_tmp._nc.variables['T_2M'].units.lower() in ['degc', 'degrees', 'degrees celcius', 'degree', 'c'] assert nc_ts_pre._nc.variables['TOT_PREC'].units.lower() in ['kg m-2', 'l m-2', 'mm', 'millimeters', 'millimeter'] # geoloc lon = gdir.cenlon lat = gdir.cenlat nc_ts_tmp.set_subset(corners=((lon, lat), (lon, lat)), margin=1) nc_ts_pre.set_subset(corners=((lon, lat), (lon, lat)), margin=1) # read the data temp = nc_ts_tmp.get_vardata('T_2M') prcp = nc_ts_pre.get_vardata('TOT_PREC') hgt = nc_ts_tmp.get_vardata('HSURF') ref_lon = nc_ts_tmp.get_vardata('lon') ref_lat = nc_ts_tmp.get_vardata('lat') source = nc_ts_tmp._nc.title[:7] nc_ts_tmp._nc.close() nc_ts_pre._nc.close() # Should we compute the gradient? use_grad = cfg.PARAMS['temp_use_local_gradient'] igrad = None if use_grad: igrad = np.zeros(len(time)) * np.NaN for t, loct in enumerate(temp): slope, _, _, p_val, _ = stats.linregress(hgt.flatten(), loct.flatten()) igrad[t] = slope if (p_val < 0.01) else np.NaN gdir.write_monthly_climate_file(time, prcp[:, 1, 1], temp[:, 1, 1], hgt[1, 1], ref_lon[1], ref_lat[1], gradient=igrad) # metadata out = {'baseline_climate_source': source, 'baseline_hydro_yr_0': y0 + 1, 'baseline_hydro_yr_1': y1} gdir.write_pickle(out, 'climate_info')
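# Setup sketch for the task above (values are illustrative):
#
#     cfg.PARAMS['baseline_climate'] = 'HISTALP'
#     cfg.PARAMS['baseline_y0'] = 1850   # HISTALP is less reliable before
#     process_histalp_data(gdir)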