def test_run(self):
    entity = gpd.read_file(self.rgi_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # Climate tasks -- only data IO and tstar interpolation!
    tasks.process_dummy_cru_file(gdir, seed=0)
    tasks.local_t_star(gdir)
    tasks.mu_star_calibration(gdir)

    # Inversion tasks
    tasks.find_inversion_calving(gdir)

    # Final preparation for the run
    tasks.init_present_time_glacier(gdir)

    # check that calving happens in the real context as well
    tasks.run_constant_climate(gdir, bias=0, nyears=200,
                               temperature_bias=-0.5)
    with xr.open_dataset(gdir.get_filepath('model_diagnostics')) as ds:
        assert ds.calving_m3[-1] > 10
def test_coxe():
    testdir = os.path.join(get_test_dir(), 'tmp_coxe')
    utils.mkdir(testdir, reset=True)

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['clip_tidewater_border'] = False
    cfg.PARAMS['use_multiple_flowlines'] = False
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True
    cfg.PARAMS['trapezoid_lambdas'] = 1

    hef_file = get_demo_file('rgi_RGI50-01.10299.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.mass_conservation_inversion(gdir)
    inversion.filter_inversion_output(gdir)
    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')
    p = gdir.read_pickle('linear_mb_params')
    mb_mod = massbalance.LinearMassBalance(ela_h=p['ela_h'], grad=p['grad'])
    mb_mod.temp_bias = -0.3

    model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0,
                                    inplace=True, is_tidewater=True)

    # run
    model.run_until(200)
    assert model.calving_m3_since_y0 > 0

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig
def test_set_width(self):
    entity = gpd.read_file(self.rgi_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # Test that area and area-altitude elev is fine
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo_smoothed'][:]
    rhgt = topo[np.where(mask)][:]

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    bs = 100
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)

    centerlines.terminus_width_correction(gdir, new_width=714)

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    # Check that the width is ok
    np.testing.assert_allclose(fls[-1].widths[-1] * gdir.grid.dx, 714)

    # Check for area distrib
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)
def test_set_width(self):
    entity = gpd.read_file(self.rgi_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # Test that area and area-altitude elev is fine
    with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo_smoothed'][:]
    rhgt = topo[np.where(mask)][:]

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    bs = 100
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)

    centerlines.terminus_width_correction(gdir, new_width=714)

    fls = gdir.read_pickle('inversion_flowlines')
    hgt, widths = gdir.get_inversion_flowline_hw()

    # Check that the width is ok
    np.testing.assert_allclose(fls[-1].widths[-1] * gdir.grid.dx, 714)

    # Check for area distrib
    bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                     utils.nicenumber(np.max(hgt), bs) + 1,
                     bs)
    h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
    h2, b = np.histogram(rhgt, density=True, bins=bins)
    h1 = h1 / np.sum(h1)
    h2 = h2 / np.sum(h2)
    assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
    new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
    np.testing.assert_allclose(new_area, gdir.rgi_area_m2)
def init_hef(reset=False, border=40, logging_level='INFO'):

    from oggm.core import gis, inversion, climate, centerlines, flowline
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize(logging_level=logging_level)
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not reset:
        return gdir

    gis.define_glacier_region(gdir)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n + 2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n + 2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
def apparent_mb(gdir):
    """Compute the apparent mb from the calibrated mustar.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    """

    # Calibrated data
    df = pd.read_csv(gdir.get_filepath('local_mustar')).iloc[0]
    tstar = df['t_star']
    prcp_fac = df['prcp_fac']
    mu_star = df['mu_star']
    bias = df['bias']

    # Climate period
    mu_hp = int(cfg.PARAMS['mu_star_halfperiod'])
    yr = [tstar - mu_hp, tstar + mu_hp]

    # Do we have a calving glacier?
    cmb = calving_mb(gdir)

    # For each flowline compute the apparent MB
    fls = gdir.read_pickle('inversion_flowlines')

    # Reset flux
    for fl in fls:
        fl.flux = np.zeros(len(fl.surface_h))

    # Flowlines in order to be sure
    for fl in fls:
        y, t, p = mb_yearly_climate_on_height(gdir, fl.surface_h, prcp_fac,
                                              year_range=yr, flatten=False)
        fl.set_apparent_mb(np.mean(p, axis=1) - mu_star * np.mean(t, axis=1))

    # Sometimes, low lying tributaries have a physically inconsistent
    # mass balance. We should remove these, and start all over again until
    # all tributaries are consistent
    do_filter = [fl.flux_needed_correction for fl in fls]
    if cfg.PARAMS['filter_for_neg_flux'] and np.any(do_filter):
        assert not do_filter[-1]  # This should not happen
        # Keep only the good lines
        heads = [fl.orig_head for fl in fls if not fl.flux_needed_correction]
        centerlines.compute_centerlines(gdir, heads=heads, reset=True)
        centerlines.initialize_flowlines(gdir, reset=True)
        if gdir.has_file('downstream_line'):
            centerlines.compute_downstream_line(gdir, reset=True)
            centerlines.compute_downstream_bedshape(gdir, reset=True)
        centerlines.catchment_area(gdir, reset=True)
        centerlines.catchment_intersections(gdir, reset=True)
        centerlines.catchment_width_geom(gdir, reset=True)
        centerlines.catchment_width_correction(gdir, reset=True)
        local_mustar(gdir, tstar=tstar, bias=bias, prcp_fac=prcp_fac,
                     reset=True)
        # Ok, re-call ourselves
        return apparent_mb(gdir, reset=True)

    # Check and write
    aflux = fls[-1].flux[-1] * 1e-9 / cfg.RHO * gdir.grid.dx**2
    # If not marine and a bit far from zero, warning
    if cmb == 0 and not np.allclose(fls[-1].flux[-1], 0., atol=0.01):
        log.warning('(%s) flux should be zero, but is: '
                    '%.4f km3 ice yr-1', gdir.rgi_id, aflux)
    # If not marine and quite far from zero, error
    if cmb == 0 and not np.allclose(fls[-1].flux[-1], 0., atol=1):
        msg = ('({}) flux should be zero, but is: {:.4f} km3 ice yr-1'
               .format(gdir.rgi_id, aflux))
        raise RuntimeError(msg)
    gdir.write_pickle(fls, 'inversion_flowlines')
def test_run_until_and_store(self):
    """Test the volume/area scaling model against the oggm.FluxBasedModel.

    Both models run the Hintereisferner over the entire HistAlp climate
    period, initialized with the 2003 RGI outline and without spin up.

    The following two metrics are computed for length, area and volume:
    - correlation coefficient
    - relative RMSE, i.e. RMSE/mean(OGGM), whereby the VAS results are
      offset by their average difference to the OGGM results.
    """
    # read the Hintereisferner DEM
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid and glacier mask
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    # process the given climate file
    climate.process_custom_climate_data(gdir)
    # run center line preprocessing tasks
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # read reference glacier mass balance data
    mbdf = gdir.get_ref_mb_data()
    # compute the reference t* for the glacier
    # given the reference of mass balance measurements
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
    t_star, bias = res['t_star'], res['bias']

    # --------------------
    #  SCALING MODEL
    # --------------------

    # compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir, tstar=t_star, bias=bias)

    # instance the mass balance model
    vas_mbmod = vascaling.VAScalingMassBalance(gdir)

    # get reference area
    a0 = gdir.rgi_area_m2
    # get reference year
    y0 = gdir.read_json('climate_info')['baseline_hydro_yr_0']
    # get min and max glacier surface elevation
    h0, h1 = vascaling.get_min_max_elevation(gdir)

    vas_model = vascaling.VAScalingModel(year_0=y0, area_m2_0=a0,
                                         min_hgt=h0, max_hgt=h1,
                                         mb_model=vas_mbmod)

    # let the model run over the entire HistAlp climate period
    vas_ds = vas_model.run_until_and_store(2003)

    # ------
    #  OGGM
    # ------

    # compute local t* and the corresponding mu*
    climate.local_t_star(gdir, tstar=t_star, bias=bias)
    climate.mu_star_calibration(gdir)

    # instance the mass balance model
    mb_mod = massbalance.PastMassBalance(gdir)

    # perform ice thickness inversion
    inversion.prepare_for_inversion(gdir)
    inversion.mass_conservation_inversion(gdir)
    inversion.filter_inversion_output(gdir)

    # initialize present time glacier
    flowline.init_present_time_glacier(gdir)

    # instance flowline model
    fls = gdir.read_pickle('model_flowlines')
    y0 = gdir.read_json('climate_info')['baseline_hydro_yr_0']
    fl_mod = flowline.FluxBasedModel(flowlines=fls, mb_model=mb_mod, y0=y0)

    # run model and store output as xarray data set
    _, oggm_ds = fl_mod.run_until_and_store(2003)

    # temporal indices must be equal
    assert (vas_ds.time == oggm_ds.time).all()

    # specify which parameters to compare and their respective correlation
    # coefficients and rmsd values
    params = ['length_m', 'area_m2', 'volume_m3']
    corr_coeffs = np.array([0.96, 0.90, 0.93])
    rmsds = np.array([0.43e3, 0.14e6, 0.03e9])

    # compare given parameters
    for param, cc, rmsd in zip(params, corr_coeffs, rmsds):
        # correlation coefficient
        assert corrcoef(oggm_ds[param].values, vas_ds[param].values) >= cc
        # root mean squared deviation
        rmsd_an = rmsd_bc(oggm_ds[param].values, vas_ds[param].values)
        assert rmsd_an <= rmsd
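# The docstring above describes offsetting the VAS results by their average
# difference to the OGGM results before computing the RMSD. A minimal sketch
# of such a bias-corrected RMSD follows; `rmsd_bc_sketch` is a hypothetical
# stand-in for the `rmsd_bc` helper used in the test, whose exact
# implementation is not shown here.
import numpy as np


def rmsd_bc_sketch(ref, data):
    """Bias-corrected RMSD (assumed behaviour of the `rmsd_bc` helper):
    remove the mean offset between the two series, then compute the
    root mean squared deviation."""
    ref = np.asarray(ref, dtype=float)
    data = np.asarray(data, dtype=float)
    offset = np.mean(data) - np.mean(ref)
    return np.sqrt(np.mean((ref - (data - offset))**2))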
# initialize the GlacierDirectory
gdir = oggm.GlacierDirectory(entity, base_dir=testdir)
# define the local grid and glacier mask
gis.define_glacier_region(gdir, entity=entity)
gis.glacier_masks(gdir)

from oggm.core import climate
# process the given climate file
climate.process_custom_climate_data(gdir)

from oggm.core import centerlines
# run center line preprocessing tasks
centerlines.compute_centerlines(gdir)
centerlines.initialize_flowlines(gdir)
centerlines.compute_downstream_line(gdir)
centerlines.compute_downstream_bedshape(gdir)
centerlines.catchment_area(gdir)
centerlines.catchment_intersections(gdir)
centerlines.catchment_width_geom(gdir)
centerlines.catchment_width_correction(gdir)

# --------------------
#  MASS BALANCE TASKS
# --------------------

# read reference glacier mass balance data
mbdf = gdir.get_ref_mb_data()
# compute the reference t* for the glacier
# given the reference of mass balance measurements
res = climate.t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
t_star, bias = res['t_star'], res['bias']
def init_hef(reset=False, border=40, invert_with_sliding=True,
             invert_with_rectangular=True):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not invert_with_sliding:
        testdir += '_withoutslide'
    if not invert_with_rectangular:
        testdir += '_withoutrectangular'
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = border
    cfg.PARAMS['use_optimized_inversion_params'] = True

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)
    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    climate.mu_candidates(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf)
    climate.local_mustar(gdir, tstar=res['t_star'][-1],
                         bias=res['bias'][-1], prcp_fac=res['prcp_fac'])
    climate.apparent_mb(gdir)

    inversion.prepare_for_inversion(
        gdir, add_debug_var=True,
        invert_with_rectangular=invert_with_rectangular)

    ref_v = 0.573 * 1e9

    if invert_with_sliding:
        def to_optimize(x):
            # For backwards compat
            _fd = 1.9e-24 * x[0]
            glen_a = (cfg.N + 2) * _fd / 2.
            fs = 5.7e-20 * x[1]
            v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 10), (0.01, 10)),
                                    tol=1e-4)['x']
        _fd = 1.9e-24 * out[0]
        glen_a = (cfg.N + 2) * _fd / 2.
        fs = 5.7e-20 * out[1]
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    else:
        def to_optimize(x):
            glen_a = cfg.A * x[0]
            v, _ = inversion.mass_conservation_inversion(gdir, fs=0.,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1],
                                    bounds=((0.01, 10),),
                                    tol=1e-4)['x']
        glen_a = cfg.A * out[0]
        fs = 0.
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    try:
        d['factor_fs'] = out[1]
    except IndexError:
        d['factor_fs'] = 0.
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
def init_hef(reset=False, border=40):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n + 2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n + 2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
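# The `to_optimize` helpers above scale a legacy deformation parameter `_fd`
# and convert it to Glen's creep parameter via glen_a = (n + 2) * _fd / 2.
# A small worked example of that conversion follows; the value 1.9e-24 is
# taken from the code above, and n = 3 is assumed as the usual Glen flow-law
# exponent (OGGM's default).
glen_n = 3.0                     # assumed Glen flow-law exponent
fd = 1.9e-24                     # legacy deformation parameter from the code above
glen_a = (glen_n + 2) * fd / 2.  # = 4.75e-24, unscaled Glen A before optimization
fs = 5.7e-20                     # sliding-parameter scale used above
print(glen_a, fs)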
def mu_star_calibration(gdir):
    """Compute the flowlines' mu* and the associated apparent mass balance.

    If low-lying tributaries have a physically inconsistent mass balance,
    this function will either filter them out or calibrate each flowline
    with a specific mu*. The latter is the default and is recommended.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    """

    # Interpolated data
    df = gdir.read_json('local_mustar')
    t_star = df['t_star']
    bias = df['bias']

    # For each flowline compute the apparent MB
    fls = gdir.read_pickle('inversion_flowlines')
    # If someone calls the task a second time we need to reset this
    for fl in fls:
        fl.mu_star_is_valid = False

    force_mu = 0 if df['mu_star_glacierwide'] == 0 else None

    # Let's go
    _recursive_mu_star_calibration(gdir, fls, t_star, force_mu=force_mu)

    # If the user wants to filter the bad ones we remove them and start all
    # over again until all tributaries are physically consistent with one mu
    # This should only work if cfg.PARAMS['correct_for_neg_flux'] == False
    do_filter = [fl.flux_needs_correction for fl in fls]
    if cfg.PARAMS['filter_for_neg_flux'] and np.any(do_filter):
        assert not do_filter[-1]  # This should not happen
        # Keep only the good lines
        # TODO: this should use centerline.line_inflows for more efficiency!
        heads = [fl.orig_head for fl in fls if not fl.flux_needs_correction]
        centerlines.compute_centerlines(gdir, heads=heads, reset=True)
        centerlines.initialize_flowlines(gdir, reset=True)
        if gdir.has_file('downstream_line'):
            centerlines.compute_downstream_line(gdir, reset=True)
            centerlines.compute_downstream_bedshape(gdir, reset=True)
        centerlines.catchment_area(gdir, reset=True)
        centerlines.catchment_intersections(gdir, reset=True)
        centerlines.catchment_width_geom(gdir, reset=True)
        centerlines.catchment_width_correction(gdir, reset=True)
        local_t_star(gdir, tstar=t_star, bias=bias, reset=True)
        # Ok, re-call ourselves
        return mu_star_calibration(gdir, reset=True)

    # Check and write
    rho = cfg.PARAMS['ice_density']
    aflux = fls[-1].flux[-1] * 1e-9 / rho * gdir.grid.dx**2
    # If not marine and a bit far from zero, warning
    cmb = calving_mb(gdir)
    if cmb == 0 and not np.allclose(fls[-1].flux[-1], 0., atol=0.01):
        log.info('(%s) flux should be zero, but is: '
                 '%.4f km3 ice yr-1', gdir.rgi_id, aflux)
    # If not marine and quite far from zero, error
    if cmb == 0 and not np.allclose(fls[-1].flux[-1], 0., atol=1):
        msg = ('({}) flux should be zero, but is: {:.4f} km3 ice yr-1'
               .format(gdir.rgi_id, aflux))
        raise MassBalanceCalibrationError(msg)
    gdir.write_pickle(fls, 'inversion_flowlines')

    # Store diagnostics
    mus = []
    weights = []
    for fl in fls:
        mus.append(fl.mu_star)
        weights.append(np.sum(fl.widths))
    df['mu_star_per_flowline'] = mus
    df['mu_star_flowline_avg'] = np.average(mus, weights=weights)
    all_same = np.allclose(mus, mus[0], atol=1e-3)
    df['mu_star_allsame'] = all_same
    if all_same:
        if not np.allclose(df['mu_star_flowline_avg'],
                           df['mu_star_glacierwide'],
                           atol=1e-3):
            raise MassBalanceCalibrationError('Unexpected difference between '
                                              'glacier wide mu* and the '
                                              'flowlines mu*.')
    # Write
    gdir.write_json(df, 'local_mustar')
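# For context, a minimal usage sketch of the task defined above, following
# the same entity-task ordering used in the other snippets in this file
# (the climate task is a placeholder; any supported climate task would do).
from oggm.core import climate

climate.process_custom_climate_data(gdir)          # or another climate task
mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']    # reference mass balance
res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
climate.mu_star_calibration(gdir)                  # the task defined above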
def compare(rgi_id, glacier_name):
    """Compare the VAS model to the OGGM flowline model for one glacier.

    :param rgi_id: (string) RGI identifier of the glacier
    :param glacier_name: (string) name of the glacier, used for plot labels
    :return:
    """

    # ---------------------
    #  PREPROCESSING TASKS
    # ---------------------

    # create test directory
    wdir = os.path.join(os.path.abspath('.'), 'comparison_wdir')
    if not os.path.exists(wdir):
        os.makedirs(wdir)
    shutil.rmtree(wdir)
    os.makedirs(wdir)

    # load default parameter file
    cfg.initialize()

    # RGI entity
    # get/download the rgi entity including the outline shapefile
    rgi_df = utils.get_rgi_glacier_entities([rgi_id])
    # set name, since not delivered with RGI
    if rgi_df.loc[int(rgi_id[-5:]) - 1, 'Name'] is None:
        rgi_df.loc[int(rgi_id[-5:]) - 1, 'Name'] = glacier_name
    # select single entry
    rgi_entity = rgi_df.iloc[0]

    # GlacierDirectory
    # specify the working directory and define the glacier directory
    cfg.PATHS['working_dir'] = wdir
    gdir = oggm.GlacierDirectory(rgi_entity)

    # DEM and GIS tasks
    # get the path to the DEM file (will download if necessary)
    dem = utils.get_topo_file(gdir.cenlon, gdir.cenlat)
    # set path in config file
    cfg.PATHS['dem_file'] = dem[0][0]
    cfg.PARAMS['border'] = 10
    cfg.PARAMS['use_intersects'] = False
    # run GIS tasks
    gis.define_glacier_region(gdir, entity=rgi_entity)
    gis.glacier_masks(gdir)

    # Climate data, using HistAlp
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # climate records before 1850 are hardly reliable, which is not so
    # drastic for qualitative experiments (could be driven with random
    # climate anyway)
    # cfg.PARAMS['baseline_y0'] = 1850
    # change hyper parameters for HistAlp
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    # run climate task
    climate.process_histalp_data(gdir)

    # run center line preprocessing tasks
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)

    # --------------------
    #  SCALING MODEL
    # --------------------

    # compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir)

    # instance the mass balance model
    vas_mb_mod = vascaling.VAScalingMassBalance(gdir)

    # get reference area
    a0 = gdir.rgi_area_m2
    # get reference years
    y0 = gdir.read_pickle('climate_info')['baseline_hydro_yr_0']
    y1 = gdir.read_pickle('climate_info')['baseline_hydro_yr_1']
    # get min and max glacier surface elevation
    h0, h1 = vascaling.get_min_max_elevation(gdir)

    # instance VAS model
    vas_model = vascaling.VAScalingModel(year_0=y0, area_m2_0=a0,
                                         min_hgt=h0, max_hgt=h1,
                                         mb_model=vas_mb_mod)
    # run model over the entire HistAlp climate period
    vas_df = vas_model.run_and_store(y1, reset=True)
    # get relevant parameters
    years_vas = vas_df.index.values
    length_m_vas = vas_df.length_m.values
    area_m2_vas = vas_df.area_m2.values
    volume_m3_vas = vas_df.volume_m3.values

    # ------
    #  OGGM
    # ------

    # compute local t* and the corresponding mu*
    climate.local_t_star(gdir)
    climate.mu_star_calibration(gdir)

    # instance the mass balance model
    mb_mod = massbalance.PastMassBalance(gdir)

    # run inversion tasks
    inversion.prepare_for_inversion(gdir)
    inversion.mass_conservation_inversion(gdir)
    inversion.filter_inversion_output(gdir)

    # initialize present time glacier
    flowline.init_present_time_glacier(gdir)

    # instance flowline model
    fls = gdir.read_pickle('model_flowlines')
    y0 = gdir.read_pickle('climate_info')['baseline_hydro_yr_0']
    y1 = gdir.read_pickle('climate_info')['baseline_hydro_yr_1']
    fl_mod = flowline.FluxBasedModel(flowlines=fls, mb_model=mb_mod, y0=y0)

    # run model and store output as xarray data set
    _, oggm_ds = fl_mod.run_until_and_store(y1)

    years_oggm = oggm_ds.hydro_year.values
    # annual index must be equal
    np.testing.assert_array_equal(years_oggm, years_vas)
    length_m_oggm = oggm_ds.length_m.values
    area_m2_oggm = oggm_ds.area_m2.values
    volume_m3_oggm = oggm_ds.volume_m3.values

    # define column names for DataFrame
    names = ['length_vas', 'length_oggm', 'area_vas', 'area_oggm',
             'volume_vas', 'volume_oggm']
    # combine glacier geometries into DataFrame
    df = pd.DataFrame(np.array([length_m_vas, length_m_oggm,
                                area_m2_vas, area_m2_oggm,
                                volume_m3_vas, volume_m3_oggm]).T,
                      index=years_vas, columns=names)
    # save to file
    store = True
    if store:
        # define path and file names
        folder = '/Users/oberrauch/work/master/data/'
        df.to_csv(folder + 'run_comparison.csv')

    def plot_both(vas_df, oggm_df, ref=None, correct_bias=False,
                  title='', ylabel='', file_path='', exp=0):
        """Plot geometric parameters of both models.

        If a `file_path` is given, the figure will be saved.

        :param vas_df: (pandas.Series) geometric glacier parameter of the
            VAS model
        :param oggm_df: (pandas.Series) geometric glacier parameter of OGGM
        :param ref: (pandas.Series) measured glacier parameter, optional
        :param correct_bias: (bool) plot bias corrected VAS parameter,
            optional
        :param title: (string) figure title, optional
        :param ylabel: (string) label for y-axis, optional
        :param file_path: (string) where to store the figure, optional
        :param exp: (int) exponent for labels in scientific notation,
            optional
        """
        beamer = True
        if beamer:
            mpl.rc('axes', titlesize=18)
            mpl.rc('axes', labelsize=14)
            mpl.rc('xtick', labelsize=14)
            mpl.rc('ytick', labelsize=14)
            mpl.rc('legend', fontsize=10)
        # create figure and first axes
        fig = plt.figure(figsize=[6, 4])
        ax = fig.add_axes([0.15, 0.1, 0.8, 0.8])

        # define colors
        c1 = 'C0'
        c2 = 'C1'
        c3 = 'C3'
        # plot VAS and OGGM parameters
        ax.plot(oggm_df.index, oggm_df.values, c=c2, label='OGGM')
        ax.plot(vas_df.index, vas_df.values, c=c1, label='VAS')
        if ref is not None:
            # plot reference parameter if given
            ax.plot(ref.index, ref.values, c=c3, label='measurements')
        if correct_bias:
            # plot bias corrected VAS
            df_ = pd.DataFrame([oggm_df, vas_df]).T
            bias = vas_df.values - df_.mean().diff().iloc[1]
            ax.plot(vas_df.index, bias, c=c1, ls='--',
                    label='VAS, bias corrected')
            # add RMSD as text
            ax.text(0.05, 0.05,
                    'RMSD: {:.1e}'.format(utils.rmsd(oggm_df, bias)),
                    transform=plt.gca().transAxes)

        # add correlation coefficient as text
        ax.text(0.05, 0.11,
                'Corr. Coef.: {:.2f}'.format(utils.corrcoef(oggm_df, vas_df)),
                transform=plt.gca().transAxes)

        # add title, labels, legend
        ax.set_title(title)
        ax.set_ylabel(ylabel)
        ax.legend()

        import matplotlib.ticker

        class OOMFormatter(matplotlib.ticker.ScalarFormatter):
            """ScalarFormatter with a fixed order of magnitude."""

            def __init__(self, order=0, fformat="%1.1f", offset=False,
                         mathText=False):
                self.oom = order
                self.fformat = fformat
                matplotlib.ticker.ScalarFormatter.__init__(
                    self, useOffset=offset, useMathText=mathText)

            def _set_orderOfMagnitude(self, nothing):
                self.orderOfMagnitude = self.oom

            def _set_format(self, vmin, vmax):
                self.format = self.fformat
                if self._useMathText:
                    self.format = '$%s$' % \
                        matplotlib.ticker._mathdefault(self.format)

        # use scientific notation with a fixed exponent on the y-axis
        ax.yaxis.set_major_formatter(OOMFormatter(exp, "%1.2f"))

        # store to file
        if file_path:
            plt.savefig(file_path, bbox_inches='tight',
                        format=file_path.split('.')[-1])

    # specify plot directory
    folder = '/Users/oberrauch/work/master/plots/'

    # plot length
    plot_both(df.length_vas, df.length_oggm, correct_bias=True,
              title='Glacier length - {}'.format(glacier_name),
              ylabel=r'Length [m]',
              file_path=os.path.join(folder, '{}_length.pdf'.format(rgi_id)),
              exp=3)
    # plot area
    plot_both(df.area_vas, df.area_oggm, correct_bias=True,
              title='Surface area - {}'.format(glacier_name),
              ylabel=r'Area [m$^2$]',
              file_path=os.path.join(folder, '{}_area.pdf'.format(rgi_id)),
              exp=6)
    # plot volume
    plot_both(df.volume_vas, df.volume_oggm, correct_bias=True,
              title='Glacier volume - {}'.format(glacier_name),
              ylabel=r'Volume [m$^3$]',
              file_path=os.path.join(folder, '{}_volume.pdf'.format(rgi_id)),
              exp=9)
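# A minimal usage sketch of the comparison script above. The RGI id below is
# illustrative only (it is commonly used for Hintereisferner); any valid RGI
# id with HistAlp coverage would work, and the hard-coded output folders in
# compare() would need to exist on the machine running it.
if __name__ == '__main__':
    compare('RGI60-11.00897', 'Hintereisferner')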