def test_specific_mb(self):
    """Compare the specific mass balance to the one computed using the
    OGGM function of the PastMassBalance model.
    """
    # run all needed prepro tasks
    gdir = self._setup_mb_test()

    # instance mb models
    vas_mbmod = vascaling.VAScalingMassBalance(gdir)
    past_mbmod = massbalance.PastMassBalance(gdir)

    # get relevant glacier surface elevation
    min_hgt, max_hgt = vascaling.get_min_max_elevation(gdir)

    # define temporal range
    ys = 1802
    ye = 2003
    years = np.arange(ys, ye + 1)

    # get flow lines
    fls = gdir.read_pickle('inversion_flowlines')

    # create empty containers
    past_mb = np.empty(years.size)
    vas_mb = np.empty(years.size)
    # get specific mass balance for all years
    for i, year in enumerate(years):
        past_mb[i] = past_mbmod.get_specific_mb(fls=fls, year=year)
        vas_mb[i] = vas_mbmod.get_specific_mb(min_hgt, max_hgt, year)

    # compute and check correlation
    assert corrcoef(past_mb, vas_mb) >= 0.94

    # relative error of average spec mb
    # TODO: does this even make any sense?!
    assert np.abs(rel_err(past_mb.mean(), vas_mb.mean())) <= 0.36

    # check correlation of positive and negative mb years
    assert corrcoef(np.sign(past_mb), np.sign(vas_mb)) >= 0.72

    # compare to reference mb measurements
    mbs = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    assert corrcoef(vas_mb[np.in1d(years, mbs.index)], mbs) >= 0.79
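# The assertions above use `corrcoef` and `rel_err` helpers whose definitions
# are not part of this section. Below is a minimal sketch of what they are
# assumed to compute, consistent with how they are called; the signatures and
# formulas are assumptions, not the actual test-module implementations.
def corrcoef(a, b):
    # Assumed: scalar Pearson correlation between two samples, i.e. the
    # off-diagonal entry of the 2x2 matrix returned by np.corrcoef.
    return np.corrcoef(a, b)[0, 1]


def rel_err(ref, data):
    # Assumed: relative error of `data` with respect to the reference `ref`.
    return (data - ref) / ref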
def test_annual_climate(self):
    """Test my routine against the corresponding OGGM routine from the
    `PastMassBalance()` model.
    """
    # run all needed prepro tasks
    gdir = self._setup_mb_test()

    # instance the mass balance models
    vas_mbmod = vascaling.VAScalingMassBalance(gdir)
    past_mbmod = massbalance.PastMassBalance(gdir)

    # get relevant glacier surface elevations
    min_hgt, max_hgt = vascaling.get_min_max_elevation(gdir)
    heights = np.array([min_hgt, (min_hgt + max_hgt) / 2, max_hgt])

    # specify an (arbitrary) year
    year = 1975

    # get mass balance relevant climate information
    temp_for_melt_vas, prcp_solid_vas = \
        vas_mbmod.get_annual_climate(min_hgt, max_hgt, year)
    _, temp_for_melt_oggm, _, prcp_solid_oggm = \
        past_mbmod.get_annual_climate(heights, year)

    # sum my (monthly) values for comparison
    temp_for_melt_vas = temp_for_melt_vas.sum()
    prcp_solid_vas = prcp_solid_vas.sum()

    # the computed positive terminus melting temperature must be equal for
    # both methods, i.e. temp_VAS == temp_OGGM
    np.testing.assert_allclose(temp_for_melt_vas, temp_for_melt_oggm[0],
                               rtol=1e-3)

    # the glacier averaged solid precipitation amount must be greater than
    # (or equal to) the solid precipitation at the terminus elevation
    assert md(prcp_solid_oggm[0], prcp_solid_vas) >= 0

    # the glacier averaged solid precipitation amount must be comparable to
    # the solid precipitation at the average glacier surface elevation
    assert rel_err(prcp_solid_oggm[1], prcp_solid_vas) <= 0.15

    # the glacier averaged solid precipitation amount must be less than (or
    # equal to) the solid precipitation at the maximum glacier elevation
    assert md(prcp_solid_oggm[2], prcp_solid_vas) <= 0
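# The precipitation checks above also rely on an `md` (mean deviation) helper
# that is not defined in this section. A minimal sketch of its assumed
# behaviour, consistent with its use on scalar values here:
def md(ref, data):
    # Assumed: mean deviation of `data` from the reference `ref`; for the
    # scalar inputs used above this reduces to the signed difference data - ref.
    return np.mean(np.asarray(data) - np.asarray(ref))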
def _find_inital_glacier(final_model, firstguess_mb, y0, y1,
                         rtol=0.01, atol=10, max_ite=100,
                         init_bias=0., equi_rate=0.0005,
                         ref_area=None):
    """Iterative search for a plausible glacier state at the starting time."""

    # Objective
    if ref_area is None:
        ref_area = final_model.area_m2
    log.info('find_inital_glacier in year %d. Ref area to catch: %.3f km2. '
             'Tolerance: %.2f %%',
             np.int64(y0), ref_area * 1e-6, rtol * 100)

    # are we trying to grow or to shrink the glacier?
    prev_model = copy.deepcopy(final_model)
    prev_fls = copy.deepcopy(prev_model.fls)
    prev_model.reset_y0(y0)
    prev_model.run_until(y1)
    prev_area = prev_model.area_m2

    # Just in case we already hit the correct starting state
    if np.allclose(prev_area, ref_area, atol=atol, rtol=rtol):
        model = copy.deepcopy(final_model)
        model.reset_y0(y0)
        log.info('find_inital_glacier: initial starting glacier converges '
                 'to itself with a final dif of %.2f %%',
                 utils.rel_err(ref_area, prev_area) * 100)
        return 0, None, model

    if prev_area < ref_area:
        sign_mb = 1.
        log.info('find_inital_glacier, ite: %d. Glacier would be too '
                 'small by %.2f %%. Continue', 0,
                 utils.rel_err(ref_area, prev_area) * 100)
    else:
        sign_mb = -1.
        log.info('find_inital_glacier, ite: %d. Glacier would be too '
                 'big by %.2f %%. Continue', 0,
                 utils.rel_err(ref_area, prev_area) * 100)

    # Log prefix
    logtxt = 'find_inital_glacier'

    # Loop for at most max_ite iterations
    c = 0
    bias_step = 50.
    mb_bias = init_bias - bias_step
    reduce_step = 5.

    mb = copy.deepcopy(firstguess_mb)
    mb.set_bias(sign_mb * mb_bias)
    grow_model = FluxBasedModel(copy.deepcopy(final_model.fls),
                                mb_model=mb, fs=final_model.fs,
                                glen_a=final_model.glen_a,
                                min_dt=final_model.min_dt,
                                max_dt=final_model.max_dt)
    while c < max_ite:
        c += 1

        # Grow with the new bias until equilibrium
        mb_bias += bias_step
        mb.set_bias(sign_mb * mb_bias)
        log.info(logtxt + ', ite: %d. New bias: %.0f', c, sign_mb * mb_bias)
        grow_model.reset_flowlines(copy.deepcopy(prev_fls))
        grow_model.reset_y0(0.)
        grow_model.run_until_equilibrium(rate=equi_rate)
        log.info(logtxt + ', ite: %d. Grew to equilibrium for %d years, '
                 'new area: %.3f km2', c, grow_model.yr, grow_model.area_km2)

        # Shrink: run the candidate state forward from y0 to y1
        new_fls = copy.deepcopy(grow_model.fls)
        new_model = copy.deepcopy(final_model)
        new_model.reset_flowlines(copy.deepcopy(new_fls))
        new_model.reset_y0(y0)
        new_model.run_until(y1)
        new_area = new_model.area_m2

        # Maybe we're done?
        if np.allclose(new_area, ref_area, atol=atol, rtol=rtol):
            new_model.reset_flowlines(new_fls)
            new_model.reset_y0(y0)
            log.info(logtxt + ', ite: %d. Converged with a '
                     'final dif of %.2f %%', c,
                     utils.rel_err(ref_area, new_area) * 100)
            return c, mb_bias, new_model

        # See if we went a step too far or if we have to continue growing
        do_cont_1 = (sign_mb > 0.) and (new_area < ref_area)
        do_cont_2 = (sign_mb < 0.) and (new_area > ref_area)
        if do_cont_1 or do_cont_2:
            # Reset the previous state and continue
            prev_fls = new_fls
            log.info(logtxt + ', ite: %d. Dif of %.2f %%. Continue', c,
                     utils.rel_err(ref_area, new_area) * 100)
            continue

        # Ok, we went too far. Reduce the bias step but keep the previous state
        mb_bias -= bias_step
        bias_step /= reduce_step
        log.info(logtxt + ', ite: %d. Went too far.', c)
        if bias_step < 0.1:
            break

    raise RuntimeError('Did not converge after {} iterations'.format(c))
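# Hypothetical usage sketch (not from the source): search for a glacier state
# in year `y0` that, when run forward to `y1` under the first-guess mass
# balance, reproduces the reference area of `final_model` within 1 %.
# `final_model` and `firstguess_mb` stand for an already calibrated
# FluxBasedModel and its mass balance model; the year range is illustrative.
#
#     n_ite, mb_bias, past_model = _find_inital_glacier(
#         final_model, firstguess_mb, y0=1850, y1=2003,
#         rtol=0.01, atol=10, max_ite=100)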