def up_to_distrib(reset=False):
    # for cross val basically
    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PATHS['climate_file'] = ''
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)
        workflow.execute_entity_task(tasks.apparent_mb, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
def climate_tasks(gdirs):
    """Shortcut function: run all climate related tasks.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    """
    # If not iterable it's ok
    try:
        len(gdirs)
    except TypeError:
        gdirs = [gdirs]

    # Which climate should we use?
    if cfg.PARAMS['baseline_climate'] == 'CRU':
        _process_task = tasks.process_cru_data
    elif cfg.PARAMS['baseline_climate'] == 'CUSTOM':
        _process_task = tasks.process_custom_climate_data
    elif cfg.PARAMS['baseline_climate'] == 'HISTALP':
        _process_task = tasks.process_histalp_data
    else:
        raise ValueError('baseline_climate parameter not understood')

    execute_entity_task(_process_task, gdirs)

    # Then, calibration?
    if cfg.PARAMS['run_mb_calibration']:
        tasks.compute_ref_t_stars(gdirs)

    # Mustar and the apparent mass-balance
    execute_entity_task(tasks.local_t_star, gdirs)
    execute_entity_task(tasks.mu_star_calibration, gdirs)
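# A minimal usage sketch (an assumption, not part of the original source):
# climate_tasks() above is driven entirely by cfg.PARAMS, so a caller only
# needs to set the baseline climate and the calibration flag before invoking
# it. `my_gdirs` is a hypothetical list of GlacierDirectory objects prepared
# upstream (e.g. by workflow.init_glacier_regions).

def run_climate_example(my_gdirs):
    cfg.PARAMS['baseline_climate'] = 'CRU'   # or 'CUSTOM' / 'HISTALP'
    cfg.PARAMS['run_mb_calibration'] = True  # recompute the reference t* list
    climate_tasks(my_gdirs)                  # process climate, then calibrate mu*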
def climate_tasks(gdirs):
    """Prepare the climate data."""

    # Only global tasks
    tasks.distribute_climate_data(gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
def climate_tasks(gdirs):
    """Helper function: run all climate tasks."""

    # If not iterable it's ok
    try:
        len(gdirs)
    except TypeError:
        gdirs = [gdirs]

    # I don't know where this logic is best placed...
    if (('climate_file' in cfg.PATHS) and
            os.path.exists(cfg.PATHS['climate_file'])):
        _process_task = tasks.process_custom_climate_data
    else:
        # OK, so use the default CRU "high-resolution" method
        _process_task = tasks.process_cru_data
    execute_entity_task(_process_task, gdirs)

    # Then, global tasks
    if cfg.PARAMS['run_mb_calibration']:
        tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

    # And the apparent mass-balance
    execute_entity_task(tasks.apparent_mb, gdirs)
def up_to_distrib(reset=False):
    # for cross val basically
    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
def up_to_distrib(reset=False):
    # for cross val basically
    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
def climate_tasks(gdirs):
    """Prepare the climate data."""

    # I don't know where this logic is best placed...
    if ('climate_file' in cfg.PATHS) and \
            os.path.exists(cfg.PATHS['climate_file']):
        _process_task = tasks.process_custom_climate_data
    else:
        # OK, so use the default CRU "high-resolution" method
        _process_task = tasks.process_cru_data
    execute_entity_task(_process_task, gdirs)

    # Then, only global tasks
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
def test_crossval(self):

    gdirs = up_to_distrib()

    # before crossval
    refmustars = []
    for gdir in gdirs:
        tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
        refmustars.append(tdf['mu_star'].values[0])

    tasks.crossval_t_stars(gdirs)
    file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
    df = pd.read_csv(file, index_col=0)

    # after crossval we need to rerun
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

    # see if the process didn't break anything
    mustars = []
    for gdir in gdirs:
        tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
        mustars.append(tdf['mu_star'].values[0])
    np.testing.assert_allclose(refmustars, mustars)

    # make some mb tests
    from oggm.core.models.massbalance import PastMassBalanceModel
    for rid in df.index:
        gdir = [g for g in gdirs if g.rgi_id == rid][0]
        h, w = gdir.get_inversion_flowline_hw()
        cfg.PARAMS['use_bias_for_run'] = False
        mbmod = PastMassBalanceModel(gdir)
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE'].to_frame(name='ref')
        for yr in mbdf.index:
            mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
        mm = mbdf.mean()
        np.testing.assert_allclose(df.loc[rid].bias,
                                   mm['mine'] - mm['ref'],
                                   atol=1e-3)
        cfg.PARAMS['use_bias_for_run'] = True
        mbmod = PastMassBalanceModel(gdir)
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE'].to_frame(name='ref')
        for yr in mbdf.index:
            mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
        mm = mbdf.mean()
        np.testing.assert_allclose(mm['mine'], mm['ref'], atol=1e-3)
def calibration(gdirs, xval, major=0):

    # Climate tasks
    if mbcfg.PARAMS['histalp']:
        cfg.PATHS['climate_file'] = mbcfg.PATHS['histalpfile']
        execute_entity_task(tasks.process_custom_climate_data, gdirs)
    else:
        execute_entity_task(tasks.process_cru_data, gdirs)

    with utils.DisableLogger():
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

    # do the crossvalidation
    xval = quick_crossval(gdirs, xval, major=major)

    return xval
def climate_tasks(gdirs):
    """Shortcut function: run all climate related tasks.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    """
    # Process climate data
    execute_entity_task(tasks.process_climate_data, gdirs)

    # Then, calibration?
    if cfg.PARAMS['run_mb_calibration']:
        tasks.compute_ref_t_stars(gdirs)

    # Mustar and the apparent mass-balance
    execute_entity_task(tasks.local_t_star, gdirs)
    execute_entity_task(tasks.mu_star_calibration, gdirs)
def calibration(gdirs, xval, major=0):

    # once for reference t_stars
    tasks.compute_ref_t_stars(gdirs)
    execute_entity_task(tasks.local_t_star, gdirs)
    execute_entity_task(tasks.mu_star_calibration, gdirs)

    full_ref_df = pd.read_csv(os.path.join(cfg.PATHS['working_dir'],
                                           'ref_tstars.csv'), index_col=0)

    out = execute_entity_task(quick_crossval_entity, gdirs,
                              full_ref_df=full_ref_df)

    # length of xval dict to get current position
    _x = len(xval)
    xval.loc[_x] = 0
    nans = np.array([])
    for col in xval.columns:
        if col == 'nans':
            continue
        values = np.array([])
        for glc in out:
            # xval.loc[_x, col] += glc[0][col]  # TODO
            values = np.append(values, glc[0][col])

            if not major:
                # store cross validated values
                for key in glc[1].keys():
                    if ('cv_' in key) or ('mu_star' in key) or \
                            ('mustar' in key):
                        full_ref_df.loc[glc[1]['rgi_id'], key] = glc[1][key]

        # sum of nans = number of failed glaciers
        nans = np.append(nans, np.isnan(values).sum())
        # calculate means of bias, rmse, ...
        xval.loc[_x, col] = np.nanmean(values)

    # calculate standard deviation quotient
    xval.loc[_x, 'std_quot'] = (xval.loc[_x, 'std_oggm'] /
                                xval.loc[_x, 'std_ref'])

    # treat and save nans:
    nans = nans[nans != len(gdirs)]
    if (nans == 0).all():
        xval.loc[_x, 'nans'] = 0
    elif nans[nans != 0].mean() == nans.max():
        xval.loc[_x, 'nans'] = nans.max()
    else:
        raise RuntimeError('something unexpected happened during nan '
                           'counting')

    if not major:
        # get interpolated mu star
        out = execute_entity_task(interpolate_mu_star, gdirs,
                                  full_ref_df=full_ref_df)
        for glc in out:
            full_ref_df.loc[glc[0], 'interp_mustar'] = glc[1]

        # write crossvalidation if minor
        file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
        # sort first
        full_ref_df.sort_index(axis=1, inplace=True)
        full_ref_df.to_csv(file)

    return xval
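# A usage sketch for the cross-validation driver above (an assumption, not
# part of the original source): calibration() appends one summary row to
# `xval` per call, so a driver builds an empty frame and loops over candidate
# parameter values. The column list below is a hypothetical placeholder; every
# column except 'nans' is assumed to be present in the per-glacier output of
# quick_crossval_entity, so the real list must match that output.

def crossval_driver(gdirs):
    cols = ['bias', 'rmse', 'std_oggm', 'std_ref', 'std_quot', 'nans']
    xval = pd.DataFrame(columns=cols)
    for prcp_fac in [1.5, 2.0, 2.5]:  # hypothetical parameter sweep
        cfg.PARAMS['prcp_scaling_factor'] = prcp_fac
        xval = calibration(gdirs, xval, major=1)  # major: skip per-glacier files
    return xval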
# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate tasks
tasks.compute_ref_t_stars(gdirs)
execute_entity_task(tasks.local_t_star, gdirs)
execute_entity_task(tasks.mu_star_calibration, gdirs)

# We store the associated params
mb_calib = gdirs[0].read_pickle('climate_info')['mb_calib_params']
with open(os.path.join(WORKING_DIR, 'mb_calib_params.json'), 'w') as fp:
    json.dump(mb_calib, fp)

# And also some statistics
utils.compile_glacier_statistics(gdirs)

# Tests: for all glaciers, the mass-balance around tstar and the
# bias with observation should be approx 0
for gd in gdirs:
entity = gpd.read_file(get_demo_file('Hintereisferner_RGI5.shp')).iloc[0]

gdir = oggm.GlacierDirectory(entity, base_dir=base_dir)

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.compute_downstream_bedshape(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
tasks.process_cru_data(gdir)
tasks.mu_candidates(gdir)
tasks.compute_ref_t_stars([gdir])
tasks.distribute_t_stars([gdir])
tasks.apparent_mb(gdir)
tasks.prepare_for_inversion(gdir)
tasks.volume_inversion(gdir, glen_a=cfg.A, fs=0)
tasks.filter_inversion_output(gdir)
tasks.init_present_time_glacier(gdir)

df = utils.glacier_characteristics([gdir], path=False)

reset = True
seed = 0
tasks.random_glacier_evolution(gdir, nyears=800, seed=0,
# Basic tasks
task_list = [
    itmix.glacier_masks_itmix,
    tasks.compute_centerlines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate related tasks
execute_entity_task(tasks.process_cru_data, gdirs)
tasks.compute_ref_t_stars(gdirs)
tasks.distribute_t_stars(gdirs)

# Inversion
execute_entity_task(tasks.prepare_for_inversion, gdirs)
itmix.optimize_thick(gdirs)
execute_entity_task(tasks.volume_inversion, gdirs)

# Write out glacier statistics
df = utils.glacier_characteristics(gdirs)
fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
df.to_csv(fpath)

if do_itmix:
    done = False
def up_to_inversion(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = not ON_TRAVIS

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')

    # Set up the paths and other stuff
    cfg.set_divides_db(get_demo_file('divides_workflow.shp'))
    cfg.PATHS['wgms_rgi_links'] = get_demo_file('RGI_WGMS_oetztal.csv')
    cfg.PATHS['glathida_rgi_links'] = get_demo_file('RGI_GLATHIDA_oetztal.csv')

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.GeoDataFrame.from_file(rgi_file)

    # Be sure data is downloaded because lock doesn't work
    cl = utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['use_optimized_inversion_params'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        flowline.init_present_time_glacier(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

        # Climate related tasks
        # See if CRU is running
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PATHS['climate_file'] = '~'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.distribute_cru_style, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)

        # Use histalp for the actual test
        cfg.PARAMS['temp_use_local_gradient'] = True
        cfg.PATHS['climate_file'] = get_demo_file('HISTALP_oetztal.nc')
        cfg.PATHS['cru_dir'] = '~'
        workflow.climate_tasks(gdirs)

        # Inversion
        workflow.inversion_tasks(gdirs)

    return gdirs
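# A usage sketch (an assumption, not part of the original source): tests built
# on the helper above typically reuse the prepared directories and assert on
# the inversion products, e.g. via the glacier_characteristics() summary table
# used elsewhere in these snippets. 'inv_volume_km3' is a hypothetical column
# name here, not confirmed by the source.

def test_inversion_volume():
    gdirs = up_to_inversion(reset=False)
    df = utils.glacier_characteristics(gdirs)
    assert np.all(df['inv_volume_km3'] > 0)  # hypothetical column name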
def test_crossval(self):

    gdirs = up_to_distrib()

    # in case we ran crossval we need to rerun
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
    workflow.execute_entity_task(tasks.apparent_mb, gdirs)

    # before crossval
    refmustars = []
    for gdir in gdirs:
        tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
        refmustars.append(tdf['mu_star'].values[0])

    tasks.crossval_t_stars(gdirs)
    file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
    df = pd.read_csv(file, index_col=0)

    # after crossval we need to rerun
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
    workflow.execute_entity_task(tasks.apparent_mb, gdirs)

    # Test if quicker crossval is also OK
    tasks.quick_crossval_t_stars(gdirs)
    file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
    dfq = pd.read_csv(file, index_col=0)

    # after crossval we need to rerun
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
    workflow.execute_entity_task(tasks.apparent_mb, gdirs)

    assert np.all(np.abs(df.cv_bias) < 50)
    assert np.all(np.abs(dfq.cv_bias) < 50)
    # The biases aren't entirely equivalent and it's ok
    np.testing.assert_allclose(df.cv_prcp_fac, dfq.cv_prcp_fac)

    # see if the process didn't break anything
    mustars = []
    for gdir in gdirs:
        tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
        mustars.append(tdf['mu_star'].values[0])
    np.testing.assert_allclose(refmustars, mustars)

    # make some mb tests
    from oggm.core.massbalance import PastMassBalance
    for rid in df.index:
        gdir = [g for g in gdirs if g.rgi_id == rid][0]
        h, w = gdir.get_inversion_flowline_hw()
        cfg.PARAMS['use_bias_for_run'] = False
        mbmod = PastMassBalance(gdir)
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE'].to_frame(name='ref')
        for yr in mbdf.index:
            mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
        mm = mbdf.mean()
        np.testing.assert_allclose(df.loc[rid].bias,
                                   mm['mine'] - mm['ref'], atol=1e-3)
        cfg.PARAMS['use_bias_for_run'] = True
        mbmod = PastMassBalance(gdir)
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE'].to_frame(name='ref')
        for yr in mbdf.index:
            mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
        mm = mbdf.mean()
        np.testing.assert_allclose(mm['mine'], mm['ref'], atol=1e-3)