def test_invert_and_run(self):

    from oggm.core.models import flowline, massbalance

    glen_a = cfg.A * 2

    gdir = utils.GlacierDirectory(self.rgin, base_dir=self.testdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.local_mustar_apparent_mb(gdir, tstar=1975, bias=0.)
    inversion.prepare_for_inversion(gdir)
    v, a = inversion.invert_parabolic_bed(gdir, glen_a=glen_a)

    cfg.PARAMS['bed_shape'] = 'parabolic'
    flowline.init_present_time_glacier(gdir)
    mb_mod = massbalance.TstarMassBalanceModel(gdir)
    fls = gdir.read_pickle('model_flowlines')
    model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0.,
                                    fs=0, glen_a=glen_a)

    ref_vol = model.volume_m3
    model.run_until_equilibrium()
    after_vol = model.volume_m3
    np.testing.assert_allclose(ref_vol, after_vol, rtol=0.1)
def test_ideal_glacier(self):

    # we are making an ideal glacier from a dummy parabolic bed
    glen_a = cfg.A * 1

    from oggm.core.models import flowline, massbalance

    gdir = utils.GlacierDirectory(self.rgin, base_dir=self.testdir)

    fls = self._parabolic_bed()
    mbmod = massbalance.ConstantBalanceModel(2800.)
    model = flowline.FluxBasedModel(fls, mb_model=mbmod, glen_a=glen_a)
    model.run_until_equilibrium()

    # Prepare the inversion input from the dummy bed
    map_dx = 100.
    towrite = []
    for fl in model.fls:
        # Distance between two points
        dx = fl.dx * map_dx
        # Widths
        widths = fl.widths * map_dx
        # Heights
        hgt = fl.surface_h
        # Flux
        mb = mbmod.get_mb(hgt) * cfg.SEC_IN_YEAR * cfg.RHO
        fl.flux = np.zeros(len(fl.surface_h))
        fl.set_apparent_mb(mb)
        flux = fl.flux * (map_dx**2) / cfg.SEC_IN_YEAR / cfg.RHO
        pok = np.nonzero(widths > 10.)
        widths = widths[pok]
        hgt = hgt[pok]
        flux = flux[pok]
        angle = np.arctan(-np.gradient(hgt, dx))  # beware the minus sign
        # The flux should not be negative
        assert not np.any(flux < -0.1)
        # add to output
        cl_dic = dict(dx=dx, flux=flux, width=widths, hgt=hgt,
                      slope_angle=angle, is_last=True)
        towrite.append(cl_dic)

    # Write out
    gdir.write_pickle(towrite, 'inversion_input', div_id=1)
    v, a = inversion.invert_parabolic_bed(gdir, glen_a=glen_a)
    v_km3 = v * 1e-9
    a_km2 = np.sum(widths * dx) * 1e-6
    v_vas = 0.034 * (a_km2**1.375)

    np.testing.assert_allclose(v, model.volume_m3, rtol=0.01)

    cl = gdir.read_pickle('inversion_output', div_id=1)[0]
    assert utils.rmsd(cl['thick'],
                      model.fls[0].thick[:len(cl['thick'])]) < 10.
def define_g2ti_glacier(path=None, base_dir=None):

    fname = os.path.join(path, 'outlines.shp')
    ent = gpd.read_file(fname)
    rid = ent.RGIId.values[0]
    if '5a' in rid:
        rid = rid.replace('5a', '60')
        ent['RGIId'] = rid
    ent['Name'] = '' if ent['Name'][0] == 'None' else ent['Name']
    gdir = utils.GlacierDirectory(ent.iloc[0], base_dir=base_dir)
    ent.to_file(gdir.get_filepath('outlines'))

    proj_out = salem.check_crs(ent.crs)

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    if len(gdf) > 0:
        gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                       (gdf.RGIId_2 == gdir.rgi_id))]
        if len(gdf) > 0:
            gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
            if hasattr(gdf.crs, 'srs'):
                # salem uses pyproj
                gdf.crs = gdf.crs.srs
            gdf.to_file(gdir.get_filepath('intersects'))
    else:
        # Sanity check
        if cfg.PARAMS['use_intersects']:
            raise RuntimeError('You seem to have forgotten to set the '
                               'intersects file for this run. OGGM works '
                               'better with such a file. If you know what '
                               'you are doing, set '
                               "cfg.PARAMS['use_intersects'] = False to "
                               'suppress this error.')

    # Topo
    shutil.copy(os.path.join(path, 'dem.tif'), gdir.get_filepath('dem'))
    mpath = gdir.get_filepath('dem').replace('dem', 'g2ti_mask')
    shutil.copy(os.path.join(path, 'mask.tif'), mpath)

    # Grid
    ds = salem.GeoTiff(gdir.get_filepath('dem'))
    ds.grid.to_json(gdir.get_filepath('glacier_grid'))

    gdir.write_pickle(['G2TI'], 'dem_source')

    return gdir
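# A minimal usage sketch (the paths and id below are placeholders, not from
# the source): define_g2ti_glacier expects `path` to contain 'outlines.shp',
# 'dem.tif' and 'mask.tif', which it reads and copies into a new
# GlacierDirectory.
def _example_define_g2ti_glacier():
    gdir = define_g2ti_glacier(path='/path/to/g2ti/RGI60-11.00001',
                               base_dir='/path/to/working_dir')
    # The returned GlacierDirectory has the copied DEM and grid in place
    print(gdir.rgi_id, gdir.get_filepath('dem'))
    return gdir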
def single_flowline_glacier_directory(rgi_id, reset=False, prepro_border=80):
    """Prepare a GlacierDirectory for PyGEM (single flowline to start with)

    Parameters
    ----------
    rgi_id : str
        the rgi id of the glacier
    reset : bool
        set to true to delete any pre-existing files. If false (the default),
        the directory won't be re-downloaded if already available locally in
        order to spare time.
    prepro_border : int
        the size of the glacier map: 10, 80, 160, 250

    Returns
    -------
    a GlacierDirectory object
    """
    if type(rgi_id) != str:
        raise ValueError('We expect rgi_id to be a string')
    if 'RGI60-' not in rgi_id:
        raise ValueError('OGGM currently expects IDs to start with RGI60-')

    cfg.initialize()
    wd = utils.gettempdir(dirname='pygem-{}-b{}'.format(rgi_id,
                                                        prepro_border),
                          reset=reset)
    cfg.PATHS['working_dir'] = wd
    cfg.PARAMS['use_multiple_flowlines'] = False

    # Check if folder is already processed
    try:
        gdir = utils.GlacierDirectory(rgi_id)
        gdir.read_pickle('model_flowlines')
        # If the above works the directory is already processed, return
        return gdir
    except OSError:
        pass

    # If not ready, we download the preprocessed data for this glacier
    gdirs = workflow.init_glacier_regions([rgi_id], from_prepro_level=2,
                                          prepro_border=prepro_border)

    # Compute all the stuff
    list_tasks = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.catchment_area,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.compute_downstream_bedshape,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in list_tasks:
        # The order matters!
        workflow.execute_entity_task(task, gdirs)

    return gdirs[0]
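# Hypothetical usage sketch (the RGI id is illustrative only): the helper
# above returns a GlacierDirectory whose 'model_flowlines' pickle is ready
# to be read for a single-flowline PyGEM run.
def _example_single_flowline_usage():
    gdir = single_flowline_glacier_directory('RGI60-11.00897',
                                             prepro_border=80)
    fls = gdir.read_pickle('model_flowlines')
    print(gdir.rgi_id, 'has', len(fls), 'model flowline(s)')
    return fls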
def single_flowline_glacier_directory(rgi_id, reset=False, prepro_border=80):
    """Prepare a GlacierDirectory for PyGEM (single flowline to start with)

    Parameters
    ----------
    rgi_id : str
        the rgi id of the glacier (RGIv60-)
    reset : bool
        set to true to delete any pre-existing files. If false (the default),
        the directory won't be re-downloaded if already available locally in
        order to spare time.
    prepro_border : int
        the size of the glacier map: 10, 80, 160, 250

    Returns
    -------
    a GlacierDirectory object
    """
    if type(rgi_id) != str:
        raise ValueError('We expect rgi_id to be a string')
    if not rgi_id.startswith('RGI60-'):
        # Normalize short ids such as '1.00570' to 'RGI60-01.00570'
        if '.' not in rgi_id:
            raise ValueError('Check RGIId is correct')
        rgi_id = ('RGI60-' + rgi_id.split('.')[0].zfill(2) + '.' +
                  rgi_id.split('.')[1])

    # Initialize OGGM and set up the default run parameters
    cfg.initialize(logging_level='WORKFLOW')
    cfg.PARAMS['border'] = 10
    # dl_verify checks the downloaded files against known hashes. It can be
    # slow for the huge input files, but we keep it switched on here.
    cfg.PARAMS['dl_verify'] = True
    cfg.PARAMS['use_multiple_flowlines'] = False
    # working directory (set in the PyGEM parameter file)
    cfg.PATHS['working_dir'] = pygem_prms.oggm_gdir_fp

    # Check if folder is already processed
    if not pygem_prms.overwrite_gdirs:
        try:
            gdir = utils.GlacierDirectory(rgi_id)
            gdir.read_pickle('inversion_flowlines')
            # If the above works the directory is already processed, return
            return gdir
        except Exception:
            process_gdir = True
    else:
        process_gdir = True

    if process_gdir:
        # ===== SELECT BEST DEM =====
        # gdirs = rgitopo.init_glacier_directories_from_rgitopo([rgi_id])  # removed 10/20/2020
        # gdirs = workflow.init_glacier_directories([rgi_id])

        # Download preprocessed data
        gdirs = workflow.init_glacier_regions([rgi_id], from_prepro_level=1,
                                              prepro_border=prepro_border)

        # Start after the prepro task level
        # base_url = 'https://cluster.klima.uni-bremen.de/~fmaussion/gdirs/prepro_l2_202010/single_fl'
        # gdirs = workflow.init_glacier_directories([rgi_id], from_prepro_level=2, prepro_border=40,
        #                                           prepro_base_url=base_url, prepro_rgi_version='62')

        # Compute all the stuff
        list_tasks = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.compute_downstream_line,
            tasks.compute_downstream_bedshape,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            # # Consensus ice thickness
            # icethickness.consensus_gridded,
            # icethickness.consensus_binned,
            # Mass balance data
            mbdata.mb_df_to_gdir
        ]

        # Debris tasks
        if pygem_prms.include_debris:
            list_tasks.append(debris.debris_to_gdir)
            list_tasks.append(debris.debris_binned)

        for task in list_tasks:
            workflow.execute_entity_task(task, gdirs)

        return gdirs[0]
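# Hypothetical usage sketch (the id is illustrative only): this version of
# the helper accepts short ids and zero-pads the region, e.g. '1.00570'
# becomes 'RGI60-01.00570'. The processed directory exposes the inversion
# flowlines written by the task list above.
def _example_pygem_gdir_usage():
    gdir = single_flowline_glacier_directory('1.00570')
    fls = gdir.read_pickle('inversion_flowlines')
    print(gdir.rgi_id, 'has', len(fls), 'inversion flowline(s)')
    return gdir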
def single_flowline_glacier_directory_with_calving(rgi_id, reset=False,
                                                   prepro_border=10,
                                                   k_calving=2):
    """Prepare a GlacierDirectory for PyGEM (single flowline to start with)

    k_calving is a free variable!

    Parameters
    ----------
    rgi_id : str
        the rgi id of the glacier
    reset : bool
        set to true to delete any pre-existing files. If false (the default),
        the directory won't be re-downloaded if already available locally in
        order to spare time.
    prepro_border : int
        the size of the glacier map: 10, 80, 160, 250

    Returns
    -------
    a GlacierDirectory object
    """
    assert 1 == 0, 'UPDATE LATEST GLACIER DIRECTORY OPTIONS FROM NON-CALVING'

    if type(rgi_id) != str:
        raise ValueError('We expect rgi_id to be a string')
    if not rgi_id.startswith('RGI60-'):
        # Normalize short ids such as '1.10689' to 'RGI60-01.10689'
        if '.' not in rgi_id:
            raise ValueError('Check RGIId is correct')
        rgi_id = ('RGI60-' + rgi_id.split('.')[0].zfill(2) + '.' +
                  rgi_id.split('.')[1])

    cfg.initialize()
    wd = ('/Users/davidrounce/Documents/Dave_Rounce/HiMAT/Output/'
          'oggm-pygem-{}-b{}-k{}'.format(rgi_id, prepro_border, k_calving))
    cfg.PATHS['working_dir'] = wd
    cfg.PARAMS['use_multiple_flowlines'] = False
    cfg.PARAMS['use_multiprocessing'] = False

    # Check if folder is already processed
    try:
        gdir = utils.GlacierDirectory(rgi_id)
        gdir.read_pickle('model_flowlines')
        # If the above works the directory is already processed, return
        return gdir
    except Exception:
        pass

    # If not ready, we download the preprocessed data for this glacier
    gdirs = workflow.init_glacier_regions([rgi_id], from_prepro_level=2,
                                          prepro_border=prepro_border)

    if not gdirs[0].is_tidewater:
        raise ValueError('This glacier is not tidewater!')

    # Compute all the stuff
    list_tasks = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,  # added 10/20/2020
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        # Consensus ice thickness
        icethickness.consensus_gridded,
        icethickness.consensus_binned,
        # Mass balance data
        # mbdata.mb_bins_to_glacierwide
        mbdata.mb_df_to_gdir
    ]

    # Debris tasks
    if pygem_prms.include_debris:
        list_tasks.append(debris.debris_to_gdir)
        list_tasks.append(debris.debris_binned)

    for task in list_tasks:
        # The order matters!
        workflow.execute_entity_task(task, gdirs)

    # Calving according to Recinos et al. (2019): solves the equality between
    # ice deformation and Oerlemans' calving law, and reduces the temperature
    # sensitivity
    from oggm.core.inversion import find_inversion_calving
    cfg.PARAMS['k_calving'] = k_calving
    df = find_inversion_calving(gdirs[0])
    print('Calving results:')
    print('k calving:', k_calving)
    for k, v in df.items():
        print(k + ':', v)

    list_tasks = [
        # THIS WILL NOW FAIL BECAUSE OF USING HUSS
        tasks.init_present_time_glacier,
    ]
    for task in list_tasks:
        # The order matters!
        workflow.execute_entity_task(task, gdirs)

    return gdirs[0]
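# Illustrative call only (the id and k_calving value are placeholders, not
# from the source): note that the function above currently starts with a
# hard `assert 1 == 0` guard and will stop until it is updated to match the
# non-calving version.
def _example_calving_gdir_usage():
    gdir = single_flowline_glacier_directory_with_calving('RGI60-01.10689',
                                                          prepro_border=10,
                                                          k_calving=2)
    return gdir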