def test_vdr(self, class_case_dir):

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['working_dir'] = class_case_dir
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    gdir = workflow.init_glacier_directories(gpd.read_file(hef_file))[0]

    exps = ['ERA5', 'ERA5dr']
    files = []
    ref_hgts = []
    for base in exps:
        cfg.PARAMS['baseline_climate'] = base
        tasks.process_climate_data(gdir, output_filesuffix=base)
        files.append(
            gdir.get_filepath('climate_historical', filesuffix=base))
        with xr.open_dataset(files[-1]) as ds:
            ref_hgts.append(ds.ref_hgt)
            assert ds.ref_pix_dis < 10000

    with xr.open_dataset(files[0]) as d1, xr.open_dataset(files[1]) as d2:
        np.testing.assert_allclose(d1.temp, d2.temp)
        np.testing.assert_allclose(d1.prcp, d2.prcp)
        # Fake tests, the plots look plausible
        np.testing.assert_allclose(d2.gradient.mean(), -0.0058, atol=.001)
        np.testing.assert_allclose(d2.temp_std.mean(), 3.35, atol=0.1)

def test_ecmwf_workflow(self, class_case_dir):

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['working_dir'] = class_case_dir
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    gdir = workflow.init_glacier_directories(gpd.read_file(hef_file))[0]

    cfg.PARAMS['baseline_climate'] = 'CERA+ERA5L'
    tasks.process_climate_data(gdir)
    f_ref = gdir.get_filepath('climate_historical')
    with xr.open_dataset(f_ref) as his:
        # Let's do some basic checks
        ci = gdir.get_climate_info()
        assert ci['baseline_climate_source'] == 'CERA+ERA5L'
        assert ci['baseline_hydro_yr_0'] == 1902
        assert ci['baseline_hydro_yr_1'] == 2018

    cfg.PARAMS['baseline_climate'] = 'CERA|ERA5'
    tasks.process_climate_data(gdir)
    f_ref = gdir.get_filepath('climate_historical')
    with xr.open_dataset(f_ref) as his:
        # Let's do some basic checks
        ci = gdir.get_climate_info()
        assert ci['baseline_climate_source'] == 'CERA|ERA5'
        assert ci['baseline_hydro_yr_0'] == 1902
        assert ci['baseline_hydro_yr_1'] == 2010

def init_columbia(reset=False):

    from oggm.core import gis, climate, centerlines
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_columbia')
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_Columbia.tif')
    cfg.PARAMS['border'] = 10

    entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if gdir.has_file('climate_historical'):
        return gdir

    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    tasks.process_dummy_cru_file(gdir, seed=0)
    return gdir

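# A minimal usage sketch for the helper above (hypothetical test, assuming it
# lives in the same module and that the module-level imports used by the other
# tests here -- xr, get_test_dir -- are available). It only relies on behaviour
# shown above: the helper returns a GlacierDirectory that already carries a
# processed 'climate_historical' file.
def test_init_columbia_returns_prepared_gdir():
    # reuse (or rebuild) the cached Columbia directory
    gdir = init_columbia()
    assert gdir.has_file('climate_historical')
    with xr.open_dataset(gdir.get_filepath('climate_historical')) as ds:
        # the dummy CRU file written by process_dummy_cru_file carries
        # monthly temperature and precipitation
        assert 'temp' in ds.variables
        assert 'prcp' in ds.variables
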
def test_ice_cap():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_icecap')
    utils.mkdir(testdir)

    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-05.08389.tif')
    cfg.PARAMS['border'] = 20
    cfg.set_divides_db(get_demo_file('divides_RGI50-05.08389.shp'))

    hef_file = get_demo_file('RGI50-05.08389.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # We should have five groups
    lines = gdir.read_pickle('downstream_lines', div_id=0)
    assert len(np.unique(lines.group)) == 5

    # This just checks that it works
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)

    fig, ax = plt.subplots()
    graphics.plot_catchment_width(gdir, ax=ax, add_intersects=True,
                                  add_touches=True)
    fig.tight_layout()
    return fig

def test_multiple_inversion():

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_mdir')
    if not os.path.exists(testdir):
        os.makedirs(testdir)

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['run_mb_calibration'] = True
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PATHS['working_dir'] = testdir

    # Get the RGI ID
    hef_rgi = gpd.read_file(get_demo_file('divides_hef.shp'))
    hef_rgi.loc[0, 'RGIId'] = 'RGI50-11.00897'

    gdirs = workflow.init_glacier_regions(hef_rgi)
    workflow.gis_prepro_tasks(gdirs)
    workflow.climate_tasks(gdirs)
    workflow.inversion_tasks(gdirs)

    fig, ax = plt.subplots()
    graphics.plot_inversion(gdirs, ax=ax)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig

def test_distribute_climate(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    gdirs = []
    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity=entity)
        gdirs.append(gdir)
    climate.distribute_climate_data(gdirs)

    with netCDF4.Dataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
        ref_h = nc_r.variables['hgt'][1, 1]
        ref_p = nc_r.variables['prcp'][:, 1, 1]
        ref_p *= cfg.PARAMS['prcp_scaling_factor']
        ref_t = nc_r.variables['temp'][:, 1, 1]

    with netCDF4.Dataset(os.path.join(gdir.dir,
                                      'climate_monthly.nc')) as nc_r:
        self.assertTrue(ref_h == nc_r.ref_hgt)
        np.testing.assert_allclose(ref_t, nc_r.variables['temp'][:])
        np.testing.assert_allclose(ref_p, nc_r.variables['prcp'][:])

def test_nodivide():

    # test directory
    testdir = TESTDIR_BASE + '_nodiv'
    if not os.path.exists(testdir):
        os.makedirs(testdir)

    # Init
    cfg.initialize()
    cfg.set_divides_db()
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 40

    hef_file = get_demo_file('Hintereisferner.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)

    fig, ax = plt.subplots()
    graphics.plot_centerlines(gdir, ax=ax)
    fig.tight_layout()
    return fig

def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(_TEST_DIR):
        os.makedirs(_TEST_DIR)
    if reset:
        clean_dir(_TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = _TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Make a fake marine and lake terminating glacier
    cfg.PARAMS['tidewater_type'] = 4  # make lake also calve
    rgidf.loc[0, 'GlacType'] = '0199'
    rgidf.loc[1, 'GlacType'] = '0299'

    # Use RGI6
    rgidf['RGIId'] = [s.replace('RGI50', 'RGI60') for s in rgidf.RGIId]

    # Be sure data is downloaded
    cru.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True

    # Go
    gdirs = workflow.init_glacier_directories(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs

def setUp(self):
    """Instantiate the TestCase: create the test directory, initialise
    OGGM, and set paths and parameters.

    Most input files, like the DEM, the climate file and the glacier
    outline, come from the oggm-sample-data repository and may hence be
    outdated. The tests are performed on Hintereisferner
    (RGI60-11.00897), running with HISTALP climate data and the matching
    mass balance calibration parameters.
    """
    # test directory
    self.testdir = os.path.join(get_test_dir(), 'tmp_vas')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # load default parameter file and set working directory
    vascaling.initialize()
    cfg.PATHS['working_dir'] = self.testdir

    # set path to GIS files
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

    # set parameters for climate file and mass balance calibration
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['run_mb_calibration'] = True

    # adjust parameters for HistAlp climate
    cfg.PARAMS['prcp_scaling_factor'] = 2.5
    cfg.PARAMS['temp_melt'] = -0.5
    cfg.PARAMS['temp_all_solid'] = 0.

    # coveralls.io has issues if multiprocessing is enabled
    cfg.PARAMS['use_multiprocessing'] = False

def test_nodivide_corrected():

    # test directory
    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_nodiv')
    if not os.path.exists(testdir):
        os.makedirs(testdir)

    # Init
    cfg.initialize()
    cfg.set_divides_db()
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 40

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)

    fig, ax = plt.subplots()
    graphics.plot_catchment_width(gdir, ax=ax, corrected=True,
                                  add_intersects=True, add_touches=True)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig

def test_mu_candidates(self):

    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    gdirs.append(gdir)
    climate.process_histalp_nonparallel(gdirs)
    climate.mu_candidates(gdir, div_id=0)

    se = gdir.read_pickle("mu_candidates")[2.5]
    self.assertTrue(se.index[0] == 1802)
    self.assertTrue(se.index[-1] == 2003)

    df = pd.DataFrame()
    df["mu"] = se

    # Check that the moving average of temp is negatively correlated
    # with the mus
    with netCDF4.Dataset(get_demo_file("histalp_merged_hef.nc")) as nc_r:
        ref_t = nc_r.variables["temp"][:, 1, 1]
    ref_t = np.mean(ref_t.reshape((len(df), 12)), 1)
    ma = np.convolve(ref_t, np.ones(31) / float(31), "same")
    df["temp"] = ma
    df = df.dropna()
    self.assertTrue(np.corrcoef(df["mu"], df["temp"])[0, 1] < -0.75)

def test_distribute_climate_parallel(self):

    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)
    climate.process_custom_climate_data(gdir)

    ci = gdir.read_pickle("climate_info")
    self.assertEqual(ci["hydro_yr_0"], 1802)
    self.assertEqual(ci["hydro_yr_1"], 2003)

    with netCDF4.Dataset(get_demo_file("histalp_merged_hef.nc")) as nc_r:
        ref_h = nc_r.variables["hgt"][1, 1]
        ref_p = nc_r.variables["prcp"][:, 1, 1]
        ref_t = nc_r.variables["temp"][:, 1, 1]

    with netCDF4.Dataset(os.path.join(gdir.dir,
                                      "climate_monthly.nc")) as nc_r:
        self.assertTrue(ref_h == nc_r.ref_hgt)
        np.testing.assert_allclose(ref_t, nc_r.variables["temp"][:])
        np.testing.assert_allclose(ref_p, nc_r.variables["prcp"][:])

def init_columbia_eb(dir_name, reset=False):

    from oggm.core import gis, centerlines
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), dir_name)
    mkdir(testdir, reset=reset)

    # Init
    cfg.initialize()
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_Columbia.tif')
    cfg.PARAMS['border'] = 10

    entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
    gdir = oggm.GlacierDirectory(entity)
    if gdir.has_file('climate_historical'):
        return gdir

    gis.define_glacier_region(gdir)
    gis.simple_glacier_masks(gdir)
    centerlines.elevation_band_flowline(gdir)
    centerlines.fixed_dx_elevation_band_flowline(gdir)
    centerlines.compute_downstream_line(gdir)
    tasks.process_dummy_cru_file(gdir, seed=0)
    return gdir

def test_mu_candidates(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    gdirs.append(gdir)
    climate.distribute_climate_data(gdirs)
    climate.mu_candidates(gdir, div_id=0)

    se = gdir.read_pickle('mu_candidates')
    self.assertTrue(se.index[0] == 1802)
    self.assertTrue(se.index[-1] == 2003)

    df = pd.DataFrame()
    df['mu'] = se

    # Check that the moving average of temp is negatively correlated
    # with the mus
    with netCDF4.Dataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
        ref_t = nc_r.variables['temp'][:, 1, 1]
    ref_t = np.mean(ref_t.reshape((len(df), 12)), 1)
    ma = np.convolve(ref_t, np.ones(31) / float(31), 'same')
    df['temp'] = ma
    df = df.dropna()
    self.assertTrue(np.corrcoef(df['mu'], df['temp'])[0, 1] < -0.75)

def test_distribute_climate_cru(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir_cru)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)

    climate.distribute_climate_data([gdirs[0]])
    cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
    cru_dir = os.path.dirname(cru_dir)
    cfg.PATHS['climate_file'] = '~'
    cfg.PATHS['cru_dir'] = cru_dir
    climate.distribute_climate_data([gdirs[1]])
    cfg.PATHS['cru_dir'] = '~'
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')

    gdh = gdirs[0]
    gdc = gdirs[1]
    with xr.open_dataset(os.path.join(gdh.dir, 'climate_monthly.nc')) as nc_h:
        with xr.open_dataset(os.path.join(gdc.dir,
                                          'climate_monthly.nc')) as nc_c:
            # put on the same altitude
            # (using default gradient because better)
            temp_cor = nc_c.temp - 0.0065 * (nc_h.ref_hgt - nc_c.ref_hgt)
            totest = temp_cor - nc_h.temp
            self.assertTrue(totest.mean() < 0.5)
            # precip
            totest = nc_c.prcp - nc_h.prcp
            self.assertTrue(totest.mean() < 100)

def test_distribute_climate(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    gdirs = []
    for index, entity in rgidf.iterrows():
        gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gdirs.append(gdir)
    climate.distribute_climate_data(gdirs)

    nc_r = netCDF4.Dataset(get_demo_file('histalp_merged_hef.nc'))
    ref_h = nc_r.variables['hgt'][1, 1]
    ref_p = nc_r.variables['prcp'][:, 1, 1]
    ref_p *= cfg.params['prcp_scaling_factor']
    ref_t = nc_r.variables['temp'][:, 1, 1]
    nc_r.close()

    nc_r = netCDF4.Dataset(os.path.join(gdir.dir, 'climate_monthly.nc'))
    self.assertTrue(ref_h == nc_r.ref_hgt)
    np.testing.assert_allclose(ref_t, nc_r.variables['temp'][:])
    np.testing.assert_allclose(ref_p, nc_r.variables['prcp'][:])
    nc_r.close()

def test_nodivide():

    # test directory
    testdir = TESTDIR_BASE + '_nodiv'
    if not os.path.exists(testdir):
        os.makedirs(testdir)

    # Init
    cfg.initialize()
    cfg.set_divides_db()
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 40

    # loop because for some reason indexing wont work
    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)
    for index, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        graphics.plot_centerlines(gdir)

def setUp(self):
    """Instantiate the TestCase: create the test directory, initialise
    OGGM, and set paths and parameters.
    """
    # test directory
    self.testdir = os.path.join(get_test_dir(), 'tmp_vas')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # load default parameter file and set working directory
    cfg.initialize()
    cfg.PATHS['working_dir'] = self.testdir

    # set path to GIS files
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

    # set parameters for climate file and mass balance calibration
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['run_mb_calibration'] = True

    # adjust parameters for HistAlp climate
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['temp_all_liq'] = 2.
    cfg.PARAMS['temp_all_solid'] = 0.
    cfg.PARAMS['temp_default_gradient'] = -0.0065

    # coveralls.io has issues if multiprocessing is enabled
    cfg.PARAMS['use_multiprocessing'] = False

def test_ice_cap():

    testdir = os.path.join(get_test_dir(), 'tmp_icecap')
    utils.mkdir(testdir, reset=True)

    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-05.08389.tif')
    cfg.PARAMS['border'] = 60
    cfg.PATHS['working_dir'] = testdir

    df = gpd.read_file(get_demo_file('divides_RGI50-05.08389.shp'))
    df['Area'] = df.Area * 1e-6  # cause it was in m2
    df['RGIId'] = ['RGI50-05.08389_d{:02d}'.format(d + 1) for d in df.index]

    gdirs = workflow.init_glacier_regions(df)
    workflow.gis_prepro_tasks(gdirs)

    from salem import mercator_grid, Map
    smap = mercator_grid((gdirs[0].cenlon, gdirs[0].cenlat),
                         extent=[20000, 23000])
    smap = Map(smap)

    fig, ax = plt.subplots()
    graphics.plot_catchment_width(gdirs, ax=ax, add_intersects=True,
                                  add_touches=True, smap=smap)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig

def test_mb_climate(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)
    climate.distribute_climate_data(gdirs)

    with netCDF4.Dataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
        ref_h = nc_r.variables['hgt'][1, 1]
        ref_p = nc_r.variables['prcp'][:, 1, 1]
        ref_p *= cfg.PARAMS['prcp_scaling_factor']
        ref_t = nc_r.variables['temp'][:, 1, 1]
        ref_t = np.where(ref_t < 0, 0, ref_t)

    hgts = np.array([ref_h, ref_h, -8000, 8000])
    time, temp, prcp = climate.mb_climate_on_height(gdir, hgts)

    ref_nt = 202 * 12
    self.assertTrue(len(time) == ref_nt)
    self.assertTrue(temp.shape == (4, ref_nt))
    self.assertTrue(prcp.shape == (4, ref_nt))
    np.testing.assert_allclose(temp[0, :], ref_t)
    np.testing.assert_allclose(temp[0, :], temp[1, :])
    np.testing.assert_allclose(prcp[0, :], prcp[1, :])
    np.testing.assert_allclose(prcp[3, :], ref_p)
    np.testing.assert_allclose(prcp[2, :], ref_p * 0)
    np.testing.assert_allclose(temp[3, :], ref_p * 0)

    yr = [1802, 1802]
    time, temp, prcp = climate.mb_climate_on_height(gdir, hgts,
                                                    year_range=yr)
    ref_nt = 1 * 12
    self.assertTrue(len(time) == ref_nt)
    self.assertTrue(temp.shape == (4, ref_nt))
    self.assertTrue(prcp.shape == (4, ref_nt))
    np.testing.assert_allclose(temp[0, :], ref_t[0:12])
    np.testing.assert_allclose(temp[0, :], temp[1, :])
    np.testing.assert_allclose(prcp[0, :], prcp[1, :])
    np.testing.assert_allclose(prcp[3, :], ref_p[0:12])
    np.testing.assert_allclose(prcp[2, :], ref_p[0:12] * 0)
    np.testing.assert_allclose(temp[3, :], ref_p[0:12] * 0)

    yr = [1803, 1804]
    time, temp, prcp = climate.mb_climate_on_height(gdir, hgts,
                                                    year_range=yr)
    ref_nt = 2 * 12
    self.assertTrue(len(time) == ref_nt)
    self.assertTrue(temp.shape == (4, ref_nt))
    self.assertTrue(prcp.shape == (4, ref_nt))
    np.testing.assert_allclose(temp[0, :], ref_t[12:36])
    np.testing.assert_allclose(temp[0, :], temp[1, :])
    np.testing.assert_allclose(prcp[0, :], prcp[1, :])
    np.testing.assert_allclose(prcp[3, :], ref_p[12:36])
    np.testing.assert_allclose(prcp[2, :], ref_p[12:36] * 0)
    np.testing.assert_allclose(temp[3, :], ref_p[12:36] * 0)

def cfg_init(self):

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'

def test_all_at_once(self, class_case_dir):

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['working_dir'] = class_case_dir
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    gdir = workflow.init_glacier_directories(gpd.read_file(hef_file))[0]

    exps = ['CRU', 'HISTALP', 'ERA5', 'ERA5L', 'CERA']
    files = []
    ref_hgts = []
    for base in exps:
        cfg.PARAMS['baseline_climate'] = base
        tasks.process_climate_data(gdir, output_filesuffix=base)
        files.append(
            gdir.get_filepath('climate_historical', filesuffix=base))
        with xr.open_dataset(files[-1]) as ds:
            ref_hgts.append(ds.ref_hgt)
            assert ds.ref_pix_dis < 30000

    # TEMP
    with xr.open_mfdataset(files, concat_dim=exps) as ds:
        dft = ds.temp.to_dataframe().unstack().T
        dft.index = dft.index.levels[1]

    # Common period
    dfy = dft.resample('AS').mean().dropna().iloc[1:]
    dfm = dft.groupby(dft.index.month).mean()
    assert dfy.corr().min().min() > 0.44  # ERA5L and CERA do not correlate
    assert dfm.corr().min().min() > 0.97
    dfavg = dfy.describe()

    # Correct for hgt
    ref_h = ref_hgts[0]
    for h, d in zip(ref_hgts, exps):
        dfy[d] = dfy[d] - 0.0065 * (ref_h - h)
        dfm[d] = dfm[d] - 0.0065 * (ref_h - h)
    dfavg_cor = dfy.describe()

    # After correction less spread
    assert dfavg_cor.loc['mean'].std() < 0.8 * dfavg.loc['mean'].std()
    assert dfavg_cor.loc['mean'].std() < 2.1

    # PRECIP
    with xr.open_mfdataset(files, concat_dim=exps) as ds:
        dft = ds.prcp.to_dataframe().unstack().T
        dft.index = dft.index.levels[1]

    # Common period
    dfy = dft.resample('AS').mean().dropna().iloc[1:] * 12
    dfm = dft.groupby(dft.index.month).mean()
    assert dfy.corr().min().min() > 0.5
    assert dfm.corr().min().min() > 0.8
    dfavg = dfy.describe()
    assert dfavg.loc['mean'].std() / dfavg.loc['mean'].mean() < 0.25  # %

def test_coxe():

    testdir = os.path.join(get_test_dir(), 'tmp_coxe')
    utils.mkdir(testdir, reset=True)

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['clip_tidewater_border'] = False
    cfg.PARAMS['use_multiple_flowlines'] = False
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True
    cfg.PARAMS['trapezoid_lambdas'] = 1

    hef_file = get_demo_file('rgi_RGI50-01.10299.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.mass_conservation_inversion(gdir)
    inversion.filter_inversion_output(gdir)
    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')

    p = gdir.read_pickle('linear_mb_params')
    mb_mod = massbalance.LinearMassBalance(ela_h=p['ela_h'],
                                           grad=p['grad'])
    mb_mod.temp_bias = -0.3
    model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0,
                                    inplace=True, is_tidewater=True)

    # run
    model.run_until(200)
    assert model.calving_m3_since_y0 > 0

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig

def test_local_t_star(self):

    # set parameters for climate file and mass balance calibration
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PARAMS['baseline_y0'] = 1850
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['run_mb_calibration'] = False

    # read the Hintereisferner
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid and the glacier mask
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    # run centerline prepro tasks
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    # process the given climate file
    climate.process_custom_climate_data(gdir)

    # compute the reference t* for the glacier
    # given the reference of mass balance measurements
    res = vascaling.t_star_from_refmb(gdir)
    t_star, bias = res['t_star'], res['bias']

    # compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir, tstar=t_star, bias=bias)
    # read calibration results
    vas_mustar_refmb = gdir.read_json('vascaling_mustar')

    # get reference t* list
    ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
    # compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir, ref_df=ref_df)
    # read calibration results
    vas_mustar_refdf = gdir.read_json('vascaling_mustar')

    # compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir)
    # read calibration results
    vas_mustar = gdir.read_json('vascaling_mustar')

    # compare with each other
    assert vas_mustar_refdf == vas_mustar
    # TODO: this test is failing currently
    # np.testing.assert_allclose(vas_mustar_refmb['bias'],
    #                            vas_mustar_refdf['bias'], atol=1)
    vas_mustar_refdf.pop('bias')
    vas_mustar_refmb.pop('bias')
    # end of workaround
    assert vas_mustar_refdf == vas_mustar_refmb

def test_distribute(self):

    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.process_histalp_nonparallel([gdir])
    climate.mu_candidates(gdir, div_id=0)

    hef_file = get_demo_file("mbdata_RGI40-11.00897.csv")
    mbdf = pd.read_csv(hef_file).set_index("YEAR")
    t_star, bias, prcp_fac = climate.t_star_from_refmb(
        gdir, mbdf["ANNUAL_BALANCE"])
    t_star = t_star[-1]
    bias = bias[-1]
    climate.local_mustar_apparent_mb(gdir, tstar=t_star, bias=bias,
                                     prcp_fac=prcp_fac)

    # OK. Values from Fischer and Kuhn 2013
    # Area: 8.55
    # meanH = 67+-7
    # Volume = 0.573+-0.063
    # maxH = 242+-13
    inversion.prepare_for_inversion(gdir)

    ref_v = 0.573 * 1e9

    def to_optimize(x):
        glen_a = cfg.A * x[0]
        fs = cfg.FS * x[1]
        v, _ = inversion.invert_parabolic_bed(gdir, fs=fs,
                                              glen_a=glen_a)
        return (v - ref_v) ** 2

    import scipy.optimize as optimization
    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-1)["x"]
    glen_a = cfg.A * out[0]
    fs = cfg.FS * out[1]
    v, _ = inversion.invert_parabolic_bed(gdir, fs=fs, glen_a=glen_a,
                                          write=True)
    np.testing.assert_allclose(ref_v, v)

    inversion.distribute_thickness(gdir, how="per_altitude",
                                   add_nc_name=True)
    inversion.distribute_thickness(gdir, how="per_interpolation",
                                   add_slope=False, add_nc_name=True)

    grids_file = gdir.get_filepath("gridded_data")
    with netCDF4.Dataset(grids_file) as nc:
        t1 = nc.variables["thickness_per_altitude"][:]
        t2 = nc.variables["thickness_per_interpolation"][:]

    np.testing.assert_allclose(np.sum(t1), np.sum(t2))
    if not HAS_NEW_GDAL:
        np.testing.assert_allclose(np.max(t1), np.max(t2), atol=30)

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('HEF_divided.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    # We don't use mp on TRAVIS because unsure if compatible
    # with test coverage
    cfg.PARAMS['use_multiprocessing'] = not ON_TRAVIS

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.GeoDataFrame.from_file(rgi_file)

    # Be sure data is downloaded because lock doesn't work
    cl = utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['use_optimized_inversion_params'] = True
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['invert_with_rectangular'] = False

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)
    assert gdirs[14].name == 'Hintereisferner'

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('HEF_divided.shp'))
    cfg.paths['srtm_file'] = get_demo_file('hef_srtm.tif')

def test_coxe():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_coxe')
    utils.mkdir(testdir)

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['use_multiple_flowlines'] = False

    hef_file = get_demo_file('rgi_RGI50-01.10299.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # Just check if the rest runs
    centerlines.compute_downstream_bedshape(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.volume_inversion(gdir, use_cfg_params={'glen_a': cfg.A,
                                                     'fs': 0})
    inversion.filter_inversion_output(gdir)
    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')

    p = gdir.read_pickle('linear_mb_params')
    mb_mod = massbalance.LinearMassBalanceModel(ela_h=p['ela_h'],
                                                grad=p['grad'])
    mb_mod.temp_bias = -0.3
    model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0,
                                    is_tidewater=True)

    # run
    model.run_until(200)
    assert model.calving_m3_since_y0 > 0

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    return fig

def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Be sure data is downloaded
    cl = utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['optimize_inversion_params'] = True
    cfg.PARAMS['use_optimized_inversion_params'] = True
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['invert_with_rectangular'] = False
    cfg.PARAMS['run_mb_calibration'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('divides_workflow.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PARAMS['border'] = 10

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, "tmp")
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.PATHS["dem_file"] = get_demo_file("hef_srtm.tif")
    cfg.PATHS["climate_file"] = get_demo_file("histalp_merged_hef.nc")
    cfg.PARAMS["border"] = 10

def setUp(self):

    # test directory
    self.testdir = os.path.join(get_test_dir(), 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['working_dir'] = self.testdir

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('divides_workflow.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 10

def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Be sure data is downloaded
    utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['run_mb_calibration'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs

def setUp(self):

    # test directory
    self.testdir = os.path.join(get_test_dir(), 'tmp')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    self.rgi_file = get_demo_file('rgi_RGI50-01.10299.shp')

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40

def test_chhota_shigri():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_chhota')
    utils.mkdir(testdir)

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_chhota_shigri.tif')
    cfg.PARAMS['border'] = 60
    cfg.set_divides_db(get_demo_file('divides_RGI50-14.15990.shp'))

    hef_file = get_demo_file('RGI50-14.15990.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # We should have two groups
    lines = gdir.read_pickle('downstream_lines', div_id=0)
    assert len(np.unique(lines.group)) == 2

    # Just check if the rest runs
    centerlines.compute_downstream_bedshape(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.volume_inversion(gdir, use_cfg_params={'glen_a': cfg.A,
                                                     'fs': 0})
    inversion.filter_inversion_output(gdir)

    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')
    for fl in fls:
        fl.thick = np.clip(fl.thick, 100, 1000)
    model = flowline.FlowlineModel(fls)

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    return fig

def test_catchment_area(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.catchment_area(gdir)

    for div_id in gdir.divide_ids:
        cis = gdir.read_pickle('catchment_indices', div_id=div_id)

        # The catchment area must be as big as expected
        nc = netCDF4.Dataset(gdir.get_filepath('grids', div_id=div_id))
        mask = nc.variables['glacier_mask'][:]
        nc.close()

        mymask_a = mask * 0
        mymask_b = mask * 0
        for i, ci in enumerate(cis):
            mymask_a[tuple(ci.T)] += 1
            mymask_b[tuple(ci.T)] = i + 1
        self.assertTrue(np.max(mymask_a) == 1)
        np.testing.assert_allclose(mask, mymask_a)

def test_flowlines(self):

    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.initialize_flowlines(gdir)

    for div_id in gdir.divide_ids:
        cls = gdir.read_pickle("inversion_flowlines", div_id=div_id)
        for cl in cls:
            for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points,
                                 cl.inflows):
                self.assertTrue(cl.line.coords[j] == ip.coords[0])
                self.assertTrue(ob.flows_to_point.coords[0] ==
                                ip.coords[0])
                self.assertTrue(cl.line.coords[ob.flows_to_indice] ==
                                ip.coords[0])

    lens = [len(gdir.read_pickle("centerlines", div_id=i))
            for i in [1, 2, 3]]
    self.assertTrue(sorted(lens) == [1, 1, 2])

    x, y = map(np.array, cls[0].line.xy)
    dis = np.sqrt((x[1:] - x[:-1]) ** 2 + (y[1:] - y[:-1]) ** 2)
    np.testing.assert_allclose(dis * 0 + cfg.PARAMS["flowline_dx"], dis,
                               rtol=0.01)

def test_catchment_area(self):

    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    geometry.catchment_area(gdir)

    for div_id in gdir.divide_ids:
        cis = gdir.read_pickle("catchment_indices", div_id=div_id)

        # The catchment area must be as big as expected
        with netCDF4.Dataset(gdir.get_filepath("gridded_data",
                                               div_id=div_id)) as nc:
            mask = nc.variables["glacier_mask"][:]

        mymask_a = mask * 0
        mymask_b = mask * 0
        for i, ci in enumerate(cis):
            mymask_a[tuple(ci.T)] += 1
            mymask_b[tuple(ci.T)] = i + 1
        self.assertTrue(np.max(mymask_a) == 1)
        np.testing.assert_allclose(mask, mymask_a)

def test_run_constant_climate(self):
    """Test the run_constant_climate task for a climate based on the
    equilibrium period centred around t*. Additionally a positive and a
    negative temperature bias are tested.
    """
    # let's not use the mass balance bias since we want to reproduce
    # results from mass balance calibration
    cfg.PARAMS['use_bias_for_run'] = False

    # read the Hintereisferner DEM
    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    # initialize the GlacierDirectory
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    # define the local grid and glacier mask
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)

    # process the given climate file
    climate.process_custom_climate_data(gdir)
    # compute mass balance parameters
    ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
    vascaling.local_t_star(gdir, ref_df=ref_df)

    # define some parameters for the constant climate model
    nyears = 500
    temp_bias = 0.5
    _ = vascaling.run_constant_climate(gdir, nyears=nyears,
                                       output_filesuffix='')
    _ = vascaling.run_constant_climate(gdir, nyears=nyears,
                                       temperature_bias=+temp_bias,
                                       output_filesuffix='_bias_p')
    _ = vascaling.run_constant_climate(gdir, nyears=nyears,
                                       temperature_bias=-temp_bias,
                                       output_filesuffix='_bias_n')

    # compile run outputs
    ds = utils.compile_run_output([gdir], input_filesuffix='')
    ds_p = utils.compile_run_output([gdir], input_filesuffix='_bias_p')
    ds_n = utils.compile_run_output([gdir], input_filesuffix='_bias_n')

    # the glacier should not change under a constant climate
    # based on the equilibrium period centered around t*
    assert abs(1 - ds.volume.mean() / ds.volume[0]) < 1e-7
    # higher temperatures should result in a smaller glacier
    assert ds.volume.mean() > ds_p.volume.mean()
    # lower temperatures should result in a larger glacier
    assert ds.volume.mean() < ds_n.volume.mean()

    # compute volume change from one year to the next
    dV_p = (ds_p.volume[1:].values - ds_p.volume[:-1].values).flatten()
    dV_n = (ds_n.volume[1:].values - ds_n.volume[:-1].values).flatten()
    # compute relative volume change, with respect to the final volume
    rate_p = abs(dV_p / float(ds_p.volume.values[-1]))
    rate_n = abs(dV_n / float(ds_n.volume.values[-1]))
    # the glacier should be in a new equilibrium for the last 300 years
    assert max(rate_p[-300:]) < 0.001
    assert max(rate_n[-300:]) < 0.001

def test_flowlines(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.initialize_flowlines(gdir)

    for div_id in gdir.divide_ids:
        cls = gdir.read_pickle('inversion_flowlines', div_id=div_id)
        for cl in cls:
            for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points,
                                 cl.inflows):
                self.assertTrue(cl.line.coords[j] == ip.coords[0])
                self.assertTrue(ob.flows_to_point.coords[0] ==
                                ip.coords[0])
                self.assertTrue(cl.line.coords[ob.flows_to_indice] ==
                                ip.coords[0])

    lens = [len(gdir.read_pickle('centerlines', div_id=i))
            for i in [1, 2, 3]]
    self.assertTrue(sorted(lens) == [1, 1, 3])

    x, y = map(np.array, cls[0].line.xy)
    dis = np.sqrt((x[1:] - x[:-1])**2 + (y[1:] - y[:-1])**2)
    np.testing.assert_allclose(dis * 0 + cfg.params['flowline_dx'], dis,
                               rtol=0.01)

def up_to_inversion(reset=False):
    """Run the tasks you want."""

    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'histalp':
        reset = True
    else:
        try:
            tasks.prepare_for_inversion(gdirs[0])
        except Exception:
            reset = True

    if reset:
        # Use histalp for the actual inversion test
        cfg.PARAMS['temp_use_local_gradient'] = True
        cfg.PARAMS['baseline_climate'] = 'HISTALP'
        cru_dir = get_demo_file('HISTALP_precipitation_all_abs_1801-2014.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        workflow.climate_tasks(gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('histalp', f)

        # Inversion
        workflow.inversion_tasks(gdirs)

    return gdirs

def test_plot_region_model():

    gdirs = random_for_plot()

    dfc = utils.compile_task_log(gdirs,
                                 task_names=['run_random_climate_plot'])
    assert np.all(dfc['run_random_climate_plot'] == 'SUCCESS')

    # We prepare for the plot, which needs our own map to proceed.
    # Let's do a local mercator grid
    g = salem.mercator_grid(center_ll=(10.86, 46.85),
                            extent=(27000, 21000))
    # And a map accordingly
    sm = salem.Map(g, countries=False)
    sm.set_topography(get_demo_file('srtm_oetztal.tif'))

    # Give this to the plot function
    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdirs, smap=sm, ax=ax,
                                  filesuffix='_plot', vmax=250,
                                  modelyr=10, linewidth=1.5)
    fig.tight_layout()
    return fig

def test_download(self):
    f = utils.get_demo_file('Hintereisferner.shp')
    self.assertTrue(os.path.exists(f))

    sh = salem.utils.read_shapefile(f)
    self.assertTrue(hasattr(sh, 'geometry'))

def test_flowlines(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.initialize_flowlines(gdir)

    for div_id in gdir.divide_ids:
        cls = gdir.read_pickle('inversion_flowlines', div_id=div_id)
        for cl in cls:
            for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points,
                                 cl.inflows):
                self.assertTrue(cl.line.coords[j] == ip.coords[0])
                self.assertTrue(
                    ob.flows_to_point.coords[0] == ip.coords[0])
                self.assertTrue(
                    cl.line.coords[ob.flows_to_indice] == ip.coords[0])

    lens = [
        len(gdir.read_pickle('centerlines', div_id=i))
        for i in [1, 2, 3]
    ]
    self.assertTrue(sorted(lens) == [1, 1, 3])

    x, y = map(np.array, cls[0].line.xy)
    dis = np.sqrt((x[1:] - x[:-1])**2 + (y[1:] - y[:-1])**2)
    np.testing.assert_allclose(dis * 0 + cfg.PARAMS['flowline_dx'], dis,
                               rtol=0.01)

def up_to_distrib(reset=False):
    # for cross val basically

    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs

def test_catchment_area(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.catchment_area(gdir)

    for div_id in gdir.divide_ids:
        cis = gdir.read_pickle('catchment_indices', div_id=div_id)

        # The catchment area must be as big as expected
        nc = netCDF4.Dataset(
            gdir.get_filepath('gridded_data', div_id=div_id))
        mask = nc.variables['glacier_mask'][:]
        nc.close()

        mymask_a = mask * 0
        mymask_b = mask * 0
        for i, ci in enumerate(cis):
            mymask_a[tuple(ci.T)] += 1
            mymask_b[tuple(ci.T)] = i + 1
        self.assertTrue(np.max(mymask_a) == 1)
        np.testing.assert_allclose(mask, mymask_a)

def test_centerlines(self):

    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)

    # loop because for some reason indexing wont work
    for index, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)

    for div_id in gdir.divide_ids:
        cls = gdir.read_pickle('centerlines', div_id=div_id)
        for cl in cls:
            for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points,
                                 cl.inflows):
                self.assertTrue(cl.line.coords[j] == ip.coords[0])
                self.assertTrue(
                    ob.flows_to_point.coords[0] == ip.coords[0])
                self.assertTrue(
                    cl.line.coords[ob.flows_to_indice] == ip.coords[0])

    lens = [
        len(gdir.read_pickle('centerlines', div_id=i))
        for i in [1, 2, 3]
    ]
    self.assertTrue(sorted(lens) == [1, 1, 3])

def setup_cache(self):

    utils.mkdir(self.testdir, reset=True)
    self.cfg_init()

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

    tasks.define_glacier_region(gdir, entity=entity)
    tasks.glacier_masks(gdir)
    tasks.compute_centerlines(gdir)
    tasks.initialize_flowlines(gdir)
    tasks.compute_downstream_line(gdir)
    tasks.compute_downstream_bedshape(gdir)
    tasks.catchment_area(gdir)
    tasks.catchment_intersections(gdir)
    tasks.catchment_width_geom(gdir)
    tasks.catchment_width_correction(gdir)
    tasks.process_custom_climate_data(gdir)
    tasks.mu_candidates(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf)
    tasks.local_mustar(gdir, tstar=res['t_star'], bias=res['bias'])
    tasks.apparent_mb(gdir)

    tasks.prepare_for_inversion(gdir)
    tasks.mass_conservation_inversion(gdir)

    return gdir

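# A hypothetical companion method, sketched under the assumption that
# setup_cache above follows the asv (airspeed velocity) benchmark
# convention, where the object returned by setup_cache is passed to every
# benchmark method. A timing method then only re-runs the step it wants to
# measure on the fully pre-processed directory; it uses only tasks already
# called in setup_cache.
def time_inversion(self, gdir):
    # time only the bed inversion, everything else is cached
    tasks.prepare_for_inversion(gdir)
    tasks.mass_conservation_inversion(gdir)
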
def clean_dir(self):
    self.rm_dir()
    tfile = get_demo_file("dem.tif")
    gpath = os.path.dirname(tfile)
    self.rgin = os.path.basename(gpath)
    gpath = os.path.dirname(gpath)
    assert self.rgin == "RGI50-11.01270"
    shutil.copytree(gpath, self.testdir)

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, 'tmp_prepro')
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.testdir_cru = os.path.join(current_dir, 'tmp_prepro_cru')
    if not os.path.exists(self.testdir_cru):
        os.makedirs(self.testdir_cru)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('HEF_divided.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 10

def test_glacierdir(self):
    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)

    # this should simply run
    mygdir = oggm.GlacierDirectory(entity.RGIID, base_dir=self.testdir)

def test_define_region(self):
    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)

    tdf = gpd.GeoDataFrame.from_file(gdir.get_filepath("outlines"))
    myarea = tdf.geometry.area * 10 ** -6
    np.testing.assert_allclose(myarea, np.float(tdf["AREA"]), rtol=1e-2)

def test_downstream(self):
    hef_file = get_demo_file("Hintereisferner.shp")
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)

def test_present_time_glacier_massbalance(self):

    gdir = init_hef(border=DOM_BORDER)
    flowline.init_present_time_glacier(gdir)

    mb_mod = massbalance.HistalpMassBalanceModel(gdir)

    fls = gdir.read_pickle('model_flowlines')
    glacier = flowline.FlowlineModel(fls)

    hef_file = utils.get_demo_file('mbdata_RGI40-11.00897.csv')
    mbdf = pd.read_csv(hef_file).set_index('YEAR')

    hgts = np.array([])
    widths = np.array([])
    for fl in glacier.fls:
        hgts = np.concatenate((hgts, fl.surface_h))
        widths = np.concatenate((widths, fl.widths_m))
    tot_mb = []
    refmb = []
    grads = hgts * 0
    for yr, mb in mbdf.iterrows():
        refmb.append(mb['ANNUAL_BALANCE'])
        mbh = mb_mod.get_mb(hgts, yr) * SEC_IN_YEAR * cfg.RHO
        grads += mbh
        tot_mb.append(np.average(mbh, weights=widths))
    grads /= len(tot_mb)

    # Bias
    self.assertTrue(np.abs(utils.md(tot_mb, refmb)) < 50)

    # Gradient
    dfg = pd.read_csv(utils.get_demo_file('mbgrads_RGI40-11.00897.csv'),
                      index_col='ALTITUDE').mean(axis=1)

    # Take the altitudes below 3100 and fit a line
    dfg = dfg[dfg.index < 3100]
    pok = np.where(hgts < 3100)
    from scipy.stats import linregress
    slope_obs, _, _, _, _ = linregress(dfg.index, dfg.values)
    slope_our, _, _, _, _ = linregress(hgts[pok], grads[pok])
    np.testing.assert_allclose(slope_obs, slope_our, rtol=0.1)

def setUp(self):

    # test directory
    self.testdir = os.path.join(current_dir, "tmp")
    if not os.path.exists(self.testdir):
        os.makedirs(self.testdir)
    self.clean_dir()

    # Init
    cfg.initialize()
    cfg.PATHS["dem_file"] = get_demo_file("hef_srtm.tif")