def load_glacier_data(glac_no=None, rgi_regionsO1=None, rgi_regionsO2='all', rgi_glac_number='all',
                      load_caldata=0, startyear=2000, endyear=2018, option_wateryear=3):
    """
    Load glacier data (main_glac_rgi, hyps, and ice thickness).

    Parameters
    ----------
    glac_no : list of str, optional
        specific glacier numbers; overrides the region/number selection when given
    rgi_regionsO1 : list of int, optional
        1st order RGI regions
    rgi_regionsO2 : list of int or 'all'
        2nd order RGI regions
    rgi_glac_number : list of str or 'all'
        glacier numbers within the selected regions
    load_caldata : int
        if 1, also retrieve the calibration mass-balance data
    startyear, endyear : int
        first/last year of the dates table used for the calibration data
    option_wateryear : int
        wateryear option passed to modelsetup.datesmodelrun

    Returns
    -------
    main_glac_rgi_all : pd.DataFrame
    main_glac_hyps_all : pd.DataFrame
    main_glac_icethickness_all : pd.DataFrame
    cal_data_all : pd.DataFrame (only when load_caldata == 1)
    """
    # Load glaciers
    main_glac_rgi_all = modelsetup.selectglaciersrgitable(
        rgi_regionsO1=rgi_regionsO1, rgi_regionsO2=rgi_regionsO2,
        rgi_glac_number=rgi_glac_number, glac_no=glac_no)
    # Glacier hypsometry [km**2], total area
    main_glac_hyps_all = modelsetup.import_Husstable(main_glac_rgi_all, pygem_prms.hyps_filepath,
                                                     pygem_prms.hyps_filedict, pygem_prms.hyps_colsdrop)
    # Ice thickness [m], average
    main_glac_icethickness_all = modelsetup.import_Husstable(main_glac_rgi_all, pygem_prms.thickness_filepath,
                                                             pygem_prms.thickness_filedict,
                                                             pygem_prms.thickness_colsdrop)
    # Zero out area in bins that have no ice, then fill missing values
    main_glac_hyps_all[main_glac_icethickness_all == 0] = 0
    main_glac_hyps_all = main_glac_hyps_all.fillna(0)
    main_glac_icethickness_all = main_glac_icethickness_all.fillna(0)

    # Add degree groups to main_glac_rgi_all: snap each glacier's center to a
    # degree_size grid and give every occupied grid cell an integer id
    main_glac_rgi_all['CenLon_round'] = np.floor(main_glac_rgi_all.CenLon.values / degree_size) * degree_size
    main_glac_rgi_all['CenLat_round'] = np.floor(main_glac_rgi_all.CenLat.values / degree_size) * degree_size
    deg_groups = main_glac_rgi_all.groupby(['CenLon_round', 'CenLat_round']).size().index.values.tolist()
    deg_dict = dict(zip(deg_groups, np.arange(0, len(deg_groups))))
    main_glac_rgi_all.reset_index(drop=True, inplace=True)
    # zip over the two columns builds the same (lon, lat) tuples as the original
    # per-row .loc loop, without O(n) scalar lookups
    cenlon_cenlat = list(zip(main_glac_rgi_all['CenLon_round'], main_glac_rgi_all['CenLat_round']))
    main_glac_rgi_all['CenLon_CenLat'] = cenlon_cenlat
    main_glac_rgi_all['deg_id'] = main_glac_rgi_all.CenLon_CenLat.map(deg_dict)

    if load_caldata == 1:
        cal_datasets = ['shean']
        # NOTE(review): this overrides the startyear parameter whenever calibration
        # data is loaded — looks like leftover debugging; confirm before removing.
        startyear = 2000
        dates_table = modelsetup.datesmodelrun(startyear=startyear, endyear=endyear, spinupyears=0,
                                               option_wateryear=option_wateryear)
        # Calibration data
        cal_data_all = pd.DataFrame()
        for dataset in cal_datasets:
            cal_subset = class_mbdata.MBData(name=dataset)
            cal_subset_data = cal_subset.retrieve_mb(main_glac_rgi_all, main_glac_hyps_all, dates_table)
            # DataFrame.append was removed in pandas 2.0; pd.concat is the supported equivalent
            cal_data_all = pd.concat([cal_data_all, cal_subset_data], ignore_index=True)
        cal_data_all = cal_data_all.sort_values(['glacno', 't1_idx'])
        cal_data_all.reset_index(drop=True, inplace=True)

    if load_caldata == 0:
        return main_glac_rgi_all, main_glac_hyps_all, main_glac_icethickness_all
    else:
        return main_glac_rgi_all, main_glac_hyps_all, main_glac_icethickness_all, cal_data_all
# Glacier hypsometry [km**2], total area main_glac_hyps = modelsetup.import_Husstable(main_glac_rgi, input.hyps_filepath, input.hyps_filedict, input.hyps_colsdrop) # Determine dates_table_idx that coincides with data rgi_regionsO1 dates_table = modelsetup.datesmodelrun(startyear, endyear, spinupyears=0) elev_bins = main_glac_hyps.columns.values.astype(int) elev_bin_interval = elev_bins[1] - elev_bins[0] #cal_datasets = ['larsen'] cal_data = pd.DataFrame() #for dataset in cal_datasets: cal_subset = class_mbdata.MBData(name='braun') #, rgi_region=rgi_regionsO1[0] cal_subset_data = cal_subset.retrieve_mb(main_glac_rgi, main_glac_hyps, dates_table) cal_data = cal_data.append(cal_subset_data, ignore_index=True) cal_data = cal_data.sort_values(['glacno', 't1_idx']) cal_data.reset_index(drop=True, inplace=True) ds2 = cal_data #ds2 = pd.read_csv(os.getcwd() + '/../DEMs/larsen/larsen2015_supplementdata_wRGIIds.csv') #ds2 = pd.read_csv(os.getcwd() + '/../DEMs/McNabb_data/wgms_dv/Alaska_dV_17jun_preprocessed.csv') larsen_glac = input.get_same_glaciers(os.getcwd() + '/../Output/cal_opt1/reg1') larsen_glac = ['RGI60-01.' + x for x in larsen_glac] #glac_idxs = ds2.RGIId #glac_idxs = glac_idxs.index.tolist(larsen_glac) mb_sim = ds.variables['massbaltotal_glac_monthly'].values[:, :, 0] mb_cal = ds2.mb_mwe
def load_masschange_monthly(regions, ds_ending, netcdf_fp=sim_netcdf_fp, option_add_caldata=0):
    """
    Load monthly mass change data for the given RGI regions.

    Parameters
    ----------
    regions : list of int
        1st order RGI regions to load and concatenate
    ds_ending : str
        filename suffix of the regional netcdf files ('R<region>' + ds_ending)
    netcdf_fp : str
        filepath of the simulation netcdf output
    option_add_caldata : int
        if 1, also load and return the calibration data

    Returns
    -------
    main_glac_rgi : pd.DataFrame
    glac_wide_masschange : np.ndarray
        monthly mass change [km3 mwe] per glacier
    glac_wide_area : np.ndarray
        glacier area [km2] repeated monthly
    time_values : pd.Series
    cal_data : pd.DataFrame (only when option_add_caldata == 1)
    """
    count = 0
    for region in regions:
        count += 1

        # Load regional simulation dataset
        ds_fn = 'R' + str(region) + ds_ending
        ds = xr.open_dataset(netcdf_fp + ds_fn)
        main_glac_rgi_region_ds = pd.DataFrame(ds.glacier_table.values, columns=ds.glac_attrs)
        glac_wide_massbaltotal_region = ds.massbaltotal_glac_monthly.values[:, :, 0]
        glac_wide_area_annual_region = ds.area_glac_annual.values[:, :, 0]
        time_values = pd.Series(ds.massbaltotal_glac_monthly.coords['time'].values)

        # ===== GLACIER DATA =====
        main_glac_rgi_region = modelsetup.selectglaciersrgitable(
            rgi_regionsO1=[region], rgi_regionsO2='all', rgi_glac_number='all')
        # Sanity check: simulation output and RGI table should list the same glaciers
        if (main_glac_rgi_region['glacno'] - main_glac_rgi_region_ds['glacno']).sum() == 0:
            print('Region', str(region), ': number of glaciers match')
        # Glacier hypsometry [km**2]
        main_glac_hyps_region = modelsetup.import_Husstable(
            main_glac_rgi_region, pygem_prms.hyps_filepath, pygem_prms.hyps_filedict,
            pygem_prms.hyps_colsdrop)
        # Ice thickness [m], average
        # NOTE(review): thickness is read from 'input' while hyps uses 'pygem_prms' —
        # looks like a half-finished module rename; confirm which module is current.
        main_glac_icethickness_region = modelsetup.import_Husstable(
            main_glac_rgi_region, input.thickness_filepath, input.thickness_filedict,
            input.thickness_colsdrop)
        # Zero out area in bins that have no ice
        main_glac_hyps_region[main_glac_icethickness_region == 0] = 0

        # ===== CALIBRATION DATA =====
        if option_add_caldata == 1:
            dates_table_nospinup = modelsetup.datesmodelrun(
                startyear=input.startyear, endyear=input.endyear, spinupyears=0)
            cal_data_region = pd.DataFrame()
            for dataset in cal_datasets:
                cal_subset = class_mbdata.MBData(name=dataset)
                cal_subset_data = cal_subset.retrieve_mb(
                    main_glac_rgi_region, main_glac_hyps_region, dates_table_nospinup)
                # DataFrame.append was removed in pandas 2.0; pd.concat is the supported equivalent
                cal_data_region = pd.concat([cal_data_region, cal_subset_data], ignore_index=True)
            cal_data_region = cal_data_region.sort_values(['glacno', 't1_idx'])
            cal_data_region.reset_index(drop=True, inplace=True)

        # ===== APPEND DATASETS =====
        if count == 1:
            main_glac_rgi = main_glac_rgi_region
            main_glac_hyps = main_glac_hyps_region
            main_glac_icethickness = main_glac_icethickness_region
            glac_wide_massbaltotal = glac_wide_massbaltotal_region
            glac_wide_area_annual = glac_wide_area_annual_region
            if option_add_caldata == 1:
                cal_data = cal_data_region
        else:
            main_glac_rgi = pd.concat([main_glac_rgi, main_glac_rgi_region])
            glac_wide_massbaltotal = np.concatenate(
                [glac_wide_massbaltotal, glac_wide_massbaltotal_region])
            glac_wide_area_annual = np.concatenate(
                [glac_wide_area_annual, glac_wide_area_annual_region])
            if option_add_caldata == 1:
                cal_data = pd.concat([cal_data, cal_data_region])
            # Regions may span different elevation bins; pad the narrower table with
            # zero columns so the row-wise concatenation aligns
            if main_glac_hyps_region.shape[1] > main_glac_hyps.shape[1]:
                all_col = list(main_glac_hyps.columns.values)
                reg_col = list(main_glac_hyps_region.columns.values)
                new_cols = [item for item in reg_col if item not in all_col]
                for new_col in new_cols:
                    main_glac_hyps[new_col] = 0
                    main_glac_icethickness[new_col] = 0
            elif main_glac_hyps_region.shape[1] < main_glac_hyps.shape[1]:
                all_col = list(main_glac_hyps.columns.values)
                reg_col = list(main_glac_hyps_region.columns.values)
                new_cols = [item for item in all_col if item not in reg_col]
                for new_col in new_cols:
                    main_glac_hyps_region[new_col] = 0
                    main_glac_icethickness_region[new_col] = 0
            main_glac_hyps = pd.concat([main_glac_hyps, main_glac_hyps_region])
            main_glac_icethickness = pd.concat([main_glac_icethickness, main_glac_icethickness_region])

    # reset index
    main_glac_rgi.reset_index(inplace=True, drop=True)
    main_glac_hyps.reset_index(inplace=True, drop=True)
    main_glac_icethickness.reset_index(inplace=True, drop=True)
    if option_add_caldata == 1:
        cal_data.reset_index(inplace=True, drop=True)

    # Volume [km**3] and mean elevation [m a.s.l.]
    main_glac_rgi['Volume'], main_glac_rgi['Zmean'] = modelsetup.hypsometrystats(
        main_glac_hyps, main_glac_icethickness)

    # ===== MASS CHANGE CALCULATIONS =====
    # Compute glacier volume change for every time step and use this to compute mass balance
    # Repeat each annual area 12x so it lines up with the monthly mass-balance series
    glac_wide_area = np.repeat(glac_wide_area_annual[:, :-1], 12, axis=1)
    # Mass change [km3 mwe]: mb [mwea] * (1 km / 1000 m) * area [km2]
    glac_wide_masschange = glac_wide_massbaltotal / 1000 * glac_wide_area

    if option_add_caldata == 1:
        return main_glac_rgi, glac_wide_masschange, glac_wide_area, time_values, cal_data
    else:
        return main_glac_rgi, glac_wide_masschange, glac_wide_area, time_values