def load_glacier_data(glac_no=None, rgi_regionsO1=None, rgi_regionsO2='all', rgi_glac_number='all',
                      load_caldata=0, startyear=2000, endyear=2018, option_wateryear=3):
    """Load glacier data (main_glac_rgi, hyps, and ice thickness).

    Parameters
    ----------
    glac_no : list, optional
        glacier identifiers passed through to modelsetup.selectglaciersrgitable
    rgi_regionsO1 : list, optional
        1st-order RGI regions
    rgi_regionsO2 : list or 'all'
        2nd-order RGI regions
    rgi_glac_number : list or 'all'
        glacier numbers within the selected regions
    load_caldata : int
        if 1, also retrieve calibration (mass-balance) data
    startyear, endyear : int
        period for the calibration dates table (only used when load_caldata == 1)
    option_wateryear : int
        water-year option forwarded to modelsetup.datesmodelrun

    Returns
    -------
    main_glac_rgi_all, main_glac_hyps_all, main_glac_icethickness_all
        plus cal_data_all when load_caldata == 1
    """
    # Select glaciers from the RGI table
    main_glac_rgi_all = modelsetup.selectglaciersrgitable(
        rgi_regionsO1=rgi_regionsO1, rgi_regionsO2=rgi_regionsO2,
        rgi_glac_number=rgi_glac_number, glac_no=glac_no)
    # Glacier hypsometry [km**2], total area
    main_glac_hyps_all = modelsetup.import_Husstable(
        main_glac_rgi_all, pygem_prms.hyps_filepath,
        pygem_prms.hyps_filedict, pygem_prms.hyps_colsdrop)
    # Ice thickness [m], average
    main_glac_icethickness_all = modelsetup.import_Husstable(
        main_glac_rgi_all, pygem_prms.thickness_filepath,
        pygem_prms.thickness_filedict, pygem_prms.thickness_colsdrop)
    # Zero the area in any bin with no ice, and replace missing values with 0
    main_glac_hyps_all[main_glac_icethickness_all == 0] = 0
    main_glac_hyps_all = main_glac_hyps_all.fillna(0)
    main_glac_icethickness_all = main_glac_icethickness_all.fillna(0)

    # Add degree groups to main_glac_rgi_all: each glacier is binned into a
    # (lon, lat) cell of width degree_size (module-level constant) and given
    # an integer deg_id for that cell.
    main_glac_rgi_all['CenLon_round'] = np.floor(main_glac_rgi_all.CenLon.values / degree_size) * degree_size
    main_glac_rgi_all['CenLat_round'] = np.floor(main_glac_rgi_all.CenLat.values / degree_size) * degree_size
    deg_groups = main_glac_rgi_all.groupby(['CenLon_round', 'CenLat_round']).size().index.values.tolist()
    deg_dict = dict(zip(deg_groups, np.arange(0, len(deg_groups))))
    main_glac_rgi_all.reset_index(drop=True, inplace=True)
    cenlon_cenlat = [(main_glac_rgi_all.loc[x, 'CenLon_round'], main_glac_rgi_all.loc[x, 'CenLat_round'])
                     for x in range(len(main_glac_rgi_all))]
    main_glac_rgi_all['CenLon_CenLat'] = cenlon_cenlat
    main_glac_rgi_all['deg_id'] = main_glac_rgi_all.CenLon_CenLat.map(deg_dict)

    if load_caldata == 1:
        cal_datasets = ['shean']
        # BUGFIX: startyear was previously reset to 2000 here, silently
        # overriding the caller-supplied value.
        dates_table = modelsetup.datesmodelrun(startyear=startyear, endyear=endyear, spinupyears=0,
                                               option_wateryear=option_wateryear)
        # Calibration data
        cal_data_all = pd.DataFrame()
        for dataset in cal_datasets:
            cal_subset = class_mbdata.MBData(name=dataset)
            cal_subset_data = cal_subset.retrieve_mb(main_glac_rgi_all, main_glac_hyps_all, dates_table)
            # pd.concat replaces the deprecated (removed in pandas >= 2.0)
            # DataFrame.append
            cal_data_all = pd.concat([cal_data_all, cal_subset_data], ignore_index=True)
        cal_data_all = cal_data_all.sort_values(['glacno', 't1_idx'])
        cal_data_all.reset_index(drop=True, inplace=True)
        return main_glac_rgi_all, main_glac_hyps_all, main_glac_icethickness_all, cal_data_all

    # BUGFIX: any load_caldata value other than 0/1 previously hit the
    # 'else' return and raised NameError on cal_data_all; calibration data
    # is now returned only when it was actually loaded.
    return main_glac_rgi_all, main_glac_hyps_all, main_glac_icethickness_all
# ----- Script configuration: downscaling run with ERA-Interim -----
gcm_name = 'ERA-Interim'
option_gcm_downscale = 2        # downscaling option (meaning defined elsewhere in the project)
option_lapserate_fromgcm = 1    # 1: derive lapse rates from the GCM
option_export = 1               # 1: export results
time_start = time.time()        # wall-clock start, for timing the run

#%% ===== LOAD GLACIER DATA =====
# RGI glacier attributes (all glaciers in the regions listed in rgi_regionsO1,
# which must be defined earlier in the script)
main_glac_rgi = modelsetup.selectglaciersrgitable(rgi_regionsO1=rgi_regionsO1, rgi_regionsO2='all',
                                                  rgi_glac_number='all')
# Glacier hypsometry [km**2], total area per elevation bin
main_glac_hyps = modelsetup.import_Husstable(main_glac_rgi, pygem_prms.hyps_filepath,
                                             pygem_prms.hyps_filedict, pygem_prms.hyps_colsdrop)
# Elevation bin centers [m a.s.l.] come from the hypsometry column labels
elev_bins = main_glac_hyps.columns.values.astype(int)
# Ice thickness [m], average per bin
main_glac_icethickness = modelsetup.import_Husstable(
    main_glac_rgi, pygem_prms.thickness_filepath, pygem_prms.thickness_filedict,
    pygem_prms.thickness_colsdrop)
# Zero out hypsometry in bins that have no ice
main_glac_hyps[main_glac_icethickness == 0] = 0
# Width [km], average per bin
main_glac_width = modelsetup.import_Husstable(main_glac_rgi, pygem_prms.width_filepath,
                                              pygem_prms.width_filedict, pygem_prms.width_colsdrop)
# Add volume [km**3] and mean elevation [m a.s.l.] to the main glaciers table
main_glac_rgi['Volume'], main_glac_rgi['Zmean'] = modelsetup.hypsometrystats(
    main_glac_hyps, main_glac_icethickness)
# Glacier selection rgi_regionsO1 = [1] #rgi_glac_number = ['14683'] rgi_glac_number = input.get_same_glaciers(os.getcwd() + '/../Output/cal_opt4/') startyear = 1980 endyear = 2018 # Select glaciers main_glac_rgi = modelsetup.selectglaciersrgitable( rgi_regionsO1=rgi_regionsO1, rgi_regionsO2='all', rgi_glac_number=rgi_glac_number) # Glacier hypsometry [km**2], total area main_glac_hyps = modelsetup.import_Husstable(main_glac_rgi, input.hyps_filepath, input.hyps_filedict, input.hyps_colsdrop) # Determine dates_table_idx that coincides with data rgi_regionsO1 dates_table = modelsetup.datesmodelrun(startyear, endyear, spinupyears=0) elev_bins = main_glac_hyps.columns.values.astype(int) elev_bin_interval = elev_bins[1] - elev_bins[0] #cal_datasets = ['larsen'] cal_data = pd.DataFrame() #for dataset in cal_datasets: cal_subset = class_mbdata.MBData(name='braun') #, rgi_region=rgi_regionsO1[0] cal_subset_data = cal_subset.retrieve_mb(main_glac_rgi, main_glac_hyps, dates_table) cal_data = cal_data.append(cal_subset_data, ignore_index=True)
# ----- Script configuration: downscaling run with ERA-Interim -----
# NOTE(review): near-duplicate of an earlier section of this file, except it
# reads config from the 'input' module instead of 'pygem_prms'.
gcm_name = 'ERA-Interim'
option_gcm_downscale = 2        # downscaling option (meaning defined elsewhere in the project)
option_lapserate_fromgcm = 1    # 1: derive lapse rates from the GCM
option_export = 1               # 1: export results
time_start = time.time()        # wall-clock start, for timing the run

#%% ===== LOAD GLACIER DATA =====
# RGI glacier attributes (rgi_regionsO1 must be defined earlier in the script)
main_glac_rgi = modelsetup.selectglaciersrgitable(rgi_regionsO1=rgi_regionsO1, rgi_regionsO2='all',
                                                  rgi_glac_number='all')
# Glacier hypsometry [km**2], total area per elevation bin
main_glac_hyps = modelsetup.import_Husstable(main_glac_rgi, input.hyps_filepath,
                                             input.hyps_filedict, input.hyps_colsdrop)
# Elevation bin centers [m a.s.l.] from the hypsometry column labels
elev_bins = main_glac_hyps.columns.values.astype(int)
# Ice thickness [m], average per bin
main_glac_icethickness = modelsetup.import_Husstable(main_glac_rgi, input.thickness_filepath,
                                                     input.thickness_filedict, input.thickness_colsdrop)
# Zero out hypsometry in bins that have no ice
main_glac_hyps[main_glac_icethickness == 0] = 0
# Width [km], average per bin
main_glac_width = modelsetup.import_Husstable(main_glac_rgi, input.width_filepath,
                                              input.width_filedict, input.width_colsdrop)
# Add volume [km**3] and mean elevation [m a.s.l.] to the main glaciers table
# NOTE(review): this statement is truncated at this chunk boundary -- the
# remaining arguments and closing paren are on a later line not shown here;
# left as-is.
main_glac_rgi['Volume'], main_glac_rgi['Zmean'] = modelsetup.hypsometrystats(
def load_masschange_monthly(regions, ds_ending, netcdf_fp=sim_netcdf_fp, option_add_caldata=0):
    """Load monthly mass change data.

    For each RGI region, opens the simulation netCDF ('R<region><ds_ending>'
    under netcdf_fp), loads the matching RGI/hypsometry/ice-thickness tables,
    and concatenates everything across regions.

    Parameters
    ----------
    regions : iterable of int
        1st-order RGI region numbers to load
    ds_ending : str
        filename suffix appended to 'R<region>' to form each netCDF filename
    netcdf_fp : str
        directory containing the simulation netCDF files (default: module-level
        sim_netcdf_fp)
    option_add_caldata : int
        if 1, also load calibration data (uses the module-level cal_datasets)

    Returns
    -------
    main_glac_rgi, glac_wide_masschange, glac_wide_area, time_values
        plus cal_data when option_add_caldata == 1.
        glac_wide_masschange is in [km3 mwe]; glac_wide_area is the annual
        area repeated to monthly resolution.
    """
    count = 0
    for region in regions:
        count += 1
        # Load datasets
        ds_fn = 'R' + str(region) + ds_ending
        ds = xr.open_dataset(netcdf_fp + ds_fn)
        # Glacier attribute table stored inside the netCDF
        main_glac_rgi_region_ds = pd.DataFrame(ds.glacier_table.values, columns=ds.glac_attrs)
        # [:, :, 0] selects the first index of the trailing dimension
        # (presumably the central/median statistic -- TODO confirm)
        glac_wide_massbaltotal_region = ds.massbaltotal_glac_monthly.values[:, :, 0]
        glac_wide_area_annual_region = ds.area_glac_annual.values[:, :, 0]
        time_values = pd.Series(ds.massbaltotal_glac_monthly.coords['time'].values)
        # ===== GLACIER DATA =====
        main_glac_rgi_region = modelsetup.selectglaciersrgitable(
            rgi_regionsO1=[region], rgi_regionsO2='all', rgi_glac_number='all')
        # Sanity check that the RGI selection lines up with the netCDF's table
        # (only prints on success; a mismatch is silently ignored)
        if (main_glac_rgi_region['glacno'] - main_glac_rgi_region_ds['glacno']).sum() == 0:
            print('Region', str(region), ': number of glaciers match')
        # Glacier hypsometry [km**2] per elevation bin
        main_glac_hyps_region = modelsetup.import_Husstable(
            main_glac_rgi_region, pygem_prms.hyps_filepath, pygem_prms.hyps_filedict,
            pygem_prms.hyps_colsdrop)
        # Ice thickness [m], average
        # NOTE(review): thickness uses the 'input' config module while hyps
        # above uses 'pygem_prms' -- confirm this mix is intentional.
        main_glac_icethickness_region = modelsetup.import_Husstable(
            main_glac_rgi_region, input.thickness_filepath, input.thickness_filedict,
            input.thickness_colsdrop)
        # Zero out hypsometry in bins with no ice
        main_glac_hyps_region[main_glac_icethickness_region == 0] = 0
        # ===== CALIBRATION DATA =====
        if option_add_caldata == 1:
            dates_table_nospinup = modelsetup.datesmodelrun(
                startyear=input.startyear, endyear=input.endyear, spinupyears=0)
            cal_data_region = pd.DataFrame()
            # cal_datasets is a module-level name (not defined in this function)
            for dataset in cal_datasets:
                cal_subset = class_mbdata.MBData(name=dataset)
                cal_subset_data = cal_subset.retrieve_mb(
                    main_glac_rgi_region, main_glac_hyps_region, dates_table_nospinup)
                # NOTE(review): DataFrame.append is deprecated (removed in
                # pandas >= 2.0); pd.concat is the modern replacement.
                cal_data_region = cal_data_region.append(cal_subset_data, ignore_index=True)
            cal_data_region = cal_data_region.sort_values(['glacno', 't1_idx'])
            cal_data_region.reset_index(drop=True, inplace=True)
        # ===== APPEND DATASETS =====
        if count == 1:
            # First region initializes the accumulators
            main_glac_rgi = main_glac_rgi_region
            main_glac_hyps = main_glac_hyps_region
            main_glac_icethickness = main_glac_icethickness_region
            glac_wide_massbaltotal = glac_wide_massbaltotal_region
            glac_wide_area_annual = glac_wide_area_annual_region
            if option_add_caldata == 1:
                cal_data = cal_data_region
        else:
            main_glac_rgi = main_glac_rgi.append(main_glac_rgi_region)
            glac_wide_massbaltotal = np.concatenate(
                [glac_wide_massbaltotal, glac_wide_massbaltotal_region])
            glac_wide_area_annual = np.concatenate(
                [glac_wide_area_annual, glac_wide_area_annual_region])
            if option_add_caldata == 1:
                cal_data = cal_data.append(cal_data_region)
            # If more columns in region, then need to expand existing dataset
            # (regions can have different elevation-bin ranges; missing bins
            # are filled with 0 so the tables can be stacked)
            if main_glac_hyps_region.shape[1] > main_glac_hyps.shape[1]:
                all_col = list(main_glac_hyps.columns.values)
                reg_col = list(main_glac_hyps_region.columns.values)
                new_cols = [item for item in reg_col if item not in all_col]
                for new_col in new_cols:
                    main_glac_hyps[new_col] = 0
                    main_glac_icethickness[new_col] = 0
            elif main_glac_hyps_region.shape[1] < main_glac_hyps.shape[1]:
                all_col = list(main_glac_hyps.columns.values)
                reg_col = list(main_glac_hyps_region.columns.values)
                new_cols = [item for item in all_col if item not in reg_col]
                for new_col in new_cols:
                    main_glac_hyps_region[new_col] = 0
                    main_glac_icethickness_region[new_col] = 0
            main_glac_hyps = main_glac_hyps.append(main_glac_hyps_region)
            main_glac_icethickness = main_glac_icethickness.append(
                main_glac_icethickness_region)
    # reset index
    main_glac_rgi.reset_index(inplace=True, drop=True)
    main_glac_hyps.reset_index(inplace=True, drop=True)
    main_glac_icethickness.reset_index(inplace=True, drop=True)
    if option_add_caldata == 1:
        cal_data.reset_index(inplace=True, drop=True)
    # Volume [km**3] and mean elevation [m a.s.l.]
    main_glac_rgi['Volume'], main_glac_rgi['Zmean'] = modelsetup.hypsometrystats(
        main_glac_hyps, main_glac_icethickness)
    # ===== MASS CHANGE CALCULATIONS =====
    # Compute glacier volume change for every time step and use this to compute mass balance
    # Repeat each annual area 12 times to get a monthly area series
    # (drops the last annual value, which has no corresponding 12 months)
    glac_wide_area = np.repeat(glac_wide_area_annual[:, :-1], 12, axis=1)
    # Mass change [km3 mwe]
    # mb [mwea] * (1 km / 1000 m) * area [km2]
    glac_wide_masschange = glac_wide_massbaltotal / 1000 * glac_wide_area
    if option_add_caldata == 1:
        return main_glac_rgi, glac_wide_masschange, glac_wide_area, time_values, cal_data
    else:
        return main_glac_rgi, glac_wide_masschange, glac_wide_area, time_values
def main(list_packed_vars):
    """Climate data bias adjustment.

    Parameters
    ----------
    list_packed_vars : list
        list of packed variables that enable the use of parallels:
        [count, chunk, main_glac_rgi_all, chunk_size, gcm_name]

    Returns
    -------
    csv files of bias adjustment output
        The bias adjustment parameters are output instead of the actual
        temperature and precipitation to reduce file sizes.  Additionally,
        using the bias adjustment will cause the GCM climate data to use the
        reference elevation since the adjustments were made from the GCM
        climate data to be consistent with the reference dataset.
    """
    # Unpack variables
    count = list_packed_vars[0]
    chunk = list_packed_vars[1]
    main_glac_rgi_all = list_packed_vars[2]
    chunk_size = list_packed_vars[3]
    gcm_name = list_packed_vars[4]
    time_start = time.time()
    parser = getparser()
    args = parser.parse_args()
    # Determine the RCP scenario from the CLI args or the GCM-list filename.
    # NOTE(review): if gcm_name equals the reference GCM and args.rcp is None,
    # rcp_scenario stays unbound; that is only safe because the non-reference
    # branch below is the only consumer -- confirm.
    if (gcm_name != pygem_prms.ref_gcm_name) and (args.rcp is None):
        rcp_scenario = os.path.basename(args.gcm_list_fn).split('_')[1]
    elif args.rcp is not None:
        rcp_scenario = args.rcp
    # rcp_scenario = os.path.basename(args.gcm_file).split('_')[1]

    # ===== LOAD OTHER GLACIER DATA =====
    # Subset of glaciers handled by this parallel worker
    main_glac_rgi = main_glac_rgi_all.iloc[chunk:chunk + chunk_size, :]
    # Glacier hypsometry [km**2], total area
    main_glac_hyps = modelsetup.import_Husstable(main_glac_rgi, pygem_prms.hyps_filepath,
                                                 pygem_prms.hyps_filedict, pygem_prms.hyps_colsdrop)
    # Ice thickness [m], average
    main_glac_icethickness = modelsetup.import_Husstable(
        main_glac_rgi, pygem_prms.thickness_filepath, pygem_prms.thickness_filedict,
        pygem_prms.thickness_colsdrop)
    main_glac_hyps[main_glac_icethickness == 0] = 0
    # Width [km], average
    main_glac_width = modelsetup.import_Husstable(main_glac_rgi, pygem_prms.width_filepath,
                                                  pygem_prms.width_filedict, pygem_prms.width_colsdrop)
    elev_bins = main_glac_hyps.columns.values.astype(int)

    # Select dates including future projections
    # If reference climate data starts or ends before or after the GCM data, then adjust
    # reference climate data such that the reference and GCM span the same period of time.
    if pygem_prms.startyear >= pygem_prms.gcm_startyear:
        ref_startyear = pygem_prms.startyear
    else:
        ref_startyear = pygem_prms.gcm_startyear
    if pygem_prms.endyear <= pygem_prms.gcm_endyear:
        ref_endyear = pygem_prms.endyear
    else:
        ref_endyear = pygem_prms.gcm_endyear
    dates_table_ref = modelsetup.datesmodelrun(startyear=ref_startyear, endyear=ref_endyear,
                                               spinupyears=pygem_prms.ref_spinupyears,
                                               option_wateryear=pygem_prms.ref_wateryear)
    dates_table = modelsetup.datesmodelrun(startyear=pygem_prms.gcm_startyear,
                                           endyear=pygem_prms.gcm_endyear,
                                           spinupyears=pygem_prms.gcm_spinupyears,
                                           option_wateryear=pygem_prms.gcm_wateryear)

    # ===== LOAD CLIMATE DATA =====
    # Reference climate data
    ref_gcm = class_climate.GCM(name=pygem_prms.ref_gcm_name)
    # Air temperature [degC], Precipitation [m], Elevation [masl], Lapse rate [K m-1]
    ref_temp, ref_dates = ref_gcm.importGCMvarnearestneighbor_xarray(
        ref_gcm.temp_fn, ref_gcm.temp_vn, main_glac_rgi, dates_table_ref)
    ref_prec, ref_dates = ref_gcm.importGCMvarnearestneighbor_xarray(
        ref_gcm.prec_fn, ref_gcm.prec_vn, main_glac_rgi, dates_table_ref)
    ref_elev = ref_gcm.importGCMfxnearestneighbor_xarray(ref_gcm.elev_fn, ref_gcm.elev_vn,
                                                         main_glac_rgi)
    ref_lr, ref_dates = ref_gcm.importGCMvarnearestneighbor_xarray(
        ref_gcm.lr_fn, ref_gcm.lr_vn, main_glac_rgi, dates_table_ref)
    # Monthly average lapse rate: reshape (glac, months) into per-month groups
    # and average across years, yielding a (glac, 12) array
    ref_lr_monthly_avg = (ref_lr.reshape(-1, 12).transpose()
                          .reshape(-1, int(ref_temp.shape[1] / 12)).mean(1)
                          .reshape(12, -1).transpose())

    # GCM climate data (ERA-Interim and COAWST have no RCP scenario)
    if gcm_name == 'ERA-Interim' or gcm_name == 'COAWST':
        gcm = class_climate.GCM(name=gcm_name)
    else:
        gcm = class_climate.GCM(name=gcm_name, rcp_scenario=rcp_scenario)
    # Air temperature [degC], Precipitation [m], Elevation [masl], Lapse rate [K m-1]
    gcm_temp, gcm_dates = gcm.importGCMvarnearestneighbor_xarray(gcm.temp_fn, gcm.temp_vn,
                                                                 main_glac_rgi, dates_table)
    gcm_prec, gcm_dates = gcm.importGCMvarnearestneighbor_xarray(gcm.prec_fn, gcm.prec_vn,
                                                                 main_glac_rgi, dates_table)
    gcm_elev = gcm.importGCMfxnearestneighbor_xarray(gcm.elev_fn, gcm.elev_vn, main_glac_rgi)
    if gcm_name == 'ERA-Interim':
        gcm_lr, gcm_dates = gcm.importGCMvarnearestneighbor_xarray(gcm.lr_fn, gcm.lr_vn,
                                                                   main_glac_rgi, dates_table)
    else:
        # GCMs without lapse-rate output reuse the reference lapse rates
        gcm_lr = monthly_avg_array_rolled(ref_lr, dates_table_ref, dates_table)

    # COAWST data has two domains, so need to merge the two domains
    if gcm_name == 'COAWST':
        gcm_temp_d01, gcm_dates = gcm.importGCMvarnearestneighbor_xarray(
            gcm.temp_fn_d01, gcm.temp_vn, main_glac_rgi, dates_table)
        gcm_prec_d01, gcm_dates = gcm.importGCMvarnearestneighbor_xarray(
            gcm.prec_fn_d01, gcm.prec_vn, main_glac_rgi, dates_table)
        gcm_elev_d01 = gcm.importGCMfxnearestneighbor_xarray(gcm.elev_fn_d01, gcm.elev_vn,
                                                             main_glac_rgi)
        # Check if glacier outside of high-res (d02) domain; if so, fall back
        # to the coarse d01 values for that glacier
        for glac in range(main_glac_rgi.shape[0]):
            glac_lat = main_glac_rgi.loc[glac, pygem_prms.rgi_lat_colname]
            glac_lon = main_glac_rgi.loc[glac, pygem_prms.rgi_lon_colname]
            # NOTE(review): '~' on a Python bool is bitwise NOT (~True == -2,
            # truthy), not logical negation -- this condition looks suspect;
            # verify against the original source.
            if (~(pygem_prms.coawst_d02_lat_min <= glac_lat <= pygem_prms.coawst_d02_lat_max)
                    or ~(pygem_prms.coawst_d02_lon_min <= glac_lon <= pygem_prms.coawst_d02_lon_max)):
                gcm_prec[glac, :] = gcm_prec_d01[glac, :]
                gcm_temp[glac, :] = gcm_temp_d01[glac, :]
                gcm_elev[glac] = gcm_elev_d01[glac]

    #%% ===== BIAS CORRECTIONS =====
    # OPTION 1: Adjust temp and prec similar to Huss and Hock (2015) but limit maximum precipitation
    #  - temperature accounts for means and interannual variability
    #  - precipitation corrects
    if pygem_prms.option_bias_adjustment == 1:
        # Temperature bias correction
        gcm_temp_biasadj, gcm_elev_biasadj = temp_biasadj_HH2015(ref_temp, ref_elev, gcm_temp,
                                                                 dates_table_ref, dates_table)
        # Precipitation bias correction
        gcm_prec_biasadj, gcm_elev_biasadj = prec_biasadj_opt1(ref_prec, ref_elev, gcm_prec,
                                                               dates_table_ref, dates_table)
    # OPTION 2: Adjust temp and prec according to Huss and Hock (2015)
    # accounts for means and interannual variability
    elif pygem_prms.option_bias_adjustment == 2:
        # Temperature bias correction
        gcm_temp_biasadj, gcm_elev_biasadj = temp_biasadj_HH2015(ref_temp, ref_elev, gcm_temp,
                                                                 dates_table_ref, dates_table)
        # Precipitation bias correction
        gcm_prec_biasadj, gcm_elev_biasadj = prec_biasadj_HH2015(ref_prec, ref_elev, gcm_prec,
                                                                 dates_table_ref, dates_table)
    # Sanity checks on the adjusted precipitation (report only, no correction)
    if gcm_prec_biasadj.max() > 10:
        print('precipitation bias too high, needs to be modified')
        print(np.where(gcm_prec_biasadj > 10))
    elif gcm_prec_biasadj.min() < 0:
        print('Negative precipitation value')
        print(np.where(gcm_prec_biasadj < 0))

    #%% PLOT BIAS ADJUSTED DATA
    if option_plot_adj:
        print('plotting')
        plot_biasadj(ref_temp, gcm_temp_biasadj, ref_prec, gcm_prec, gcm_prec_biasadj,
                     dates_table_ref, dates_table)

    #%% Export variables as global to view in variable explorer
    if args.option_parallels == 0:
        global main_vars
        main_vars = inspect.currentframe().f_locals

    print('\nProcessing time of', gcm_name, 'for', count, ':', time.time() - time_start, 's')
# NOTE(review): the opening of this list comprehension (its assignment target
# and '[') is on an earlier line not shown in this chunk; the fragment is
# left untouched.
str(x)[:4] for x in larsen_summary.date1.values ]
# Replace Mendenhall with '2000F', '2012F'
# Lemon Creek with '1993F', '2012F'
# Taku '1993F', '2012F'
# Nizina is not there
# Yanert is not available for the given time periods - others are

# Load RGI attributes (glacno must be defined earlier in the script)
rgi_regionsO1 = [1]
main_glac_rgi = modelsetup.selectglaciersrgitable(rgi_regionsO1=rgi_regionsO1, rgi_regionsO2='all',
                                                  rgi_glac_number=glacno)
# Hypsometry [km**2] per elevation bin (bin size inferred from the columns)
main_glac_hyps_10m = modelsetup.import_Husstable(main_glac_rgi, pygem_prms.hyps_filepath,
                                                 pygem_prms.hyps_filedict, pygem_prms.hyps_colsdrop)
binsize_rgi = int(main_glac_hyps_10m.columns[1]) - int(main_glac_hyps_10m.columns[0])
#%%
# Quick quality control check on Huss product:
# build a 0/1 mask of occupied bins, find the first occupied bin per glacier
# (argmax returns the first index of the maximum, i.e. the first 1), and
# compare that minimum elevation with the RGI Zmin attribute.
huss_area_ones = main_glac_hyps_10m.copy().values
huss_area_ones[huss_area_ones > 0] = 1
huss_area_ones_idxmax = np.argmax(huss_area_ones, axis=1)
main_glac_rgi['huss_min_elev'] = [int(main_glac_hyps_10m.columns.values[x])
                                  for x in list(huss_area_ones_idxmax)]
# Positive/negative differences indicate disagreement between RGI Zmin and
# the Huss hypsometry's lowest occupied bin
main_glac_rgi['dif_min_elev'] = main_glac_rgi.Zmin - main_glac_rgi.huss_min_elev