def calculate_volume_changes(dDEM, glacier_shapes, burn_handle=None, ind_glac_vals=None):
    """Calculate the total volume change of each glacier in a difference DEM.

    Parameters
    ----------
    dDEM : GeoImg-like
        Elevation-difference image with an ``img`` 2-D array and ``dx``/``dy``
        cell-size attributes.
    glacier_shapes : str or numpy.ndarray
        Either a path to glacier polygons to rasterize onto the dDEM grid, or a
        pre-rasterized index array with the same shape as ``dDEM.img``.
    burn_handle : str, optional
        Attribute field used when rasterizing polygons (passed through to
        ``it.rasterize_polygons``).
    ind_glac_vals : array-like, optional
        Glacier index values to compute volume changes for. For array input,
        defaults to the unique values in ``glacier_shapes``.

    Returns
    -------
    ind_glac_vals : numpy.ndarray
        Glacier index values.
    ind_vol_chgs : numpy.ndarray
        Per-glacier volume change (sum of dh times cell area); NaN for any
        index with no cells in the mask.

    Raises
    ------
    TypeError
        If ``glacier_shapes`` is neither a filename nor a numpy array.
    """
    if isinstance(glacier_shapes, str):
        ind_glac_mask, ind_glac_vals = it.rasterize_polygons(
            dDEM, glacier_shapes, burn_handle=burn_handle)
    elif isinstance(glacier_shapes, np.ndarray):
        # BUG FIX: the original tested ``type(glacier_shapes) is np.array``.
        # ``np.array`` is a factory function, never a type, so array input
        # always fell through both branches and raised NameError below.
        if ind_glac_vals is None:
            ind_glac_vals = np.unique(glacier_shapes)
        else:
            # asarray also accepts tuples/arrays, generalizing the original
            # list-only conversion.
            ind_glac_vals = np.asarray(ind_glac_vals)
        ind_glac_mask = glacier_shapes
    else:
        raise TypeError('glacier_shapes must be a filename or a numpy array.')

    ind_vol_chgs = np.full(ind_glac_vals.shape, np.nan)
    # cell area is constant over the grid; hoist it out of the loop
    cell_area = np.abs(dDEM.dx) * np.abs(dDEM.dy)
    for i, ind in enumerate(ind_glac_vals):
        glac_inds = np.where(ind_glac_mask == ind)
        if glac_inds[0].size == 0:
            continue  # no cells for this glacier; leave NaN
        glac_chgs = dDEM.img[glac_inds]
        # volume change = sum of dh over the glacier, times the cell area
        ind_vol_chgs[i] = np.nansum(glac_chgs) * cell_area
    return ind_glac_vals, ind_vol_chgs
def fill_holes_individually(dDEM, glacshapes, functype, burn_handle=None, **kwargs):
    """Fill holes in a dDEM one glacier at a time.

    Rasterizes the glacier outlines, then runs ``fill_holes`` separately for
    each glacier index, copying the filled values back into a copy of the
    input dDEM. Filling is best-effort: a glacier whose fill fails is left
    unchanged.

    Parameters
    ----------
    dDEM : GeoImg-like
        Elevation-difference image to fill.
    glacshapes : str
        Glacier outlines to rasterize onto the dDEM grid.
    functype : str
        Function type passed through to ``fill_holes``.
    burn_handle : str, optional
        Attribute field used when rasterizing polygons.
    **kwargs
        Additional keyword arguments forwarded to ``fill_holes``. Any
        ``glacier_mask`` entry is discarded, since a per-glacier mask is
        supplied on each iteration.

    Returns
    -------
    filled_ddem : GeoImg-like
        Copy of ``dDEM`` with per-glacier filled values.
    """
    # first, get the individual glacier mask.
    ind_glac_mask, ind_glac_vals = it.rasterize_polygons(dDEM, glacshapes, burn_handle)
    filled_ddem = dDEM.copy()
    # if we already have a glacier mask defined, remove it — we pass our own
    # per-glacier mask below. pop() with a default avoids the membership test.
    kwargs.pop('glacier_mask', None)
    for glac in ind_glac_vals:
        tmp_mask = ind_glac_mask == glac
        try:
            # NOTE(review): ``elevation_function`` is presumably a
            # module-level helper defined elsewhere in this file — confirm.
            tmp_dem = fill_holes(dDEM, elevation_function, glacier_mask=tmp_mask,
                                 functype=functype, **kwargs)
            filled_ddem.img[tmp_mask] = tmp_dem.img[tmp_mask]
        except Exception:
            # BUG FIX: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit. The best-effort behavior is kept:
            # a glacier that fails to fill is simply skipped.
            continue
    return filled_ddem
def normalize_glacier_elevations(DEM, glacshapes, burn_handle=None):
    """Normalize elevations to the range [0, 1] independently for each glacier.

    Parameters
    ----------
    DEM : GeoImg-like
        Elevation image with an ``img`` 2-D array and a ``copy`` method.
    glacshapes : str
        Glacier outlines to rasterize onto the DEM grid.
    burn_handle : str, optional
        Attribute field used when rasterizing polygons.

    Returns
    -------
    normDEM : GeoImg-like
        DEM copy whose glacier cells hold ``(z - z_min) / (z_max - z_min)``
        per glacier.
    ind_glac_mask : numpy.ndarray
        Rasterized glacier index mask.
    ind_glac_vals : array-like
        Cleaned glacier index values (from ``clean_glacier_indices``).
    """
    ind_glac_mask, raw_inds = it.rasterize_polygons(DEM, glacshapes, burn_handle)
    normed_els = DEM.copy().img
    ind_glac_vals = clean_glacier_indices(DEM, ind_glac_mask, raw_inds)
    for glac in ind_glac_vals:
        glac_inds = np.where(ind_glac_mask == glac)
        glac_els = DEM.img[glac_inds]
        if glac_els.size == 0:
            continue  # glacier index has no cells on this grid
        max_el = np.nanmax(glac_els)
        min_el = np.nanmin(glac_els)
        el_range = max_el - min_el
        if el_range > 0:
            normed_els[glac_inds] = (glac_els - min_el) / el_range
        else:
            # BUG FIX: a flat (or single-cell) glacier made the original
            # divide 0/0, emitting a RuntimeWarning and producing NaN.
            # Produce the same NaN result explicitly, without the warning.
            # (Also reached when the glacier is all-NaN: NaN > 0 is False.)
            normed_els[glac_inds] = np.nan
    normDEM = DEM.copy(new_raster=normed_els)
    return normDEM, ind_glac_mask, ind_glac_vals
def main():
    """Command-line driver: compute per-glacier dH (elevation-change) curves.

    Workflow (as visible in this function):
      1. Parse CLI args (``_argparser``) and load the base DEM.
      2. Build a per-glacier index mask, either by rasterizing outlines or by
         loading a pre-made raster (``args.glac_mask``).
      3. Compute area-altitude distributions (AADs) and quantize the base DEM
         elevations to the AAD bin widths.
      4. For every dH GeoTIFF in ``args.dH_folder``, compute filtered mean and
         median dH curves per glacier and (optionally) plot them.
      5. Write one ``<name>_dH_curves.csv`` per glacier to ``args.out_folder``.
    """
    parser = _argparser()
    args = parser.parse_args()
    if args.plot_curves:
        # set font stuff
        font = {'family': 'sans',
                'weight': 'normal',
                'size': 22}
        # legend_font = {'family': 'sans',
        #                'weight': 'normal',
        #                'size': '16'}
        matplotlib.rc('font', **font)
    # load base dem
    print('Loading DEM {}'.format(args.basedem))
    basedem = GeoImg(args.basedem)
    print('DEM loaded.')
    # get glacier masks
    if args.glac_mask is None:
        print('Rasterizing glacier polygons to DEM extent.')
        master_mask, master_glacs = it.rasterize_polygons(basedem, args.glac_outlines, burn_handle='fid')
        # negative burn values mark non-glacier cells; flag them as NaN
        master_mask[master_mask < 0] = np.nan
    else:
        print('Loading raster of glacier polygons {}'.format(args.glac_mask))
        master_mask_geo = GeoImg(args.glac_mask)
        master_mask = master_mask_geo.img
        master_glacs = np.unique(master_mask[np.isfinite(master_mask)])
    # master_mask = np.logical_and(master_mask, np.isfinite(basedem.img))
    # get names
    gshp = gpd.read_file(args.glac_outlines)
    print('Glacier masks loaded.')
    # create output folder if it doesn't already exist
    # NOTE(review): os.system('mkdir -p ...') is POSIX-only and unquoted;
    # os.makedirs(..., exist_ok=True) would be portable — confirm before changing.
    os.system('mkdir -p {}'.format(args.out_folder))
    # create folders to store glacier dH curve figures
    for g in gshp[args.namefield]:
        os.system('mkdir -p {}'.format(os.path.sep.join([args.out_folder, g])))
    print('Getting glacier AADs.')
    # get aad
    aad_bins, aads = area_alt_dist(basedem, master_mask, glacier_inds=master_glacs)
    # initialize pd dataframes for dH_curves
    df_list = [pd.DataFrame(aad_bin, columns=['elevation']) for aad_bin in aad_bins]
    # map each glacier's 'fid' burn value to its display name from args.namefield
    g_list = [str(gshp[args.namefield][gshp['fid'] == glac].values[0]) for glac in master_glacs]
    df_dict = dict(zip(g_list, df_list))
    # turn aad_bins, aads into dicts with RGIId as keys
    bin_dict = dict(zip(g_list, aad_bins))
    aad_dict = dict(zip(g_list, aads))
    for i, df in enumerate(df_list):
        df['area'] = pd.Series(aads[i], index=df.index)
    # now that we have the AADs, make sure we preserve that distribution when we reproject.
    bin_widths = [np.diff(b)[0] for b in aad_bins]
    basedem.img[np.isnan(master_mask)] = np.nan  # remove all elevations outside of the glacier mask
    # quantize each glacier's elevations down to its AAD bin width, so that
    # reprojection keeps pixels assigned to the same elevation bin
    for i, g in enumerate(master_glacs):
        basedem.img[master_mask == g] = np.floor(basedem.img[master_mask == g] / bin_widths[i]) * bin_widths[i]
    # get a list of all dH
    dH_list = glob('{}/*.tif'.format(args.dH_folder))
    # initialize ur_dataframe
    ur_df = pd.DataFrame([os.path.basename(x) for x in dH_list], columns=['filename'])
    # nice_split/parse_filename presumably extract the two DEM names and their
    # acquisition dates from the dH filename — TODO confirm against helpers.
    ur_df['dem1'] = [nice_split(x)[0] for x in ur_df['filename']]
    ur_df['dem2'] = [nice_split(x)[1] for x in ur_df['filename']]
    date1 = [parse_filename(x) for x in ur_df['dem1']]
    date2 = [parse_filename(x) for x in ur_df['dem2']]
    ur_df['date1'] = date1
    ur_df['date2'] = date2
    # time separation in (mean Gregorian) years, and the midpoint date
    ur_df['delta_t'] = [(x - y).days / 365.2425 for x, y in list(zip(date1, date2))]
    ur_df['centerdate'] = [(y + dt.timedelta((x - y).days / 2)) for x, y in list(zip(date1, date2))]
    print('Found {} files in {}'.format(len(dH_list), args.dH_folder))
    print('Getting dH curves.')
    for i, dHfile in enumerate(dH_list):
        dH = GeoImg(dHfile)
        print('{} ({}/{})'.format(dH.filename, i+1, len(dH_list)))
        # build a glacier mask on the dH image's own grid
        if args.glac_mask is None:
            dh_mask, dh_glacs = it.rasterize_polygons(dH, args.glac_outlines, burn_handle='fid')
        else:
            tmp_dh_mask = master_mask_geo.reproject(dH, method=GRA_NearestNeighbour)
            dh_mask = tmp_dh_mask.img
            dh_glacs = np.unique(dh_mask[np.isfinite(dh_mask)])
        tmp_basedem = basedem.reproject(dH, method=GRA_NearestNeighbour)
        # grab per-file metadata before the inner loop shadows ``i`` below
        deltat = ur_df.loc[i, 'delta_t']
        this_fname = ur_df.loc[i, 'filename']
        # NOTE(review): the inner loop re-uses ``i`` and shadows the outer
        # index; harmless here (outer ``i`` is re-bound each iteration and is
        # only read above), but a rename would be safer.
        for i, glac in enumerate(dh_glacs):
            this_name = str(gshp[args.namefield][gshp['fid'] == glac].values[0])
            this_dem = tmp_basedem.img[dh_mask == glac]
            this_ddem = dH.img[dh_mask == glac]
            # mask gross outliers before curve fitting
            this_ddem[np.abs(this_ddem) > args.outlier] = np.nan
            # skip glaciers with less than 25% valid dH coverage
            if np.count_nonzero(np.isfinite(this_ddem)) / this_ddem.size < 0.25:
                continue
            # these_bins = get_bins(this_dem, dh_mask)
            filtered_ddem = outlier_filter(bin_dict[this_name], this_dem, this_ddem)
            # _, odH_curve = get_dH_curve(this_dem, this_ddem, dh_mask, bins=aad_bins)
            _, fdH_curve, fbin_area = get_dH_curve(this_dem, filtered_ddem, dh_mask, bins=bin_dict[this_name])
            _, fdH_median, _ = get_dH_curve(this_dem, filtered_ddem, dh_mask, bins=bin_dict[this_name], mode='median')
            # convert bin cell counts to percent of the glacier's AAD area
            fbin_area = 100 * fbin_area * np.abs(dH.dx) * np.abs(dH.dy) / aad_dict[this_name]
            if args.plot_curves:
                # NOTE(review): str.strip('.tif') strips *characters*, not the
                # suffix — e.g. 'fit.tif' -> 'f'. rsplit('.tif', 1)[0] (as used
                # for this_fname below) is the safe form — confirm and fix.
                plot_dH_curve(this_ddem, this_dem, bin_dict[this_name], fdH_curve, fdH_median, fbin_area, dH.filename.strip('.tif'))
                plt.savefig(os.path.join(args.out_folder, this_name, dH.filename.strip('.tif') + '.png'), bbox_inches='tight', dpi=200)
                plt.close()
            # write dH curve in units of dH/dt (so divide by deltat)
            this_fname = this_fname.rsplit('.tif', 1)[0]
            df_dict[this_name][this_fname + '_mean'] = pd.Series(fdH_curve / deltat, index=df_dict[this_name].index)
            df_dict[this_name][this_fname + '_med'] = pd.Series(fdH_median / deltat, index=df_dict[this_name].index)
            df_dict[this_name][this_fname + '_pct'] = pd.Series(fbin_area, index=df_dict[this_name].index)
    print('Writing dH curves to {}'.format(args.out_folder))
    # write all dH_curves
    for g in df_dict.keys():
        print(g)
        df_dict[g].to_csv(os.path.sep.join([args.out_folder, '{}_dH_curves.csv'.format(g)]), index=False)