def wrapper_reproj(argsin):
    arr, in_met, out_met = argsin

    # create input image
    gt, proj, npix_x, npix_y = in_met
    drv = gdal.GetDriverByName('MEM')
    dst = drv.Create('', npix_x, npix_y, 1, gdal.GDT_Float32)
    sp = dst.SetProjection(proj)
    sg = dst.SetGeoTransform(gt)
    arr[np.isnan(arr)] = -9999
    wa = dst.GetRasterBand(1).WriteArray(arr)
    md = dst.SetMetadata({'Area_or_point': 'Point'})
    nd = dst.GetRasterBand(1).SetNoDataValue(-9999)
    del sp, sg, wa, md, nd

    tmp_z = GeoImg(dst)

    # output
    res, outputBounds, utm_out = out_met
    dest = gdal.Warp('', tmp_z.gd, format='MEM', dstSRS='EPSG:{}'.format(vt.epsg_from_utm(utm_out)),
                     xRes=res, yRes=res, outputBounds=outputBounds, resampleAlg=gdal.GRA_Bilinear)
    geoimg = GeoImg(dest)

    return geoimg.img

def getExtent(in_filename):
    # This function uses GeoImg.find_valid_bbox to get the extent of in_filename,
    # then projects that extent to geographic (EPSG:4326) coordinates.
    # in_filename - input filename (string). Should be a GeoTIFF.
    myDEM = GeoImg(in_filename)
    mybbox = myDEM.find_valid_bbox()

    # Set up the source projection - you can also import from epsg, proj4...
    source = osr.SpatialReference()
    source.ImportFromEPSG(myDEM.epsg)

    # The target projection
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)

    # Create the transform - this can be used repeatedly
    transform = osr.CoordinateTransformation(source, target)

    # Transform the points. You can also create an ogr geometry and use the more
    # generic point.Transform()
    J1 = transform.TransformPoint(mybbox[0], mybbox[2])
    J2 = transform.TransformPoint(mybbox[1], mybbox[3])

    minLat = J1[1]
    maxLat = J2[1]
    minLon = J1[0]
    maxLon = J2[0]
    # if minLon < 0:
    #     minLon = 360 + minLon
    # if maxLon < 0:
    #     maxLon = 360 + maxLon

    return minLat, maxLat, minLon, maxLon

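# A minimal usage sketch (not part of the original source): 'dem.tif' is a
# hypothetical GeoTIFF path, assumed to carry a projected CRS with an EPSG code.
if __name__ == '__main__':
    min_lat, max_lat, min_lon, max_lon = getExtent('dem.tif')
    print('Lat range: {:.4f} to {:.4f}'.format(min_lat, max_lat))
    print('Lon range: {:.4f} to {:.4f}'.format(min_lon, max_lon))
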
def get_footprints(filelist, proj4=None):
    """
    Get a list of footprints, given a filelist of DEMs.

    :param filelist: List of DEMs to create footprints for.
    :param proj4: proj4 representation of output CRS. If None, the CRS is chosen from
        the first DEM loaded. Can also supply an EPSG code as an integer.

    :type filelist: array-like
    :type proj4: str, int

    :returns fprints, this_crs: A list of footprints and a proj4 string (or dict)
        representing the output CRS.
    """
    fprints = []
    if proj4 is not None:
        if type(proj4) is int:
            this_proj4 = {'init': 'epsg:{}'.format(proj4)}
        else:
            this_proj4 = proj4
    else:
        tmp = GeoImg(filelist[0])
        this_proj4 = tmp.proj4

    for f in filelist:
        tmp = GeoImg(f)
        fp = Polygon(tmp.find_corners(mode='xy'))
        fprints.append(mt.reproject_geometry(fp, tmp.proj4, this_proj4))

    return fprints, this_proj4

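# A minimal usage sketch (the two DEM filenames are hypothetical): passing an
# integer EPSG code reprojects all footprints to that CRS; passing None keeps
# the CRS of the first DEM in the list.
if __name__ == '__main__':
    fprints, out_crs = get_footprints(['dem_2015.tif', 'dem_2019.tif'], proj4=32633)
    print('Created {} footprints in CRS {}'.format(len(fprints), out_crs))
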
def corr_filter_aster(fn_dem, fn_corr, threshold=80):
    dem = GeoImg(fn_dem)
    corr = GeoImg(fn_corr)
    out = dem.copy()

    # mask out pixels whose stereo correlation falls below the threshold, using a
    # binary opening to also remove small, isolated patches of valid pixels
    corr.img[corr.img < threshold] = 0
    rem_open = binary_opening(corr.img, structure=disk(5))
    out.img[~rem_open] = np.nan

    return out

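# A hedged usage sketch: the filenames are hypothetical MMASTER outputs. Pixels
# with stereo correlation below 60 (instead of the default 80) are set to NaN.
if __name__ == '__main__':
    filtered = corr_filter_aster('AST_L1A_003_Z_adj.tif', 'AST_L1A_003_CORR_adj.tif', threshold=60)
    filtered.write('AST_L1A_003_Z_filtered.tif')
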
def make_geoimg(ds, band=0, var='z'):
    """
    Create a GeoImg representation of a given band from an xarray dataset.

    :param ds: xarray dataset to read shape, extent, CRS values from.
    :param band: band number of xarray dataset to use
    :param var: variable of xarray dataset to use

    :type ds: xarray.Dataset
    :type band: int
    :type var: string

    :returns geoimg: GeoImg representation of the given band.
    """
    npix_y, npix_x = ds[var][band].shape
    dx = np.round((ds.x.max().values - ds.x.min().values) / float(npix_x))
    dy = np.round((ds.y.min().values - ds.y.max().values) / float(npix_y))
    newgt = (ds.x.min().values - 0, dx, 0, ds.y.max().values - 0, 0, dy)

    drv = gdal.GetDriverByName('MEM')
    dst = drv.Create('', npix_x, npix_y, 1, gdal.GDT_Float32)

    sp = dst.SetProjection(ds.crs.spatial_ref)
    sg = dst.SetGeoTransform(newgt)

    img = np.copy(ds[var][band].values)
    img[np.isnan(img)] = -9999
    wa = dst.GetRasterBand(1).WriteArray(img)
    md = dst.SetMetadata({'Area_or_point': 'Point'})
    nd = dst.GetRasterBand(1).SetNoDataValue(-9999)
    del wa, sg, sp, md, nd

    return GeoImg(dst)

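# A minimal sketch (assuming a NetCDF stack such as the one produced by
# create_mmaster_stack below, whose 'crs' variable carries a spatial_ref
# attribute): pull the first time slice out of an xarray dataset as a GeoImg,
# ready for GDAL-based reprojection or writing.
if __name__ == '__main__':
    import xarray as xr
    ds = xr.open_dataset('mmaster_stack.nc')  # hypothetical stack file
    geoimg = make_geoimg(ds, band=0, var='z')
    print(geoimg.npix_x, geoimg.npix_y)
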
def rasterize_list_poly(list_poly, in_met, i):
    print('Poly stack number ' + str(i + 1))

    # create input image
    gt, proj, npix_x, npix_y = in_met
    drv = gdal.GetDriverByName('MEM')
    dst = drv.Create('', npix_x, npix_y, 1, gdal.GDT_Float32)
    sp = dst.SetProjection(proj)
    sg = dst.SetGeoTransform(gt)
    band = dst.GetRasterBand(1)
    band.SetNoDataValue(0)
    band.Fill(0, 0)
    del sp, sg

    img = GeoImg(dst)
    out_density = np.zeros(np.shape(img.img))

    srs = osr.SpatialReference()
    srs.ImportFromWkt(proj)

    for j, poly in enumerate(list_poly):
        print('Poly ' + str(j + 1) + ' out of ' + str(len(list_poly)))
        ds_shp = ot.create_mem_shp(poly, srs)
        mask = ot.geoimg_mask_on_feat_shp_ds(ds_shp, img)
        out_density[mask] += 1

    return out_density

def collect_subimages(demname, mysize):
    # import data
    myDEM = GeoImg(demname)
    # mybounds = myDEM.find_valid_bbox()
    # tx = np.asarray(np.floor_divide(mybounds[1] - mybounds[0], mysize), dtype=np.int32)
    # ty = np.asarray(np.floor_divide(mybounds[3] - mybounds[2], mysize), dtype=np.int32)
    tx = np.asarray(np.floor_divide(myDEM.xmax - myDEM.xmin, mysize), dtype=np.int32)
    ty = np.asarray(np.floor_divide(myDEM.ymax - myDEM.ymin, mysize), dtype=np.int32)

    # Divide the DEM into subimages for co-registration
    myDEMs = myDEM.subimages(tx, Ny=ty)

    # drop sub-images that are (nearly) empty
    myDEMs = [tDEM for tDEM in myDEMs if np.sum(~np.isnan(tDEM.img)) > 1000]

    return myDEMs

def worldwide_coverage_density(list_poly, fn_out, res=0.05, nproc=1):
    # worldwide raster in lat/lon projection
    xmin = -180
    ymax = 90
    gt = (xmin, res, 0, ymax, 0, -res)
    npix_x = int(360 / res)
    npix_y = int(180 / res)

    proj = osr.SpatialReference()
    proj.ImportFromEPSG(4326)

    ds_out = gdal.GetDriverByName('GTiff').Create(fn_out, npix_x, npix_y, 1, gdal.GDT_Int16)
    ds_out.SetGeoTransform(gt)
    ds_out.SetProjection(proj.ExportToWkt())
    band = ds_out.GetRasterBand(1)
    band.SetNoDataValue(0)
    band.Fill(0, 0)

    img = GeoImg(ds_out)
    out_density = np.zeros(np.shape(img.img))

    if nproc == 1:
        for i, poly in enumerate(list_poly):
            print('Rasterizing poly number ' + str(i + 1) + ' in ' + str(len(list_poly)))
            ds_shp = ot.create_mem_shp(poly, proj)
            mask = ot.geoimg_mask_on_feat_shp_ds(ds_shp, img)
            out_density[mask] += 1
    else:
        print('Using ' + str(nproc) + ' processors...')
        # speed things up with multiprocessing
        pool = mp.Pool(nproc, maxtasksperchild=1)
        in_met = (img.gt, img.proj_wkt, img.npix_x, img.npix_y)
        pack_size = int(np.ceil(len(list_poly) / nproc))
        argsin_packs = [{'list_poly': list_poly[i:min(i + pack_size, len(list_poly))],
                         'in_met': in_met,
                         'i': k} for k, i in enumerate(np.arange(0, len(list_poly), pack_size))]

        outputs = pool.map(wrapper_rasterize, argsin_packs, chunksize=1)
        pool.close()
        pool.join()

        for output in outputs:
            out_density += output

    ds_out.GetRasterBand(1).WriteArray(out_density)
    ds_out = None

def coreg_wrapper(argsin):
    ref_vrt, in_dem, fn_excl_mask, fn_incl_mask, strip_out_dir = argsin
    print('Coregistering strip: ' + in_dem)

    if not os.path.exists(strip_out_dir):
        try:
            _, _, shift_params, stats = dem_coregistration(ref_vrt, in_dem, glaciermask=fn_excl_mask,
                                                           landmask=fn_incl_mask, outdir=strip_out_dir,
                                                           inmem=True)
            rmse = stats[3]
            clean_coreg_dir(strip_out_dir, '.')
            # apply the horizontal/vertical shifts to the original strip, and keep it
            # only if the co-registration RMSE on stable terrain is below 10 m
            orig_slv = GeoImg(in_dem)
            if rmse < 10:
                orig_slv.shift(shift_params[0], shift_params[1])
                orig_slv.img = orig_slv.img + shift_params[2]
                orig_slv.write(os.path.basename(strip_out_dir) + '_adj.tif', out_folder=strip_out_dir)
        except Exception:
            clean_coreg_dir(strip_out_dir, '.')
    else:
        print('Output dir already exists, skipping...')

def get_geoimg(indata):
    if isinstance(indata, (str, gdal.Dataset)):
        return GeoImg(indata)
    elif isinstance(indata, GeoImg):
        return indata
    else:
        raise TypeError('input data must be a string pointing to a gdal dataset, or a GeoImg object.')

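# A short sketch of the accepted input types ('dem.tif' is a hypothetical path);
# all three calls below return a GeoImg.
if __name__ == '__main__':
    from_path = get_geoimg('dem.tif')
    from_ds = get_geoimg(gdal.Open('dem.tif'))
    from_geoimg = get_geoimg(from_path)
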
def __init__(self, in_filename, in_dir='.', unitConv=1, ftype='xy', dataName='z'):
    FileExt = in_filename.split('.')[-1]
    if FileExt.lower() in ['tif', 'tiff']:
        tmp = GeoImg(in_filename, in_dir=in_dir)
        ndv = tmp.gd.GetRasterBand(1).GetNoDataValue()
        X, Y = tmp.xy()
        self.x = X.reshape(-1)
        self.y = Y.reshape(-1)
        self.c, self.r = tmp.img.shape
        self.data = tmp.img.reshape(-1) * unitConv
        self.data[self.data == ndv] = np.nan
        self.img = True
    elif FileExt.lower() == 'shp':
        tmp = gpd.GeoDataFrame.from_file(in_dir + os.path.sep + in_filename)
        self.x = np.empty(0)
        self.y = np.empty(0)
        for pt in tmp['geometry']:
            self.x = np.append(self.x, pt.x)
            self.y = np.append(self.y, pt.y)
        # not sure how people would call these things;
        # just assume that the default is going to be 'z'
        self.data = tmp[dataName] * unitConv
        self.img = False
    elif FileExt.lower() == 'csv':
        tmp = pd.read_csv(in_dir + os.path.sep + in_filename, sep=',|;', engine='python')
        if ftype == 'xy':
            self.x = tmp['x']
            self.y = tmp['y']
        else:
            self.x = tmp['z']
            self.y = None
        self.data = tmp[dataName] * unitConv
        self.img = False
    self.xy = ftype == 'xy'

def raster_to_point(fn_dem):
    extent, proj_wkt = ot.extent_rast(fn_dem)
    poly = ot.poly_from_extent(extent)
    transform = ot.coord_trans(True, proj_wkt, False, 4326)
    poly.Transform(transform)
    center_lon, center_lat = ot.get_poly_centroid(poly)
    epsg, utm_zone = ot.latlon_to_UTM(center_lat, center_lon)

    print('Reprojecting in ' + str(epsg))
    img_vhr = GeoImg(fn_dem)
    dest = gdal.Warp('', img_vhr.gd, format='MEM', dstSRS='EPSG:{}'.format(epsg),
                     xRes=out_res, yRes=out_res, resampleAlg=gdal.GRA_Bilinear, dstNodata=-9999)
    img_lr = GeoImg(dest)

    print('Extracting coords...')
    elevs = img_lr.img.flatten()
    x, y = img_lr.xy(ctype='center')
    coords = list(zip(x.flatten(), y.flatten()))
    coords_latlon = point_to_lonlat_trans(int(epsg), coords)
    lon, lat = zip(*coords_latlon)
    lon = np.array(lon)
    lat = np.array(lat)

    keep = ~np.isnan(elevs)
    h = elevs[keep]
    lat = lat[keep]
    lon = lon[keep]

    print('Done for this DEM')
    return h, lat, lon

def main():
    parser = _argparser()
    args = parser.parse_args()

    if args.outputscene is None:
        args.outputscene = args.inputscene

    outfilename = args.outputscene + "_B8.TIF"
    # first, read in the bands (4, 3, 2)
    B4 = GeoImg(args.inputscene + "_B4.TIF")
    B3 = GeoImg(args.inputscene + "_B3.TIF")
    B2 = GeoImg(args.inputscene + "_B2.TIF")
    # now, make a new band
    B8sim = 0.5 * B4.img + 0.2 * B3.img + 0.3 * B2.img
    B8 = B4.copy(new_raster=B8sim)
    B8.write(outfilename)

def composite_raster(band1name, band2name, band3name, outname,
                     out_dir='.', in_dir='.', driver='GTiff'):
    band1 = GeoImg(band1name, in_dir=in_dir)
    band2 = GeoImg(band2name, in_dir=in_dir)
    band3 = GeoImg(band3name, in_dir=in_dir)

    driver = gdal.GetDriverByName(driver)

    ncols = band1.npix_x
    nrows = band1.npix_y
    nband = 3
    datatype = band1.gd.GetRasterBand(1).DataType

    out = driver.Create(out_dir + os.path.sep + outname, ncols, nrows, nband, datatype)
    out.SetGeoTransform(band1.gt)
    out.SetProjection(band1.proj_wkt)

    out.GetRasterBand(1).WriteArray(band1.gd.ReadAsArray())
    out.GetRasterBand(2).WriteArray(band2.gd.ReadAsArray())
    out.GetRasterBand(3).WriteArray(band3.gd.ReadAsArray())

    for i in range(3):
        out.GetRasterBand(i + 1).FlushCache()

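# A minimal usage sketch (hypothetical Landsat band filenames): stack bands
# 4, 3, 2 into a 3-band color composite GeoTIFF in the current directory.
if __name__ == '__main__':
    composite_raster('LC08_B4.TIF', 'LC08_B3.TIF', 'LC08_B2.TIF', 'composite_432.tif')
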
def get_slope(geoimg, alg='Horn'):
    """
    Wrapper function to calculate DEM slope using gdal.DEMProcessing.

    :param geoimg: GeoImg object of DEM to calculate slope
    :param alg: Algorithm for calculating slope. One of 'ZevenbergenThorne' or 'Horn'. Default is 'Horn'.

    :type geoimg: pybob.GeoImg
    :type alg: str

    :returns geo_slope: new GeoImg object with slope raster
    """
    assert alg in ['ZevenbergenThorne', 'Horn'], "alg not recognized: {}".format(alg)
    slope_ = gdal.DEMProcessing('', geoimg.gd, 'slope', format='MEM', alg=alg)
    return GeoImg(slope_)

def get_aspect(geoimg, alg='Horn'):
    """
    Wrapper function to calculate DEM aspect using gdal.DEMProcessing.

    :param geoimg: GeoImg object of DEM to calculate aspect
    :param alg: Algorithm for calculating aspect. One of 'ZevenbergenThorne' or 'Horn'. Default is 'Horn'.

    :type geoimg: pybob.GeoImg
    :type alg: str

    :returns geo_aspect: new GeoImg object with aspect raster
    """
    assert alg in ['ZevenbergenThorne', 'Horn'], "alg not recognized: {}".format(alg)
    aspect_ = gdal.DEMProcessing('', geoimg.gd, 'aspect', format='MEM', alg=alg)
    return GeoImg(aspect_)

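# A combined usage sketch for the two DEMProcessing wrappers above ('dem.tif'
# is a hypothetical DEM): both run entirely in memory and return GeoImg objects.
if __name__ == '__main__':
    dem = GeoImg('dem.tif')
    slope = get_slope(dem, alg='ZevenbergenThorne')
    aspect = get_aspect(dem)
    slope.write('dem_slope.tif')
    aspect.write('dem_aspect.tif')
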
def generate_panchrome(imgname, outname=None, out_dir='.', interactive=False):
    if outname is None:
        outname = imgname + '_B8.TIF'

    B5 = GeoImg(imgname + '_B5.TIF')
    B4 = GeoImg(imgname + '_B4.TIF')
    B3 = GeoImg(imgname + '_B3.TIF')
    B2 = GeoImg(imgname + '_B2.TIF')

    B8sim = 0.45 * B4.img + 0.2 * B3.img + 0.25 * B2.img + 0.1 * B5.img
    B8 = B4.copy(new_raster=B8sim)
    B8.write(outname, out_folder=out_dir)

    if interactive:
        return B8

for feature in layer:
    if feature.GetField('RGIId') == gla[0]:
        poly = feature.GetGeometryRef()
        area = feature.GetField('Area')
        break
layer.ResetReading()

list_inters = get_footprints_inters_ext(list_files, poly, epsg_base, use_l1a_met=False)
print('Found ' + str(len(list_inters)) + ' DEMs out of ' + str(len(list_files))
      + ' intersecting glacier ' + gla[1])

for fn_inters in list_inters:
    print('Working on file: ' + fn_inters)

    tmp_img = GeoImg(fn_inters)
    mask_feat = ot.geoimg_mask_on_feat_shp_ds(ds_shp, tmp_img, layer_name=layer_name,
                                              feat_id='RGIId', feat_val=gla[0])
    # nb_px_mask = np.count_nonzero(mask_feat)
    nb_px_valid = np.count_nonzero(~np.isnan(tmp_img.img[mask_feat]))
    area_valid = nb_px_valid * tmp_img.dx ** 2 / 1000000
    cov = area_valid / area * 100

    print('DEM ' + fn_inters + ' has intersection of ' + str(cov))
    if cov > 5.:
        list_final.append(os.path.basename(fn_inters))
        list_cov.append(cov)
        list_date.append(tmp_img.datetime)
        mt.create_zip_from_flist([fn_inters], os.path.join(
            gla_dir, os.path.splitext(os.path.basename(fn_inters))[0] + '.zip'))

def add_inset(fig, extent, position, bounds=None, label=None, polygon=None, shades=True,
              hillshade=True, list_shp=None, main=False, markup=None, markpos='left',
              markadj=0, markup_sub=None, sub_pos='lt'):

    main_pos = [0.375, 0.21, 0.25, 0.25]

    if polygon is None and bounds is not None:
        polygon = poly_from_extent(bounds)

    if shades:
        shades_main_to_inset(main_pos, position, latlon_extent_to_robinson_axes_verts(polygon), label=label)

    sub_ax = fig.add_axes(position, projection=ccrs.Robinson(), label=label)
    sub_ax.set_extent(extent, ccrs.Geodetic())

    sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'ocean', '50m', facecolor='gainsboro'))
    sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'land', '50m', facecolor='dimgrey'))

    if hillshade:
        def out_of_poly_mask(geoimg, poly_coords):
            poly = poly_from_coords(inter_poly_coords(poly_coords))
            srs = osr.SpatialReference()
            srs.ImportFromEPSG(4326)
            # put in a memory vector
            ds_shp = create_mem_shp(poly, srs)
            return geoimg_mask_on_feat_shp_ds(ds_shp, geoimg)

        def inter_poly_coords(polygon_coords):
            list_lat_interp = []
            list_lon_interp = []
            for i in range(len(polygon_coords) - 1):
                lon_interp = np.linspace(polygon_coords[i][0], polygon_coords[i + 1][0], 50)
                lat_interp = np.linspace(polygon_coords[i][1], polygon_coords[i + 1][1], 50)
                list_lon_interp.append(lon_interp)
                list_lat_interp.append(lat_interp)
            all_lon_interp = np.concatenate(list_lon_interp)
            all_lat_interp = np.concatenate(list_lat_interp)
            return np.array(list(zip(all_lon_interp, all_lat_interp)))

        img = GeoImg(fn_hs)
        hs_tmp = hs_land.copy()
        hs_tmp_nl = hs_notland.copy()
        mask = out_of_poly_mask(img, polygon)
        hs_tmp[~mask] = 0
        hs_tmp_nl[~mask] = 0

        sub_ax.imshow(hs_tmp[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap2,
                      zorder=2, interpolation='nearest')
        sub_ax.imshow(hs_tmp_nl[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap22,
                      zorder=2, interpolation='nearest')

    if main:
        shape_feature = ShapelyFeature(Reader(list_shp).geometries(), ccrs.PlateCarree(), alpha=1,
                                       facecolor='indigo', linewidth=0.35, edgecolor='indigo')
        sub_ax.add_feature(shape_feature)

    if bounds is not None:
        verts = mpath.Path(latlon_extent_to_robinson_axes_verts(polygon))
        sub_ax.set_boundary(verts, transform=sub_ax.transAxes)

    if not main:
        for i in range(len(tiles)):
            lat, lon = SRTMGL1_naming_to_latlon(tiles[i])
            if group_by_spec:
                lat, lon = latlon_to_spec_center(lat, lon)
            else:
                lat = lat + 0.5
                lon = lon + 0.5

            if label == 'Arctic West' and ((lat < 71 and lon > 60) or (lat < 76 and lon > 100)):
                continue
            if label == 'HMA' and lat >= 46:
                continue

            fac = 1000
            if areas[i] > 10:
                rad = 15000 + np.sqrt(areas[i]) * fac
            else:
                rad = 15000 + 10 * fac

            col_bounds = np.array([-1.5, -1.1, -0.8, -0.6, -0.4, -0.2, 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
            cb = []
            cb_val = np.linspace(0, 1, len(col_bounds))
            for j in range(len(cb_val)):
                cb.append(mpl.cm.RdYlBu(cb_val[j]))
            cmap_cus = mpl.colors.LinearSegmentedColormap.from_list(
                'my_cb', list(zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), cb)),
                N=1000)

            if ~np.isnan(dhs[i]) and areas[i] > 0.2 and errs[i] < 0.5:
                dhdt = dhs[i]
                dhdt_col = max(0.0001, min(0.9999, (dhdt - min(col_bounds)) / (max(col_bounds) - min(col_bounds))))
                col = cmap_cus(dhdt_col)
            elif areas[i] > 0.2:
                col = plt.cm.Greys(0.7)

            xy = coordXform(ccrs.PlateCarree(), ccrs.Robinson(), np.array([lon]), np.array([lat]))[0][0:2]

            sub_ax.add_patch(mpatches.Circle(xy=xy, radius=rad, facecolor=col, edgecolor='None',
                                             alpha=1, transform=ccrs.Robinson(), zorder=30))

    if markup is not None:
        if markpos == 'left':
            lon_upleft = np.min(list(zip(*polygon))[0])
            lat_upleft = np.max(list(zip(*polygon))[1])
        else:
            lon_upleft = np.max(list(zip(*polygon))[0])
            lat_upleft = np.max(list(zip(*polygon))[1])

        robin = coordXform(ccrs.PlateCarree(), ccrs.Robinson(), np.array([lon_upleft]), np.array([lat_upleft]))
        rob_x = robin[0][0]
        rob_y = robin[0][1]

        size_y = 200000
        size_x = 80000 * len(markup) + markadj

        if markpos == 'right':
            rob_x = rob_x - 50000
        else:
            rob_x = rob_x + 50000

        sub_ax_2 = fig.add_axes(position, projection=ccrs.Robinson(), label=label + 'markup')
        sub_ax_2.set_extent(extent, ccrs.Geodetic())

        verts = mpath.Path(rect_units_to_verts([rob_x, rob_y, size_x, size_y]))
        sub_ax_2.set_boundary(verts, transform=sub_ax.transAxes)

        sub_ax_2.text(rob_x, rob_y + 50000, markup, horizontalalignment=markpos, verticalalignment='bottom',
                      transform=ccrs.Robinson(), color='black', fontsize=4.5, fontweight='bold',
                      bbox=dict(facecolor='white', alpha=1, linewidth=0.35, pad=1.5))

    if markup_sub is not None:
        lon_min = np.min(list(zip(*polygon))[0])
        lon_max = np.max(list(zip(*polygon))[0])
        lon_mid = 0.5 * (lon_min + lon_max)

        lat_min = np.min(list(zip(*polygon))[1])
        lat_max = np.max(list(zip(*polygon))[1])
        lat_mid = 0.5 * (lat_min + lat_max)

        size_y = 150000
        size_x = 150000

        lat_midup = lat_min + 0.87 * (lat_max - lat_min)

        robin = coordXform(ccrs.PlateCarree(), ccrs.Robinson(),
                           np.array([lon_min, lon_min, lon_min, lon_mid, lon_mid,
                                     lon_max, lon_max, lon_max, lon_min]),
                           np.array([lat_min, lat_mid, lat_max, lat_min, lat_max,
                                     lat_min, lat_mid, lat_max, lat_midup]))

        # map sub_pos codes to (vertex index, horizontal alignment, vertical alignment)
        pos_lookup = {'lb': (0, 'left', 'bottom'), 'lm': (1, 'left', 'center'),
                      'lm2': (8, 'left', 'center'), 'lt': (2, 'left', 'top'),
                      'mb': (3, 'center', 'bottom'), 'mt': (4, 'center', 'top'),
                      'rb': (5, 'right', 'bottom'), 'rm': (6, 'right', 'center'),
                      'rt': (7, 'right', 'top')}
        ind, ha, va = pos_lookup[sub_pos]
        rob_x, rob_y = robin[ind][0], robin[ind][1]

        if sub_pos[0] == 'r':
            rob_x = rob_x - 50000
        elif sub_pos[0] == 'l':
            rob_x = rob_x + 50000

        if sub_pos[1] == 'b':
            rob_y = rob_y + 50000
        elif sub_pos[1] == 't':
            rob_y = rob_y - 50000

        sub_ax_3 = fig.add_axes(position, projection=ccrs.Robinson(), label=label + 'markup2')
        sub_ax_3.set_extent(extent, ccrs.Geodetic())

        verts = mpath.Path(rect_units_to_verts([rob_x, rob_y, size_x, size_y]))
        sub_ax_3.set_boundary(verts, transform=sub_ax.transAxes)

        sub_ax_3.text(rob_x, rob_y, markup_sub, horizontalalignment=ha, verticalalignment=va,
                      transform=ccrs.Robinson(), color='black', fontsize=4.5,
                      bbox=dict(facecolor='white', alpha=1, linewidth=0.35, pad=1.5),
                      fontweight='bold', zorder=25)

    if not main:
        sub_ax.outline_patch.set_edgecolor('white')
    else:
        sub_ax.outline_patch.set_edgecolor('lightgrey')

def main():
    parser = _argparser()
    args = parser.parse_args()

    if args.plot_curves:
        # set font stuff
        font = {'family': 'sans', 'weight': 'normal', 'size': 22}
        matplotlib.rc('font', **font)

    # load base dem
    print('Loading DEM {}'.format(args.basedem))
    basedem = GeoImg(args.basedem)
    print('DEM loaded.')

    # get glacier masks
    if args.glac_mask is None:
        print('Rasterizing glacier polygons to DEM extent.')
        master_mask, master_glacs = it.rasterize_polygons(basedem, args.glac_outlines, burn_handle='fid')
        master_mask[master_mask < 0] = np.nan
    else:
        print('Loading raster of glacier polygons {}'.format(args.glac_mask))
        master_mask_geo = GeoImg(args.glac_mask)
        master_mask = master_mask_geo.img
        master_glacs = np.unique(master_mask[np.isfinite(master_mask)])

    # get names
    gshp = gpd.read_file(args.glac_outlines)
    print('Glacier masks loaded.')

    # create output folder if it doesn't already exist
    os.system('mkdir -p {}'.format(args.out_folder))

    # create folders to store glacier dH curve figures
    for g in gshp[args.namefield]:
        os.system('mkdir -p {}'.format(os.path.sep.join([args.out_folder, g])))

    print('Getting glacier AADs.')
    # get the area-altitude distributions (AADs)
    aad_bins, aads = area_alt_dist(basedem, master_mask, glacier_inds=master_glacs)

    # initialize pd dataframes for dH_curves
    df_list = [pd.DataFrame(aad_bin, columns=['elevation']) for aad_bin in aad_bins]
    g_list = [str(gshp[args.namefield][gshp['fid'] == glac].values[0]) for glac in master_glacs]
    df_dict = dict(zip(g_list, df_list))

    # turn aad_bins, aads into dicts with RGIId as keys
    bin_dict = dict(zip(g_list, aad_bins))
    aad_dict = dict(zip(g_list, aads))

    for i, df in enumerate(df_list):
        df['area'] = pd.Series(aads[i], index=df.index)

    # now that we have the AADs, make sure we preserve that distribution when we reproject:
    # snap each glacier's elevations to its own bin width
    bin_widths = [np.diff(b)[0] for b in aad_bins]
    basedem.img[np.isnan(master_mask)] = np.nan  # remove all elevations outside of the glacier mask
    for i, g in enumerate(master_glacs):
        basedem.img[master_mask == g] = np.floor(basedem.img[master_mask == g] / bin_widths[i]) * bin_widths[i]

    # get a list of all dH
    dH_list = glob('{}/*.tif'.format(args.dH_folder))

    # initialize ur_dataframe
    ur_df = pd.DataFrame([os.path.basename(x) for x in dH_list], columns=['filename'])
    ur_df['dem1'] = [nice_split(x)[0] for x in ur_df['filename']]
    ur_df['dem2'] = [nice_split(x)[1] for x in ur_df['filename']]
    date1 = [parse_filename(x) for x in ur_df['dem1']]
    date2 = [parse_filename(x) for x in ur_df['dem2']]
    ur_df['date1'] = date1
    ur_df['date2'] = date2
    ur_df['delta_t'] = [(x - y).days / 365.2425 for x, y in list(zip(date1, date2))]
    ur_df['centerdate'] = [(y + dt.timedelta((x - y).days / 2)) for x, y in list(zip(date1, date2))]

    print('Found {} files in {}'.format(len(dH_list), args.dH_folder))
    print('Getting dH curves.')

    for i, dHfile in enumerate(dH_list):
        dH = GeoImg(dHfile)
        print('{} ({}/{})'.format(dH.filename, i + 1, len(dH_list)))

        if args.glac_mask is None:
            dh_mask, dh_glacs = it.rasterize_polygons(dH, args.glac_outlines, burn_handle='fid')
        else:
            tmp_dh_mask = master_mask_geo.reproject(dH, method=GRA_NearestNeighbour)
            dh_mask = tmp_dh_mask.img
            dh_glacs = np.unique(dh_mask[np.isfinite(dh_mask)])

        tmp_basedem = basedem.reproject(dH, method=GRA_NearestNeighbour)
        deltat = ur_df.loc[i, 'delta_t']
        this_fname = ur_df.loc[i, 'filename']

        for j, glac in enumerate(dh_glacs):
            this_name = str(gshp[args.namefield][gshp['fid'] == glac].values[0])
            this_dem = tmp_basedem.img[dh_mask == glac]
            this_ddem = dH.img[dh_mask == glac]
            this_ddem[np.abs(this_ddem) > args.outlier] = np.nan

            # skip glaciers with less than 25% valid dH coverage
            if np.count_nonzero(np.isfinite(this_ddem)) / this_ddem.size < 0.25:
                continue

            filtered_ddem = outlier_filter(bin_dict[this_name], this_dem, this_ddem)

            _, fdH_curve, fbin_area = get_dH_curve(this_dem, filtered_ddem, dh_mask,
                                                   bins=bin_dict[this_name])
            _, fdH_median, _ = get_dH_curve(this_dem, filtered_ddem, dh_mask,
                                            bins=bin_dict[this_name], mode='median')

            fbin_area = 100 * fbin_area * np.abs(dH.dx) * np.abs(dH.dy) / aad_dict[this_name]

            if args.plot_curves:
                plot_dH_curve(this_ddem, this_dem, bin_dict[this_name], fdH_curve, fdH_median,
                              fbin_area, dH.filename.rsplit('.tif', 1)[0])
                plt.savefig(os.path.join(args.out_folder, this_name,
                                         dH.filename.rsplit('.tif', 1)[0] + '.png'),
                            bbox_inches='tight', dpi=200)
                plt.close()

            # write dH curve in units of dH/dt (so divide by deltat)
            this_fname = this_fname.rsplit('.tif', 1)[0]
            df_dict[this_name][this_fname + '_mean'] = pd.Series(fdH_curve / deltat, index=df_dict[this_name].index)
            df_dict[this_name][this_fname + '_med'] = pd.Series(fdH_median / deltat, index=df_dict[this_name].index)
            df_dict[this_name][this_fname + '_pct'] = pd.Series(fbin_area, index=df_dict[this_name].index)

    print('Writing dH curves to {}'.format(args.out_folder))
    # write all dH_curves
    for g in df_dict.keys():
        print(g)
        df_dict[g].to_csv(os.path.sep.join([args.out_folder, '{}_dH_curves.csv'.format(g)]), index=False)

def add_inset(fig, extent, pos, bounds, label=None, polygon=None, anom=None, draw_cmap_y=None,
              hillshade=True, markup_sub=None, sub_pos=None, sub_adj=None):

    sub_ax = fig.add_axes(pos, projection=ccrs.Robinson(), label=label)
    sub_ax.set_extent(extent, ccrs.Geodetic())

    sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'ocean', '50m', facecolor='gainsboro'))
    sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'land', '50m', facecolor='dimgrey'))

    if polygon is None and bounds is not None:
        polygon = poly_from_extent(bounds)

    if bounds is not None:
        verts = mpath.Path(latlon_extent_to_robinson_axes_verts(polygon))
        sub_ax.set_boundary(verts, transform=sub_ax.transAxes)

    if hillshade:
        def out_of_poly_mask(geoimg, poly_coords):
            poly = ot.poly_from_coords(inter_poly_coords(poly_coords))
            srs = osr.SpatialReference()
            srs.ImportFromEPSG(54030)
            # put in a memory vector
            ds_shp = ot.create_mem_shp(poly, srs)
            return ot.geoimg_mask_on_feat_shp_ds(ds_shp, geoimg)

        def inter_poly_coords(polygon_coords):
            list_lat_interp = []
            list_lon_interp = []
            for i in range(len(polygon_coords) - 1):
                lon_interp = np.linspace(polygon_coords[i][0], polygon_coords[i + 1][0], 50)
                lat_interp = np.linspace(polygon_coords[i][1], polygon_coords[i + 1][1], 50)
                list_lon_interp.append(lon_interp)
                list_lat_interp.append(lat_interp)
            all_lon_interp = np.concatenate(list_lon_interp)
            all_lat_interp = np.concatenate(list_lat_interp)
            return np.array(list(zip(all_lon_interp, all_lat_interp)))

        img = GeoImg(fn_hs)
        hs_tmp = hs_land.copy()
        hs_tmp_nl = hs_notland.copy()
        mask = out_of_poly_mask(img, polygon)
        hs_tmp[~mask] = 0
        hs_tmp_nl[~mask] = 0

        sub_ax.imshow(hs_tmp[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap2,
                      zorder=2, interpolation='nearest')
        sub_ax.imshow(hs_tmp_nl[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap22,
                      zorder=2, interpolation='nearest')

    sub_ax.outline_patch.set_edgecolor('white')

    if anom is not None:
        if anom in ['dhs_1', 'dhs_2', 'dhs_3', 'dhs_4']:
            col_bounds = np.array([0, 1, 2, 3, 5, 7, 10, 15, 20])
            cb = []
            cb_val = np.linspace(0, 1, len(col_bounds))
            for j in range(len(cb_val)):
                cb.append(mpl.cm.viridis(cb_val[j]))
            cmap_cus = mpl.colors.LinearSegmentedColormap.from_list(
                'my_cb', list(zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), cb)),
                N=1000)

            if anom == 'dhs_1':
                vals = dhs_1
            elif anom == 'dhs_2':
                vals = dhs_2
            elif anom == 'dhs_3':
                vals = dhs_3
            elif anom == 'dhs_4':
                vals = dhs_4

            lab = 'Number of valid observations'

        if draw_cmap_y is not None:
            sub_ax_2 = fig.add_axes([0.2, draw_cmap_y, 0.6, 0.05])
            sub_ax_2.set_xticks([])
            sub_ax_2.set_yticks([])
            for side in ['top', 'left', 'right', 'bottom']:
                sub_ax_2.spines[side].set_visible(False)

            cbaxes = sub_ax_2.inset_axes([0, 0.85, 1, 0.2], label='legend_' + label)
            norm = mpl.colors.Normalize(vmin=min(col_bounds), vmax=max(col_bounds))
            sm = plt.cm.ScalarMappable(cmap=cmap_cus, norm=norm)
            sm.set_array([])
            cb = plt.colorbar(sm, cax=cbaxes, ticks=col_bounds, orientation='horizontal',
                              extend='both', shrink=0.9)
            cb.set_label(lab)

        for i in range(len(tiles)):
            lat, lon = SRTMGL1_naming_to_latlon(tiles[i])
            if group_by_spec:
                lat, lon, s = latlon_to_spec_center(lat, lon)
            else:
                lat = lat + 0.5
                lon = lon + 0.5
                s = (1, 1)

            fac = 7000000.

            if anom == 'dhs_1':
                errs = errs_1
            elif anom == 'dhs_2':
                errs = errs_2
            elif anom == 'dhs_3':
                errs = errs_3
            elif anom == 'dhs_4':
                errs = errs_4

            if np.isnan(errs[i]):
                continue

            # need to square because Rectangle already shows a surface
            f = np.sqrt(((1 / min(max(errs[i], 0.25), 1) ** 2 - 1 / 1 ** 2)
                         / (1 / 0.25 ** 2 - 1 / 1 ** 2))) * (1 - np.sqrt(0.1))

            if ~np.isnan(vals[i]) and areas[i] > 0.2:
                val = vals[i]
                val_col = max(0.0001, min(0.9999, (val - min(col_bounds)) / (max(col_bounds) - min(col_bounds))))
                col = cmap_cus(val_col)
            elif areas[i] <= 5:
                continue
            else:
                col = plt.cm.Greys(0.7)

            xl = np.sqrt(0.1) * s[0] + f * s[0]
            yl = np.sqrt(0.1) * s[1] + f * s[1]
            sub_ax.add_patch(mpatches.Rectangle((lon - xl / 2, lat - yl / 2), xl, yl,
                                                facecolor=col, alpha=1,
                                                transform=ccrs.PlateCarree(), zorder=30))

    if markup_sub is not None and anom == 'dhs_1':
        lon_min = np.min(list(zip(*polygon))[0])
        lon_max = np.max(list(zip(*polygon))[0])
        lon_mid = 0.5 * (lon_min + lon_max)

        lat_min = np.min(list(zip(*polygon))[1])
        lat_max = np.max(list(zip(*polygon))[1])
        lat_mid = 0.5 * (lat_min + lat_max)

        robin = np.array(list(zip([lon_min, lon_min, lon_min, lon_mid, lon_mid, lon_max, lon_max, lon_max],
                                  [lat_min, lat_mid, lat_max, lat_min, lat_max, lat_min, lat_mid, lat_max])))

        # map sub_pos codes to (vertex index, horizontal alignment, vertical alignment)
        pos_lookup = {'lb': (0, 'left', 'bottom'), 'lm': (1, 'left', 'center'), 'lt': (2, 'left', 'top'),
                      'mb': (3, 'center', 'bottom'), 'mt': (4, 'center', 'top'),
                      'rb': (5, 'right', 'bottom'), 'rm': (6, 'right', 'center'), 'rt': (7, 'right', 'top')}
        ind, ha, va = pos_lookup[sub_pos]
        rob_x, rob_y = robin[ind][0], robin[ind][1]

        if sub_pos[0] == 'r':
            rob_x = rob_x - 100000
        elif sub_pos[0] == 'l':
            rob_x = rob_x + 100000

        if sub_pos[1] == 'b':
            rob_y = rob_y + 100000
        elif sub_pos[1] == 't':
            rob_y = rob_y - 100000

        if sub_adj is not None:
            rob_x += sub_adj[0]
            rob_y += sub_adj[1]

        sub_ax.text(rob_x, rob_y, markup_sub, horizontalalignment=ha, verticalalignment=va,
                    transform=ccrs.Robinson(), color='black', fontsize=4.5,
                    bbox=dict(facecolor='white', alpha=1, linewidth=0.35, pad=1.5),
                    fontweight='bold', zorder=25)

#"add option doesn't exist in Python bindings... can't seem to find a way to replicate it and keep the speed, so here I go: #count #"gdal_rasterize -l fig3_icesat_utm -add -burn 1 -ts 5656.0 5318.0 -init 0.0 -a_nodata -9999.0 -te 223478.0 6541348.0 789141.0 7073207.0 -ot Float32 -of GTiff fig3_icesat_utm.shp icesat_count.tif" #attr #"gdal_rasterize -l fig3_icesat_utm -add -a zsc -ts 5656.0 5318.0 -init 0.0 -a_nodata -9999.0 -te 223478.0 6541348.0 789141.0 7073207.0 -ot Float32 -of GTiff fig3_icesat_utm.shp icesat_sum.tif" #and same for icebridge #then fn_count_icesat = '/home/atom/ongoing/work_worldwide/figures/fig3/icesat_count_800m.tif' fn_sum_icesat = '/home/atom/ongoing/work_worldwide/figures/fig3/icesat_sum_800m.tif' count_ics = GeoImg(fn_count_icesat) sum_ics = GeoImg(fn_sum_icesat) nodata = count_ics.img == 0. mean_ics = np.zeros(np.shape(sum_ics.img)) * np.nan mean_ics[~nodata] = sum_ics.img[~nodata] / count_ics.img[~nodata] out = count_ics.copy() out.img = np.abs(mean_ics) out.write('/home/atom/ongoing/work_worldwide/figures/fig3/zsc_icesat_800m.tif') fn_count_ib = '/home/atom/ongoing/work_worldwide/figures/fig3/ib_count_300m.tif' fn_sum_ib = '/home/atom/ongoing/work_worldwide/figures/fig3/ib_sum_300m.tif' count_ics = GeoImg(fn_count_ib)
def reproj_stack(ds, utm_out, nice_latlon_tiling=False, write_ds=None, nproc=1):
    ds_out = ds.copy()

    tmp_img = make_geoimg(ds)
    res = tmp_img.dx

    if nice_latlon_tiling:
        tile_name = tilename_stack(ds)
        outputBounds = vt.niceextent_utm_latlontile(tile_name, utm_out, res)
    else:
        outputBounds = None

    dest = gdal.Warp('', tmp_img.gd, format='MEM', dstSRS='EPSG:{}'.format(vt.epsg_from_utm(utm_out)),
                     xRes=res, yRes=res, outputBounds=outputBounds, resampleAlg=gdal.GRA_Bilinear)
    first_img = GeoImg(dest)

    if first_img.is_area():
        first_img.to_point()

    x, y = first_img.xy(grid=False)
    ds_out = ds_out.drop(('z', 'z_ci', 'crs'))
    ds_out = ds_out.drop_dims(('x', 'y'))
    ds_out = ds_out.expand_dims(dim={'y': y, 'x': x})
    ds_out.x.attrs = ds.x.attrs
    ds_out.y.attrs = ds.y.attrs

    if nproc == 1:
        # allocate the output arrays once, then fill them time slice by time slice
        new_z = np.zeros((ds.time.size, len(y), len(x)), dtype=np.float32)
        new_z_ci = np.zeros((ds.time.size, len(y), len(x)), dtype=np.float32)
        for i in range(ds.time.size):
            tmp_z = make_geoimg(ds, i, var='z')
            tmp_z_ci = make_geoimg(ds, i, var='z_ci')
            new_z[i, :] = tmp_z.reproject(first_img).img
            new_z_ci[i, :] = tmp_z_ci.reproject(first_img).img
    else:
        arr_z = ds.z.values
        arr_z_ci = ds.z_ci.values
        in_met = (tmp_img.gt, tmp_img.proj_wkt, tmp_img.npix_x, tmp_img.npix_y)
        out_met = (res, outputBounds, utm_out)
        argsin_z = [(arr_z[i, :], in_met, out_met) for i in range(ds.time.size)]
        argsin_z_ci = [(arr_z_ci[i, :], in_met, out_met) for i in range(ds.time.size)]

        pool = mp.Pool(nproc, maxtasksperchild=1)
        outputs_z = pool.map(wrapper_reproj, argsin_z)
        outputs_z_ci = pool.map(wrapper_reproj, argsin_z_ci)
        pool.close()
        pool.join()

        new_z = np.stack(outputs_z, axis=0)
        new_z_ci = np.stack(outputs_z_ci, axis=0)

    if nice_latlon_tiling:
        mask = vt.latlontile_nodatamask(first_img, tile_name)
        new_z[:, ~mask] = np.nan
        new_z_ci[:, ~mask] = np.nan

    ds_out['z'] = (['time', 'y', 'x'], new_z)
    ds_out['z_ci'] = (['time', 'y', 'x'], new_z_ci)
    ds_out['crs'] = ds['crs']
    ds_out.z.attrs = ds.z.attrs
    ds_out.z_ci.attrs = ds.z_ci.attrs
    ds_out.crs.attrs = create_crs_variable(epsg=vt.epsg_from_utm(utm_out))

    if write_ds is not None:
        ds_out.to_netcdf(write_ds)

    return ds_out

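# A hedged usage sketch (the stack filename and UTM zone string are hypothetical):
# reproject a lat/lon-tiled stack to UTM zone 33N on 4 processes, writing the
# result to a new NetCDF file.
if __name__ == '__main__':
    import xarray as xr
    ds = xr.open_dataset('stack_latlon.nc')
    ds_utm = reproj_stack(ds, '33N', nice_latlon_tiling=False, write_ds='stack_utm33.nc', nproc=4)
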
import numpy as np
from skimage.morphology import disk
from scipy.ndimage.filters import generic_filter
from pybob.GeoImg import GeoImg
from pybob import image_tools as it
from pybob import ddem_tools as dt


def neighborhood_filter(img, radius):
    @jit_filter_function
    def nanmean(a):
        return np.nanmean(a)

    return generic_filter(img, nanmean, footprint=disk(radius))


# load the full mask, which we'll re-project later
mask_full = GeoImg('../southeast_average_corr.tif')

for yr in [2012, 2013]:
    print("Loading {} data files.".format(yr))
    ifsar = GeoImg('{}/seak.ifsar.{}.dem.30m_adj.tif'.format(yr, yr))
    ifsar_srtm = GeoImg('{}/ifsar_srtm_{}_dh.tif'.format(yr, yr))
    srtm = GeoImg('{}/SRTM_SE_Alaska_30m_{}IfSAR_adj.tif'.format(yr, yr))

    valid_area = np.isfinite(ifsar.img)

    glac_shp = '../outlines/01_rgi60_Alaska_GlacierBay_02km_UTM_{}.shp'.format(yr)
    glacier_mask = it.create_mask_from_shapefile(ifsar, glac_shp)
    mask_geo = mask_full.reproject(ifsar_srtm)

    corrs = [35, 50, 70, 80, 90, 95]

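# 'jit_filter_function' is not defined in this snippet; it must be in scope before
# neighborhood_filter is first called. A common recipe (an assumption here, not
# necessarily this author's implementation) compiles the kernel with numba and
# hands scipy a LowLevelCallable, avoiding the per-window Python-callback
# overhead of generic_filter:
import numba
from numba import cfunc, carray
from numba.types import intc, intp, float64, voidptr, CPointer
from scipy import LowLevelCallable


def jit_filter_function(filter_function):
    jitted_function = numba.jit(filter_function, nopython=True)

    # signature required by scipy.ndimage.generic_filter:
    # int callback(double *buffer, npy_intp filter_size, double *return_value, void *user_data)
    @cfunc(intc(CPointer(float64), intp, CPointer(float64), voidptr))
    def wrapped(values_ptr, len_values, result, data):
        # view the raw buffer scipy passes in as a 1-D float64 array
        values = carray(values_ptr, (len_values,), dtype=float64)
        result[0] = jitted_function(values)
        return 1

    return LowLevelCallable(wrapped.ctypes)
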
def create_mmaster_stack(filelist, extent=None, res=None, epsg=None, outfile='mmaster_stack.nc',
                         clobber=False, uncert=False, coreg=False, ref_tiles=None,
                         exc_mask=None, inc_mask=None, outdir='tmp', filt_dem=None,
                         add_ref=False, add_corr=False, latlontile_nodata=None,
                         filt_mm_corr=False, l1a_zipped=False, y0=1900, tmptag=None):
    """
    Given a list of DEM files, create a stacked NetCDF file.

    :param filelist: List of DEM filenames to stack.
    :param extent: Spatial extent of DEMs to limit stack to [xmin, xmax, ymin, ymax].
    :param res: Output spatial resolution of DEMs.
    :param epsg: EPSG code of output CRS.
    :param outfile: Filename for output NetCDF file.
    :param clobber: clobber existing dataset when creating NetCDF file.
    :param uncert: Include uncertainty variable in the output NetCDF.
    :param coreg: Co-register DEMs to an input DEM (given by a shapefile of tiles).
    :param ref_tiles: Filename of input reference DEM tiles.
    :param exc_mask: Filename of exclusion mask (i.e., glaciers) to use in co-registration.
    :param inc_mask: Filename of inclusion mask (i.e., land) to use in co-registration.
    :param outdir: Output directory for temporary files.
    :param filt_dem: Filename of DEM to filter elevation differences to.
    :param add_ref: Add reference DEM as a stack variable.
    :param add_corr: Add correlation masks as a stack variable.
    :param latlontile_nodata: Apply nodata for a lat/lon tile footprint to avoid overlapping
        and simplify xarray merging.
    :param filt_mm_corr: Filter MMASTER DEM with correlation mask out of mmaster_tools when
        stacking (saves disk space).
    :param l1a_zipped: Use if files have been zipped to save on space.
    :param y0: Year 0 to reference NetCDF time variable to.
    :param tmptag: String to append to temporary files.

    :type filelist: array-like
    :type extent: array-like
    :type res: float
    :type epsg: int
    :type outfile: str
    :type clobber: bool
    :type uncert: bool
    :type coreg: bool
    :type ref_tiles: str
    :type exc_mask: str
    :type inc_mask: str
    :type outdir: str
    :type filt_dem: str
    :type add_ref: bool
    :type add_corr: bool
    :type latlontile_nodata: str
    :type filt_mm_corr: bool
    :type l1a_zipped: bool
    :type y0: float
    :type tmptag: str

    :returns nco: NetCDF Dataset of stacked DEMs.
    """
    if extent is not None:
        if type(extent) in [list, tuple]:
            xmin, xmax, ymin, ymax = extent
        elif type(extent) is Polygon:
            x, y = extent.boundary.coords.xy
            xmin, xmax = min(x), max(x)
            ymin, ymax = min(y), max(y)
        else:
            raise ValueError('extent should be a list, tuple, or shapely.Polygon')
    else:
        xmin, xmax, ymin, ymax = get_common_bbox(filelist, epsg)

    print('Searching for intersecting DEMs among the list of ' + str(len(filelist)) + '...')
    # check if each footprint falls within our given extent, and if not, remove it from the list
    if l1a_zipped:
        # if the L1A DEMs are zipped, extracting the archives to read the extent from the
        # rasters takes too long, so read the metadata instead
        l1a_filelist = [fn for fn in filelist if os.path.basename(fn)[0:3] == 'AST']
        rest_filelist = [fn for fn in filelist if fn not in l1a_filelist]
        l1a_inters = get_footprints_inters_ext(l1a_filelist, [xmin, ymin, xmax, ymax], epsg,
                                               use_l1a_met=True)
        rest_inters = get_footprints_inters_ext(rest_filelist, [xmin, ymin, xmax, ymax], epsg)
        filelist = l1a_inters + rest_inters
    else:
        filelist = get_footprints_inters_ext(filelist, [xmin, ymin, xmax, ymax], epsg)
    print('Found ' + str(len(filelist)) + '.')

    if len(filelist) == 0:
        print('Found no DEMs intersecting extent to stack. Skipping...')
        sys.exit()

    datelist = np.array([parse_date(f) for f in filelist])
    sorted_inds = np.argsort(datelist)

    print(filelist[sorted_inds[0]])
    if l1a_zipped and os.path.basename(filelist[sorted_inds[0]])[0:3] == 'AST':
        tmp_zip = filelist[sorted_inds[0]]
        z_name = '_'.join(os.path.basename(tmp_zip).split('_')[0:3]) + '_Z_adj_XAJ_final.tif'
        if tmptag is None:
            fn_tmp = os.path.join(os.path.dirname(tmp_zip), 'tmp_out.tif')
        else:
            fn_tmp = os.path.join(os.path.dirname(tmp_zip), 'tmp_out_' + tmptag + '.tif')
        mt.extract_file_from_zip(tmp_zip, z_name, fn_tmp)
        tmp_img = GeoImg(fn_tmp)
    else:
        tmp_img = GeoImg(filelist[sorted_inds[0]])

    if res is None:
        res = np.round(tmp_img.dx)  # make sure that we have a nice resolution for gdal

    if epsg is None:
        epsg = tmp_img.epsg

    # now, reproject the first image to the extent, resolution, and coordinate system needed.
    dest = gdal.Warp('', tmp_img.gd, format='MEM', dstSRS='EPSG:{}'.format(epsg),
                     xRes=res, yRes=res, outputBounds=(xmin, ymin, xmax, ymax),
                     resampleAlg=gdal.GRA_Bilinear)

    if l1a_zipped and os.path.basename(filelist[sorted_inds[0]])[0:3] == 'AST':
        os.remove(fn_tmp)

    first_img = GeoImg(dest)
    first_img.filename = filelist[sorted_inds[0]]

    # NetCDF assumes that coordinates are the cell center
    if first_img.is_area():
        first_img.to_point()

    nco, to, xo, yo = create_nc(first_img.img, outfile=outfile, clobber=clobber,
                                t0=np.datetime64('{}-01-01'.format(y0)))
    create_crs_variable(first_img.epsg, nco)

    go = nco.createVariable('dem_names', str, ('time',))
    go.long_name = 'Source DEM Filename'

    zo = nco.createVariable('z', 'f4', ('time', 'y', 'x'), fill_value=-9999, zlib=True,
                            chunksizes=[500, min(150, first_img.npix_y), min(150, first_img.npix_x)])
    zo.units = 'meters'
    zo.long_name = 'Height above WGS84 ellipsoid'
    zo.grid_mapping = 'crs'
    zo.coordinates = 'x y'
    zo.set_auto_mask(True)

    if ref_tiles is not None:
        if ref_tiles.endswith('.shp'):
            master_tiles = gpd.read_file(ref_tiles)
            s = STRtree([f for f in master_tiles['geometry'].values])
            bounds = Polygon([(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)])
            ref_vrt = get_tiles(bounds, master_tiles, s, outdir)
        elif ref_tiles.endswith('.vrt') or ref_tiles.endswith('.tif'):
            ref_vrt = ref_tiles
        ref = GeoImg(ref_vrt)

    if filt_dem is not None:
        filt_dem_img = GeoImg(filt_dem)
        filt_dem = filt_dem_img.reproject(first_img)

    # 3 overlapping pixels on each side of the tile in case reprojection is necessary;
    # these will be removed when merging
    if latlontile_nodata is not None and epsg is not None:
        mask = binary_dilation(vt.latlontile_nodatamask(first_img, latlontile_nodata), iterations=3)

    if uncert:
        uo = nco.createVariable('uncert', 'f4', ('time',))
        uo.long_name = 'RMSE of stable terrain differences.'
        uo.units = 'meters'

    if add_ref and ref_tiles is not None:
        ro = nco.createVariable('ref_z', 'f4', ('y', 'x'), fill_value=-9999,
                                chunksizes=[min(150, first_img.npix_y), min(150, first_img.npix_x)])
        ro.units = 'meters'
        ro.long_name = 'Height above WGS84 ellipsoid'
        ro.grid_mapping = 'crs'
        ro.coordinates = 'x y'
        ro.set_auto_mask(True)
        ref_img = ref.reproject(first_img).img
        if latlontile_nodata is not None and epsg is not None:
            ref_img[~mask] = np.nan
        ro[:, :] = ref_img

    if add_corr:
        co = nco.createVariable('corr', 'i1', ('time', 'y', 'x'), fill_value=-1, zlib=True,
                                chunksizes=[500, min(150, first_img.npix_y), min(150, first_img.npix_x)])
        co.units = 'percent'
        co.long_name = 'MMASTER correlation'
        co.grid_mapping = 'crs'
        co.coordinates = 'x y'
        co.set_auto_mask(True)

    x, y = first_img.xy(grid=False)
    xo[:] = x
    yo[:] = y

    # trying something else to speed up writing in compressed chunks
    list_img, list_corr, list_uncert, list_dt, list_name = ([] for i in range(5))

    outind = 0
    for ind in sorted_inds[0:]:
        print(filelist[ind])
        # get instrument
        bname = os.path.splitext(os.path.basename(filelist[ind]))[0]
        splitname = bname.split('_')
        instru = splitname[0]
        # special case for MMASTER outputs (for disk usage)
        if instru == 'AST':
            fn_z = '_'.join(splitname[0:3]) + '_Z_adj_XAJ_final.tif'
            fn_corr = '_'.join(splitname[0:3]) + '_CORR_adj_final.tif'
            # to avoid running into issues in parallel
            if tmptag is None:
                fn_z_tmp = os.path.join(os.path.dirname(filelist[ind]), fn_z)
                fn_corr_tmp = os.path.join(os.path.dirname(filelist[ind]), fn_corr)
            else:
                fn_z_tmp = os.path.join(os.path.dirname(filelist[ind]),
                                        os.path.splitext(fn_z)[0] + '_' + tmptag + '.tif')
                fn_corr_tmp = os.path.join(os.path.dirname(filelist[ind]),
                                           os.path.splitext(fn_corr)[0] + '_' + tmptag + '.tif')
            list_fn_rm = [fn_z_tmp, fn_corr_tmp]

            # unzip if needed
            if l1a_zipped:
                mt.extract_file_from_zip(filelist[ind], fn_z, fn_z_tmp)
                if filt_mm_corr or add_corr:
                    mt.extract_file_from_zip(filelist[ind], fn_corr, fn_corr_tmp)

            # open the DEM, filtering with the correlation mask if it comes out of MMASTER
            if filt_mm_corr:
                img = corr_filter_aster(fn_z_tmp, fn_corr_tmp, 70)
            else:
                img = GeoImg(fn_z_tmp)
        else:
            img = GeoImg(filelist[ind])

        if img.is_area():  # netCDF assumes coordinates are the cell center
            img.to_point()

        if add_corr:
            if instru == 'AST':
                corr = GeoImg(fn_corr_tmp)
                if corr.is_area():
                    corr.to_point()

        if coreg:
            try:
                NDV = img.NDV
                coreg_outdir = os.path.join(outdir, os.path.basename(filelist[ind]).rsplit('.tif', 1)[0])
                _, img, _, stats_final = dem_coregistration(ref, img, glaciermask=exc_mask,
                                                            landmask=inc_mask, outdir=coreg_outdir,
                                                            inmem=True)
                dest = gdal.Warp('', img.gd, format='MEM', dstSRS='EPSG:{}'.format(epsg),
                                 xRes=res, yRes=res, outputBounds=(xmin, ymin, xmax, ymax),
                                 resampleAlg=gdal.GRA_Bilinear, srcNodata=NDV, dstNodata=-9999)
                img = GeoImg(dest)

                if add_corr:
                    if instru == 'AST':
                        corr = corr.reproject(img)
                    else:
                        corr = img.copy()
                        corr.img[:] = 100
                    co[outind, :, :] = corr.img.astype(np.int8)

                if filt_dem is not None:
                    valid = np.logical_and(img.img - filt_dem.img > -400,
                                           img.img - filt_dem.img < 1000)
                    img.img[~valid] = np.nan

                if latlontile_nodata is not None and epsg is not None:
                    img.img[~mask] = np.nan
                    if add_corr:
                        corr.img[~mask] = -1

                nvalid = np.count_nonzero(~np.isnan(img.img))
                if nvalid == 0:
                    print('No valid pixel in the stack extent: skipping...')
                    if l1a_zipped and (instru == 'AST'):
                        for fn_rm in list_fn_rm:
                            if os.path.exists(fn_rm):
                                os.remove(fn_rm)
                    continue

                zo[outind, :, :] = img.img
                if uncert:
                    uo[outind] = stats_final[3]
                print('Adding DEM that has ' + str(nvalid) + ' valid pixels in this extent, '
                      'with a global RMSE of ' + str(stats_final[3]))
            except:
                print('Coregistration failed: skipping...')
                if l1a_zipped and (instru == 'AST'):
                    for fn_rm in list_fn_rm:
                        if os.path.exists(fn_rm):
                            os.remove(fn_rm)
                continue
        else:
            img = img.reproject(first_img)

            if add_corr:
                if instru == 'AST':
                    corr = corr.reproject(first_img)
                else:
                    corr = img.copy()
                    corr.img[:] = 100

            if filt_dem is not None:
                valid = np.logical_and(img.img - filt_dem.img > -400,
                                       img.img - filt_dem.img < 1000)
                img.img[~valid] = np.nan

            if latlontile_nodata is not None and epsg is not None:
                img.img[~mask] = np.nan
                if add_corr:
                    corr.img[~mask] = -1

            nvalid = np.count_nonzero(~np.isnan(img.img))
            if nvalid == 0:
                print('No valid pixel in the stack extent: skipping...')
                if l1a_zipped and (instru == 'AST'):
                    for fn_rm in list_fn_rm:
                        if os.path.exists(fn_rm):
                            os.remove(fn_rm)
                continue

            if uncert:
                try:
                    stats = read_stats(os.path.dirname(filelist[ind]))
                except:
                    stats = None
                if stats is None:
                    list_uncert.append(5.)
                else:
                    try:
                        list_uncert.append(stats['RMSE'])
                    except KeyError:
                        print('KeyError for RMSE here: ' + filelist[ind])
                        continue

            list_img.append(img.img)
            list_corr.append(corr.img.astype(np.int8))
            list_dt.append(datelist[ind].toordinal() - dt.date(y0, 1, 1).toordinal())
            list_name.append(os.path.basename(filelist[ind]).rsplit('.tif', 1)[0])

        outind += 1

        if l1a_zipped and (instru == 'AST'):
            for fn_rm in list_fn_rm:
                if os.path.exists(fn_rm):
                    os.remove(fn_rm)

    # then write all at once
    zo[0:outind, :, :] = np.stack(list_img, axis=0)
    co[0:outind, :, :] = np.stack(list_corr, axis=0)
    uo[0:outind] = np.array(list_uncert)
    to[0:outind] = np.array(list_dt)
    go[0:outind] = np.array(list_name)

    return nco

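# A minimal usage sketch for create_mmaster_stack (all filenames hypothetical):
# stack two DEMs over a given extent at 30 m in UTM zone 33N, keeping the
# per-DEM RMSE as an uncertainty variable. Note the extent ordering is
# [xmin, xmax, ymin, ymax], per the docstring.
if __name__ == '__main__':
    nco = create_mmaster_stack(['dem_2010.tif', 'dem_2018.tif'],
                               extent=[420000, 450000, 6500000, 6530000],
                               res=30, epsg=32633, outfile='my_stack.nc',
                               clobber=True, uncert=True)
    nco.close()
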
    '/home/atom/data/other/Hugonnet_2020/Matthias_2000_2020/DEMs_periods/final/dhdt_gor_AT_2015-08-26_AT_2007-09-13.tif']

for fn_dhdt in list_fn_dhdt:
    print('Working on dDEM: ' + fn_dhdt)

    list_rgiid_valid = list_valid_feat_intersect(fn_dhdt, fn_shp, 'RGIId', 70.)

    if len(list_rgiid_valid) > 0:
        print('Found ' + str(len(list_rgiid_valid)) + ' valid outlines intersecting')

        dhdt = GeoImg(fn_dhdt)
        split_fn = os.path.splitext(os.path.basename(fn_dhdt))[0].split('_')
        sens_early = split_fn[-2]
        sens_late = split_fn[-4]
        date_early = split_fn[-1]
        date_late = split_fn[-3]
        site = split_fn[1]

        for rgiid_valid in list_rgiid_valid:
            print('Working on ' + rgiid_valid)
            dhdt.img[np.abs(dhdt.img) > 15] = np.nan

def get_geophys_var_hypso(fn_ddem, fn_dem, fn_shp, out_dir, path_to_r_geophys):
    pp = PdfPages(os.path.join(out_dir, 'hypsometric_fit_results.pdf'))

    ddem = read_nanarray(fn_ddem)
    ddem[np.absolute(ddem) > 60] = np.nan
    # ddem = ddem * 12.
    dem = read_nanarray(fn_dem)
    mask = rasterize_shp(fn_shp, fn_dem)
    gsd = pixel_size(fn_ddem)

    fn_proxi = os.path.join(out_dir, 'proxi.tif')
    proxi = proximity_shp(fn_shp, fn_ddem, type_prox='interior')

    # first get residuals of the polynomial fit
    res, res_stdized, elev, med, std, nmad, area_tot, area_meas, prox = ddem_med_hypso(
        ddem, dem, mask, gsd, pp=pp, proxi=proxi, get_elev_residual=True)
    plt.close('all')

    fn_mask = os.path.join(out_dir, 'mask.tif')
    write_nanarray(fn_mask, fn_ddem, mask)
    fn_res = os.path.join(out_dir, 'residual.tif')
    fn_res_stdized = os.path.join(out_dir, 'residual_standardized.tif')
    write_nanarray(fn_res, fn_ddem, res)
    write_nanarray(fn_res_stdized, fn_ddem, res_stdized)

    mask_geo = GeoImg(fn_mask)
    res_geo = GeoImg(fn_res)
    res_stdized_geo = GeoImg(fn_res_stdized)
    ddem_geo = GeoImg(fn_ddem)
    dem_geo = GeoImg(fn_dem)

    extent = extent_shp_ref(fn_shp, fn_dem)
    crop_res = res_geo.crop_to_extent([extent[0], extent[2], extent[1], extent[3]])
    crop_res_stdized = res_stdized_geo.crop_to_extent([extent[0], extent[2], extent[1], extent[3]])
    crop_ddem = ddem_geo.crop_to_extent([extent[0], extent[2], extent[1], extent[3]])
    crop_mask = mask_geo.crop_to_extent([extent[0], extent[2], extent[1], extent[3]])
    crop_dem = dem_geo.crop_to_extent([extent[0], extent[2], extent[1], extent[3]])

    fn_crop_res_stdized = os.path.join(out_dir, 'res_stdized_cropped.tif')
    fn_crop_mask = os.path.join(out_dir, 'mask_cropped.tif')
    fn_crop_dem = os.path.join(out_dir, 'dem_cropped.tif')

    crop_res_stdized.img[crop_mask.img != 1] = np.nan
    crop_res_stdized.write(fn_crop_res_stdized)
    crop_mask.write(fn_crop_mask)
    crop_dem.write(fn_crop_dem)

    crop_res.img[crop_mask.img != 1] = np.nan
    crop_res_stdized.img[crop_mask.img != 1] = np.nan

    clim_ddem_raw = np.nanmax(np.absolute(med))

    outline_gla = gpd.read_file(fn_shp)
    fig, _ = plot_polygon_df(outline_gla, edgecolor='k', lw=2, alpha=0.5)
    plt.title('Outline')
    pp.savefig(fig, dpi=300)

    fig = plot_ddem_results(crop_ddem, clim=(-clim_ddem_raw, clim_ddem_raw), colormap='Spectral')[0]
    plt.title('Elevation change [m] (Large scale)')
    pp.savefig(fig, dpi=300)

    fig = plot_ddem_results(crop_ddem, clim=(-3, 3), colormap='Spectral')[0]
    plt.title('Elevation change [m] (Thin scale)')
    pp.savefig(fig, dpi=300)

    clim_res = np.nanmean(np.absolute(nmad))
    fig = plot_ddem_results(crop_res, clim=(-clim_res, clim_res), colormap='Spectral')[0]
    plt.title('Hypsometric residual of elevation change [m] \n'
              '(Elevation change minus hypsometric median)')
    pp.savefig(fig, dpi=300)

    fig = plot_ddem_results(crop_res_stdized, clim=(-1, 1), colormap='Spectral')[0]
    plt.title('Standardized hypsometric residual of elevation change [no unit] \n'
              '(Elevation change minus hypsometric median divided by hypsometric nmad)')
    pp.savefig(fig, dpi=300)

    pp.close()
    plt.close('all')

    os.remove(fn_res)
    os.remove(fn_mask)
    os.remove(fn_res_stdized)

    # normalize elevation
    max_elev = np.nanmax(elev)
    min_elev = np.nanmin(elev)
    elev_n = (elev - min_elev) / (max_elev - min_elev)

    # normalize dh
    max_dh = np.nanmax(med)
    min_dh = np.nanmin(med)
    accu_elev = min_elev + 80 * (max_elev - min_elev) / 100
    tmp_max_dh = np.nanmean(med[elev > accu_elev])  # use mean of the accumulation area instead of max
    if np.abs((np.nanmax(med) - tmp_max_dh) / (max_dh - min_dh)) < 0.3:
        max_dh = tmp_max_dh

    med_n = (min_dh - med) / (max_dh - min_dh)
    std_n = std / (max_dh - min_dh)
    nmad_n = nmad / (max_dh - min_dh)

    # write normalized data
    elev_rs = np.arange(0, 1, 0.01)
    med_rs = np.interp(elev_rs, elev_n, med_n)
    std_rs = np.interp(elev_rs, elev_n, std_n)
    nmad_rs = np.interp(elev_rs, elev_n, nmad_n)
    area_rs = np.interp(elev_rs, elev_n, area_tot)

    df = pd.DataFrame()
    df = df.assign(norm_elev=elev_rs, norm_med_dh=med_rs, norm_std_dh=std_rs,
                   norm_nmad_rs=nmad_rs, area_rs=area_rs)

    df_raw = pd.DataFrame()
    df_raw = df_raw.assign(elev=elev, med_dh=med, std_dh=std, nmad_dh=nmad,
                           area_tot=area_tot, area_meas=area_meas, prox=prox)

    df.to_csv(os.path.join(out_dir, 'df_norm_dh_elev.csv'))
    df_raw.to_csv(os.path.join(out_dir, 'df_raw_dh_elev.csv'))

    ddem = dem = mask = res = res_stdized = None
    crop_mask = crop_res_stdized = crop_res = crop_ddem = crop_dem = None
    ddem_geo = dem_geo = res_geo = res_stdized_geo = None

    # get variogram with moving elevation window from R
    # cmd = ('Rscript ' + path_to_r_geophys + ' -d ' + fn_crop_dem + ' -r ' + fn_crop_res_stdized
    #        + ' -m ' + fn_crop_mask + ' -v Exp -o ' + out_dir)
    # fn_log = os.path.join(out_dir, 'r_geophys.log')
    # log = open(fn_log, 'w')
    # p = Popen(cmd, stdout=log, stderr=log, shell=True)
    # p.wait()
    # log.close()

    os.remove(fn_crop_dem)
    os.remove(fn_crop_res_stdized)
    os.remove(fn_crop_mask)

def reshape_geoimg(fname, xr, yr, rescale=True):
    ds = gdal.Warp('', fname, xRes=xr, yRes=yr, format='VRT', resampleAlg=gdal.GRA_Lanczos)
    resamp = GeoImg(ds)
    if rescale:
        resamp.img = (resamp.img / 256).astype(np.uint8)
    return resamp

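# A short usage sketch ('hillshade.tif' is a hypothetical raster): resample to
# 90 m with Lanczos and rescale 16-bit values to 8-bit for plotting.
if __name__ == '__main__':
    hs_resamp = reshape_geoimg('hillshade.tif', 90, 90, rescale=True)
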
def rect_units_to_verts(rect_u):
    # corner vertices of a rectangle given as [x, y, width, height];
    # the leading line of this function was truncated in the source and is
    # reconstructed here from the visible vertex list and its call sites
    return np.array([[rect_u[0], rect_u[1]],
                     [rect_u[0] + rect_u[2], rect_u[1]],
                     [rect_u[0] + rect_u[2], rect_u[1] + rect_u[3]],
                     [rect_u[0], rect_u[1] + rect_u[3]],
                     [rect_u[0], rect_u[1]]])


def cerc_units_to_verts(cerc_u):
    xy, rad = cerc_u
    theta = np.linspace(0, 2 * np.pi, 100)
    verts = np.vstack([np.sin(theta), np.cos(theta)]).T
    return verts * rad + xy


img = GeoImg(fn_hs)
land_mask = create_mask_from_shapefile(img, fn_land)
ds = gdal.Open(fn_hs)
hs = ds.ReadAsArray()
hs = hs.astype(float)


def stretch_hs(hs, stretch_factor=1.):
    max_hs = 255
    min_hs = 0
    hs_s = (hs - (max_hs - min_hs) / 2) * stretch_factor + (max_hs - min_hs) / 2
    return hs_s

res = 15

list_las = glob(os.path.join(ilaks1b_dir, '*.las'), recursive=True)

for las in list_las:
    print('Working on ' + las)

    xmldoc = minidom.parse(las + '.xml')
    itemlist = xmldoc.getElementsByTagName('RangeBeginningDate')
    date = itemlist[0].childNodes[0].nodeValue

    fn_out = os.path.join(output_dir, 'ILAKS1B_' + ''.join(date.split('-')) + '_'
                          + '_'.join(os.path.splitext(os.path.basename(las))[0].split('_')[2:]))

    if not os.path.exists(fn_out + '-DEM.tif'):
        os.system(os.path.join(asp_path, 'point2dem') + ' --dem-spacing ' + str(res)
                  + ' -o ' + fn_out + ' ' + las)
    else:
        img = GeoImg(fn_out + '-DEM.tif')
        # try again, something failed
        if img.npix_x < 5:
            print('Wrong output raster; reprocessing...')
            os.remove(fn_out + '-DEM.tif')
            os.system(os.path.join(asp_path, 'point2dem') + ' --dem-spacing ' + str(res)
                      + ' -o ' + fn_out + ' ' + las)
        else:
            print('Already processed.')