def plot_avgcoherence(self):
    ''' Generate average coherence raster. '''
    # Import functions
    from ARIAtools.vrtmanager import renderVRT
    import glob

    outname = os.path.join(self.workdir,
                           'avgcoherence{}'.format(self.mask_ext))
    # Delete existing average coherence file
    for i in glob.glob(os.path.join(self.workdir, 'avgcoherence*')):
        os.remove(i)

    # Iterate through all IFGs
    for i, j in enumerate(self.product_dict[0]):
        coh_file = gdal.Warp('', j, options=gdal.WarpOptions(
            format="MEM", cutlineDSName=self.prods_TOTbbox,
            outputBounds=self.bbox_file))
        coh_file_arr = np.ma.masked_where(
            coh_file.ReadAsArray() == coh_file.GetRasterBand(
                1).GetNoDataValue(), coh_file.ReadAsArray())

        # Apply mask (if specified).
        if self.mask is not None:
            coh_file_arr = np.ma.masked_where(self.mask == 0.0, coh_file_arr)

        # Iteratively update average coherence file.
        # If looping through the first coherence file, there is nothing to
        # sum, so just save it to file.
        if os.path.exists(outname):
            coh_file = gdal.Open(outname, gdal.GA_Update)
            coh_file = coh_file.GetRasterBand(1).WriteArray(
                coh_file_arr + coh_file.ReadAsArray())
        else:
            renderVRT(outname, coh_file_arr,
                      geotrans=coh_file.GetGeoTransform(),
                      drivername=self.outputFormat,
                      gdal_fmt=coh_file_arr.dtype.name,
                      proj=coh_file.GetProjection(),
                      nodata=coh_file.GetRasterBand(1).GetNoDataValue())
        coh_file = coh_file_arr = None

    # Take average of coherence sum
    coh_file = gdal.Open(outname, gdal.GA_Update)
    coh_file = coh_file.GetRasterBand(1).WriteArray(
        coh_file.ReadAsArray() / len(self.product_dict[0]))
    coh_file = None

    return
def finalize_metadata(outname, bbox_bounds, prods_TOTbbox, dem, lat, lon,
                      mask=None, outputFormat='ENVI', verbose=None):
    ''' The 2D metadata layer is derived by interpolating and then
    intersecting 3D layers with a DEM. Lat/lon arrays must also be passed
    for this process. '''
    # Import dependencies
    import scipy
    # Import functions
    from ARIAtools.vrtmanager import renderVRT

    # File must be physically extracted, cannot proceed with VRT format.
    # Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Check and buffer bounds if <4 pixels in x/y, which would raise an
    # interpolation error.
    geotrans = gdal.Open(outname + '.vrt').GetGeoTransform()
    if round((max(bbox_bounds[0::2]) - min(bbox_bounds[0::2])), 2) < round(
            (abs(geotrans[1]) * 4), 2) or round(
            (max(bbox_bounds[1::2]) - min(bbox_bounds[1::2])), 2) < round(
            (abs(geotrans[-1]) * 4), 2):
        data_array = gdal.Warp('', outname + '.vrt',
                               options=gdal.WarpOptions(format="MEM"))
    else:
        data_array = gdal.Warp('', outname + '.vrt', options=gdal.WarpOptions(
            format="MEM", outputBounds=bbox_bounds))

    # Define lat/lon/height arrays for metadata layers
    heightsMeta = np.array(gdal.Open(outname + '.vrt').GetMetadataItem(
        'NETCDF_DIM_heightsMeta_VALUES')[1:-1].split(','), dtype='float32')
    ##SS Do we need lon/lat if we would be doing gdal reproject using
    ##   projection and transformation? See our earlier discussions.
    latitudeMeta = np.linspace(
        data_array.GetGeoTransform()[3],
        data_array.GetGeoTransform()[3] +
        (data_array.GetGeoTransform()[5] * data_array.RasterYSize),
        data_array.RasterYSize)
    longitudeMeta = np.linspace(
        data_array.GetGeoTransform()[0],
        data_array.GetGeoTransform()[0] +
        (data_array.GetGeoTransform()[1] * data_array.RasterXSize),
        data_array.RasterXSize)

    # First, using the height/latitude/longitude arrays corresponding to the
    # metadata layer, set up a spatial 2D interpolator. Using this, perform
    # vertical 1D interpolation on the cube, and then use the result to set
    # up a regular-grid interpolator. Using this, pass the DEM and full-res
    # lat/lon arrays in order to get the intersection with the DEM.

    # 2D interpolation
    interp_2d = InterpCube(data_array.ReadAsArray(), heightsMeta,
                           np.flip(latitudeMeta, axis=0), longitudeMeta)
    out_interpolated = np.zeros(
        (heightsMeta.shape[0], latitudeMeta.shape[0], longitudeMeta.shape[0]))

    # 3D interpolation
    for iz, hgt in enumerate(heightsMeta):
        for iline, line in enumerate(latitudeMeta):
            for ipix, pixel in enumerate(longitudeMeta):
                out_interpolated[iz, iline, ipix] = interp_2d(line, pixel, hgt)
    out_interpolated = np.flip(out_interpolated, axis=0)

    # Interpolate to the interferometric grid
    interpolator = scipy.interpolate.RegularGridInterpolator(
        (heightsMeta, np.flip(latitudeMeta, axis=0), longitudeMeta),
        out_interpolated, method='linear',
        fill_value=data_array.GetRasterBand(1).GetNoDataValue())
    out_interpolated = interpolator(
        np.stack((np.flip(dem.ReadAsArray(), axis=0), lat, lon), axis=-1))

    # Save file
    renderVRT(outname, out_interpolated, geotrans=dem.GetGeoTransform(),
              drivername=outputFormat,
              gdal_fmt=data_array.ReadAsArray().dtype.name,
              proj=dem.GetProjection(),
              nodata=data_array.GetRasterBand(1).GetNoDataValue())

    # Since the metadata layer extends at least one grid node outside of the
    # expected track bounds, it must be cut to conform with these bounds.
    # Crop to track extents
    out_interpolated = gdal.Warp('', outname, options=gdal.WarpOptions(
        format="MEM", cutlineDSName=prods_TOTbbox, outputBounds=bbox_bounds,
        dstNodata=data_array.GetRasterBand(1).GetNoDataValue())).ReadAsArray()

    # Apply mask (if specified).
    if mask is not None:
        ##SS Just to double check if the no-data is being tracked here.
        ##   The VRT is setting a no-data value, but here no-data is not used.
        out_interpolated = np.multiply(out_interpolated, mask,
                                       out=out_interpolated, where=mask == 0)

    # Write cropped (and optionally masked) array back to file
    update_file = gdal.Open(outname, gdal.GA_Update)
    update_file = update_file.GetRasterBand(1).WriteArray(out_interpolated)

    del out_interpolated, interpolator, interp_2d, data_array, update_file
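# Hedged usage sketch (illustration only, not part of the ARIAtools API): how
# finalize_metadata might be called to intersect a 3D metadata cube (e.g. an
# 'incidenceAngle' layer with an existing 'incidenceAngle.vrt') with a DEM.
# The file names 'incidenceAngle', 'DEM.dem', 'prods_TOTbbox.json', and the
# bounds are hypothetical placeholders; in the real workflow these inputs are
# prepared by the extraction routines.
def _example_finalize_metadata():
    from osgeo import gdal
    import numpy as np

    dem = gdal.Open('DEM.dem')  # hypothetical cropped DEM
    gt = dem.GetGeoTransform()
    # Build full-resolution lon/lat arrays matching the DEM grid
    lon = np.linspace(gt[0], gt[0] + gt[1] * dem.RasterXSize,
                      dem.RasterXSize)
    lat = np.linspace(gt[3], gt[3] + gt[5] * dem.RasterYSize,
                      dem.RasterYSize)
    lon, lat = np.meshgrid(lon, lat)

    bbox_bounds = [-119.0, 34.0, -117.5, 35.5]  # hypothetical W, S, E, N
    finalize_metadata('incidenceAngle', bbox_bounds, 'prods_TOTbbox.json',
                      dem, lat, lon, outputFormat='ENVI')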
def prep_mask(product_dict, maskfilename, bbox_file, prods_TOTbbox, proj,
              amp_thresh=None, arrshape=None, workdir='./',
              outputFormat='ENVI'):
    ''' Function to load and export a mask file. If the "Download" flag is
    specified, the GSHHS water mask will be downloaded on the fly. '''
    # Import functions
    from ARIAtools.vrtmanager import renderVRT
    import glob

    _world_watermask = [
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L1.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L2.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L3.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L4.shp',
        ' /vsizip/vsicurl/https://osmdata.openstreetmap.de/download/land-polygons-complete-4326.zip/land-polygons-complete-4326/land_polygons.shp'
    ]

    # Get bounds of user bbox_file
    bounds = open_shapefile(bbox_file, 0, 0).bounds

    # File must be physically extracted, cannot proceed with VRT format.
    # Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Download mask
    if maskfilename.lower() == 'download':
        maskfilename = os.path.join(workdir, 'watermask' + '.msk')

        ### Make coastlines/islands union VRT
        os.system('ogrmerge.py -o ' +
                  os.path.join(workdir, 'watermsk_shorelines.vrt') +
                  ''.join(_world_watermask[::2]) +
                  ' -field_strategy Union -f VRT -single')

        ### Make lakes/ponds union VRT
        os.system('ogrmerge.py -o ' +
                  os.path.join(workdir, 'watermsk_lakes.vrt') +
                  ''.join(_world_watermask[1::2]) +
                  ' -field_strategy Union -f VRT -single')

        ### Initiate water-mask with coastlines/islands union VRT
        gdal.Rasterize(maskfilename,
                       os.path.join(workdir, 'watermsk_shorelines.vrt'),
                       options=gdal.RasterizeOptions(format=outputFormat,
                                                     outputBounds=bounds,
                                                     outputType=gdal.GDT_Byte,
                                                     width=arrshape[1],
                                                     height=arrshape[0],
                                                     burnValues=[1],
                                                     layers='merged'))
        gdal.Open(maskfilename, gdal.GA_Update).SetProjection(proj)
        gdal.Translate(maskfilename + '.vrt', maskfilename,
                       options=gdal.TranslateOptions(format="VRT"))

        ### Must take the inverse of the lakes/ponds union because its
        ### designation (1 for water, 0 for land) is the opposite of what is
        ### desired (0 for water, 1 for land).
        lake_masks = gdal.Rasterize(
            '', os.path.join(workdir, 'watermsk_lakes.vrt'),
            options=gdal.RasterizeOptions(format='MEM', outputBounds=bounds,
                                          outputType=gdal.GDT_Byte,
                                          width=arrshape[1],
                                          height=arrshape[0], burnValues=[1],
                                          layers='merged', inverse=True))
        lake_masks.SetProjection(proj)
        lake_masks = lake_masks.ReadAsArray()

        if amp_thresh:
            ### Make average amplitude mask
            # Iterate through all IFGs
            for i, j in enumerate(product_dict[0]):
                amp_file = gdal.Warp('', j, options=gdal.WarpOptions(
                    format="MEM", cutlineDSName=prods_TOTbbox,
                    outputBounds=bounds))
                amp_file_arr = np.ma.masked_where(
                    amp_file.ReadAsArray() == amp_file.GetRasterBand(
                        1).GetNoDataValue(), amp_file.ReadAsArray())

                # Iteratively update average amplitude file.
                # If looping through the first amplitude file, there is
                # nothing to sum, so just save it to file.
                if os.path.exists(os.path.join(workdir, 'avgamplitude.msk')):
                    amp_file = gdal.Open(
                        os.path.join(workdir, 'avgamplitude.msk'),
                        gdal.GA_Update)
                    amp_file = amp_file.GetRasterBand(1).WriteArray(
                        amp_file_arr + amp_file.ReadAsArray())
                else:
                    renderVRT(
                        os.path.join(workdir, 'avgamplitude.msk'),
                        amp_file_arr, geotrans=amp_file.GetGeoTransform(),
                        drivername=outputFormat,
                        gdal_fmt=amp_file_arr.dtype.name,
                        proj=amp_file.GetProjection(),
                        nodata=amp_file.GetRasterBand(1).GetNoDataValue())
                amp_file = amp_file_arr = None

            # Take average of amplitude sum
            amp_file = gdal.Open(os.path.join(workdir, 'avgamplitude.msk'),
                                 gdal.GA_Update)
            arr_mean = amp_file.ReadAsArray() / len(product_dict[0])
            arr_mean = np.where(arr_mean < float(amp_thresh), 0, 1)
            amp_file = amp_file.GetRasterBand(1).WriteArray(arr_mean)
            amp_file = None
            arr_mean = None
            amp_file = gdal.Open(os.path.join(
                workdir, 'avgamplitude.msk')).ReadAsArray()
        else:
            amp_file = np.ones((lake_masks.shape[0], lake_masks.shape[1]))

        ### Update water-mask with lakes/ponds union and average amplitude
        update_file = gdal.Open(maskfilename, gdal.GA_Update)
        update_file = update_file.GetRasterBand(1).WriteArray(
            update_file.ReadAsArray() * lake_masks * amp_file)
        print('Downloaded water-mask here: ' + maskfilename)
        update_file = None
        lake_masks = None
        amp_file = None

        # Delete temp files
        os.remove(os.path.join(workdir, 'watermsk_shorelines.vrt'))
        os.remove(os.path.join(workdir, 'watermsk_lakes.vrt'))
        for i in glob.glob(os.path.join(workdir, 'avgamplitude.msk*')):
            os.remove(i)

    # Load mask
    try:
        mask = gdal.Warp('', maskfilename,
                         options=gdal.WarpOptions(format="MEM",
                                                  cutlineDSName=prods_TOTbbox,
                                                  outputBounds=bounds,
                                                  dstNodata=0))
        mask.SetProjection(proj)
        # Mask no-data values (if a no-data value is set)
        if mask.GetRasterBand(1).GetNoDataValue():
            mask = np.ma.masked_where(
                mask.ReadAsArray() == mask.GetRasterBand(1).GetNoDataValue(),
                mask.ReadAsArray())
        else:
            mask = mask.ReadAsArray()
    except:
        raise Exception('Failed to open user mask')

    return mask
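# Hedged usage sketch (illustration only, not part of the ARIAtools API): how
# prep_mask might be invoked with the on-the-fly 'Download' water mask. The
# amplitude file list, bbox/track shapefiles, array shape, and amplitude
# threshold are hypothetical placeholders.
def _example_prep_mask():
    from osgeo import osr

    # Build a WKT projection string for EPSG:4326 (assumed product projection)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    # Hypothetical amplitude layers; prep_mask iterates over product_dict[0]
    amp_files = ['amplitude/20190101_20190113.vrt',
                 'amplitude/20190113_20190125.vrt']
    mask = prep_mask([amp_files], 'Download', 'user_bbox.json',
                     'prods_TOTbbox.json', srs.ExportToWkt(),
                     amp_thresh='300', arrshape=[450, 900],
                     workdir='./mask', outputFormat='ENVI')
    return mask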
def resampleRaster(fname, multilooking, bounds, prods_TOTbbox,
                   rankedResampling=False, outputFormat='ENVI',
                   num_threads='2'):
    ''' Resample rasters and update corresponding VRTs. '''
    # Import functions
    import os
    from scipy import stats
    import numpy as np
    from decimal import Decimal, ROUND_HALF_UP
    import glob
    from ARIAtools.vrtmanager import renderVRT

    # Check if physical raster exists and needs to be updated.
    # Also get datasource name (inputname).
    if outputFormat == 'VRT' and os.path.exists(fname.split('.vrt')[0]):
        outputFormat = 'ENVI'
    if os.path.exists(fname.split('.vrt')[0]):
        inputname = fname
    else:
        fname += '.vrt'
        inputname = gdal.Open(fname).GetFileList()[-1]

    # Access original shape
    ds = gdal.Warp('', fname, options=gdal.WarpOptions(
        format="MEM", cutlineDSName=prods_TOTbbox, outputBounds=bounds,
        multithread=True, options=['NUM_THREADS=%s' % (num_threads)]))
    # Get output resolution
    arrshape = [
        abs(ds.GetGeoTransform()[1]) * multilooking,
        abs(ds.GetGeoTransform()[-1]) * multilooking
    ]
    # Get geotrans/proj
    ds = gdal.Warp('', fname, options=gdal.WarpOptions(
        format="MEM", cutlineDSName=prods_TOTbbox, outputBounds=bounds,
        xRes=arrshape[0], yRes=arrshape[1], resampleAlg='near',
        multithread=True, options=['NUM_THREADS=%s' % (num_threads)]))
    geotrans = ds.GetGeoTransform()
    proj = ds.GetProjection()

    # Must resample mask with nearest-neighbor
    if fname.split('/')[-2] == 'mask':
        # Resample raster
        gdal.Warp(
            fname, inputname,
            options=gdal.WarpOptions(
                format=outputFormat, cutlineDSName=prods_TOTbbox,
                outputBounds=bounds, xRes=arrshape[0], yRes=arrshape[1],
                resampleAlg='near', multithread=True,
                options=['NUM_THREADS=%s' % (num_threads) + ' -overwrite']))

    # Use pixel function to downsample connected components/unw files based
    # off of the frequency of connected components in each window
    elif fname.split('/')[-2] == 'connectedComponents' or fname.split(
            '/')[-2] == 'unwrappedPhase':
        # Resample unw phase based off of mode of connected components
        fnameunw = os.path.join('/'.join(fname.split('/')[:-2]),
                                'unwrappedPhase',
                                ''.join(fname.split('/')[-1]).split('.vrt')[0])
        fnameconcomp = os.path.join(
            '/'.join(fname.split('/')[:-2]), 'connectedComponents',
            ''.join(fname.split('/')[-1]).split('.vrt')[0])
        if rankedResampling:
            # Open connected components/unw files
            ds_concomp = gdal.Open(fnameconcomp)
            ds_concomp_nodata = ds_concomp.GetRasterBand(1).GetNoDataValue()
            ds_concomp = ds_concomp.ReadAsArray()
            ds_concomp = np.ma.masked_where(ds_concomp == ds_concomp_nodata,
                                            ds_concomp)
            np.ma.set_fill_value(ds_concomp, ds_concomp_nodata)
            ds_unw = gdal.Open(fnameunw)
            ds_unw_nodata = ds_unw.GetRasterBand(1).GetNoDataValue()
            ds_unw = ds_unw.ReadAsArray()
            ds_unw = np.ma.masked_where(ds_unw == ds_unw_nodata, ds_unw)
            np.ma.set_fill_value(ds_unw, ds_unw_nodata)
            unwmap = []
            for row in range(multilooking, (ds_unw.shape[0]) + multilooking,
                             multilooking):
                unwmap_row = []
                for column in range(multilooking,
                                    (ds_unw.shape[1]) + multilooking,
                                    multilooking):
                    # Get subset values
                    subset_concomp = ds_concomp[row - multilooking:row,
                                                column - multilooking:column]
                    subset_unw = ds_unw[row - multilooking:row,
                                        column - multilooking:column]
                    concomp_mode = stats.mode(
                        subset_concomp.flatten()).mode[0]
                    # Average only phase values coinciding with concomp mode
                    subset_concomp = np.where(subset_concomp != concomp_mode,
                                              0, 1)
                    subset_unw = subset_unw * subset_concomp
                    # Assign downsampled pixel values
                    unwmap_row.append(subset_unw.mean())
                unwmap.append(unwmap_row)

            # Finalize unw array
            unwmap = np.array(unwmap)
            # Finalize unw array shape
            unwmap = unwmap[
                0:int(Decimal(ds_unw.shape[0] / multilooking).quantize(
                    0, ROUND_HALF_UP)),
                0:int(Decimal(ds_unw.shape[1] / multilooking).quantize(
                    0, ROUND_HALF_UP))]
            unwmap = np.ma.masked_invalid(unwmap)
            np.ma.set_fill_value(unwmap, ds_unw_nodata)
            # Unwphase
            renderVRT(fnameunw, unwmap.filled(), geotrans=geotrans,
                      drivername=outputFormat, gdal_fmt='float32', proj=proj,
                      nodata=ds_unw_nodata)
            # Temp workaround for gdal bug
            try:
                gdal.Open(fnameunw)
            except RuntimeError:
                for f in glob.glob(fnameunw + "*"):
                    os.remove(f)
                unwmap[0, 0] = unwmap[0, 0] - 1e-6
                renderVRT(fnameunw, unwmap.filled(), geotrans=geotrans,
                          drivername=outputFormat, gdal_fmt='float32',
                          proj=proj, nodata=ds_unw_nodata)
            del unwmap
            # Resample connected components
            gdal.Warp(fnameconcomp, fnameconcomp,
                      options=gdal.WarpOptions(
                          format=outputFormat, cutlineDSName=prods_TOTbbox,
                          outputBounds=bounds, xRes=arrshape[0],
                          yRes=arrshape[1], resampleAlg='mode',
                          multithread=True,
                          options=['NUM_THREADS=%s' % (num_threads) +
                                   ' -overwrite']))
            # Update VRT
            gdal.BuildVRT(fnameconcomp + '.vrt', fnameconcomp,
                          options=gdal.BuildVRTOptions(
                              options=['-overwrite']))
        # Default: resample unw phase with gdal average algorithm
        else:
            # Resample unwphase
            ds_unw_nodata = gdal.Open(fnameunw)
            ds_unw_nodata = ds_unw_nodata.GetRasterBand(1).GetNoDataValue()
            gdal.Warp(fnameunw, fnameunw,
                      options=gdal.WarpOptions(
                          format=outputFormat, cutlineDSName=prods_TOTbbox,
                          outputBounds=bounds, xRes=arrshape[0],
                          yRes=arrshape[1], resampleAlg='average',
                          multithread=True,
                          options=['NUM_THREADS=%s' % (num_threads) +
                                   ' -overwrite']))
            # Update VRT
            gdal.BuildVRT(fnameunw + '.vrt', fnameunw,
                          options=gdal.BuildVRTOptions(
                              options=['-overwrite']))
            # Temp workaround for gdal bug
            try:
                gdal.Open(fnameunw)
            except RuntimeError:
                unwmap = np.fromfile(fnameunw, dtype=np.float32).reshape(
                    ds.GetRasterBand(1).ReadAsArray().shape)
                for f in glob.glob(fnameunw + "*"):
                    os.remove(f)
                unwmap[0, 0] = unwmap[0, 0] - 1e-6
                renderVRT(fnameunw, unwmap, geotrans=geotrans,
                          drivername=outputFormat, gdal_fmt='float32',
                          proj=proj, nodata=ds_unw_nodata)
                del unwmap
            # Resample connected components
            gdal.Warp(fnameconcomp, fnameconcomp,
                      options=gdal.WarpOptions(
                          format=outputFormat, cutlineDSName=prods_TOTbbox,
                          outputBounds=bounds, xRes=arrshape[0],
                          yRes=arrshape[1], resampleAlg='near',
                          multithread=True,
                          options=['NUM_THREADS=%s' % (num_threads) +
                                   ' -overwrite']))
            # Update VRT
            gdal.BuildVRT(fnameconcomp + '.vrt', fnameconcomp,
                          options=gdal.BuildVRTOptions(
                              options=['-overwrite']))
    # Resample all other files with lanczos
    else:
        # Resample raster
        gdal.Warp(
            fname, inputname,
            options=gdal.WarpOptions(
                format=outputFormat, cutlineDSName=prods_TOTbbox,
                outputBounds=bounds, xRes=arrshape[0], yRes=arrshape[1],
                resampleAlg='lanczos', multithread=True,
                options=['NUM_THREADS=%s' % (num_threads) + ' -overwrite']))

    if outputFormat != 'VRT':
        # Update VRT
        gdal.BuildVRT(fname + '.vrt', fname,
                      options=gdal.BuildVRTOptions(options=['-overwrite']))

    return
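# Hedged usage sketch (illustration only, not part of the ARIAtools API): how
# resampleRaster might be used to multilook an extracted coherence layer by a
# factor of 3. The layer path, user bbox, and track-bounds shapefile are
# hypothetical placeholders; open_shapefile is assumed to be available at
# module level, as used in prep_mask above.
def _example_resampleRaster():
    bounds = open_shapefile('user_bbox.json', 0, 0).bounds  # (W, S, E, N)
    resampleRaster('coherence/20190101_20190113', 3, bounds,
                   'prods_TOTbbox.json', rankedResampling=False,
                   outputFormat='ENVI', num_threads='4')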
def rasterAverage(outname, product_dict, bounds, prods_TOTbbox,
                  outputFormat='ENVI', thresh=None):
    ''' Generate average of rasters.

    Currently implemented for:
    1. amplitude under 'mask_util.py'
    2. coherence under 'plot_avgcoherence' function of productPlot
    '''
    # Import functions
    from ARIAtools.vrtmanager import renderVRT
    import glob
    import numpy as np
    import os

    ### Make average raster
    # Delete existing average raster file
    for i in glob.glob(outname + '*'):
        os.remove(i)

    # Iterate through all layers
    for i in enumerate(product_dict):
        arr_file = gdal.Warp('', i[1], options=gdal.WarpOptions(
            format="MEM", cutlineDSName=prods_TOTbbox, outputBounds=bounds))
        arr_file_arr = np.ma.masked_where(
            arr_file.ReadAsArray() == arr_file.GetRasterBand(
                1).GetNoDataValue(), arr_file.ReadAsArray())

        ### Iteratively update average raster file
        if os.path.exists(outname):
            arr_file = gdal.Open(outname, gdal.GA_Update)
            arr_file = arr_file.GetRasterBand(1).WriteArray(
                arr_file_arr + arr_file.ReadAsArray())
        else:
            # If looping through the first raster file, there is nothing to
            # sum, so just save it to file.
            renderVRT(outname, arr_file_arr,
                      geotrans=arr_file.GetGeoTransform(),
                      drivername=outputFormat,
                      gdal_fmt=arr_file_arr.dtype.name,
                      proj=arr_file.GetProjection(),
                      nodata=arr_file.GetRasterBand(1).GetNoDataValue())
        arr_file = arr_file_arr = None

    # Take average of raster sum
    arr_file = gdal.Open(outname, gdal.GA_Update)
    arr_mean = arr_file.ReadAsArray() / len(product_dict)

    # Mask using specified raster threshold
    if thresh:
        arr_mean = np.where(arr_mean < float(thresh), 0, 1)

    # Save updated array to file
    arr_file = arr_file.GetRasterBand(1).WriteArray(arr_mean)
    arr_file = None
    arr_mean = None

    # Load raster to pass
    arr_file = gdal.Open(outname).ReadAsArray()

    return arr_file
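# Hedged usage sketch (illustration only, not part of the ARIAtools API): how
# rasterAverage might be called to build an average coherence raster, and how
# the same call with a threshold yields a binary mask (as done for amplitude
# in mask_util). The file paths, bbox shapefile, and threshold value are
# hypothetical placeholders; open_shapefile is assumed to be available at
# module level, as used in prep_mask above.
def _example_rasterAverage():
    coherence_files = ['coherence/20190101_20190113.vrt',
                       'coherence/20190113_20190125.vrt']
    bounds = open_shapefile('user_bbox.json', 0, 0).bounds

    # Plain average of all coherence layers
    avg_coh = rasterAverage('mask/avgcoherence', coherence_files, bounds,
                            'prods_TOTbbox.json', outputFormat='ENVI')

    # Binary mask: 1 where the average is at or above the threshold, 0 below
    coh_mask = rasterAverage('mask/cohmask', coherence_files, bounds,
                             'prods_TOTbbox.json', outputFormat='ENVI',
                             thresh='0.4')
    return avg_coh, coh_mask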