def prepare4multi_subplots(inps, metadata):
    """Prepare for multiple subplots:
    1) check multilook to save memory
    2) read existed reference pixel info for unwrapPhase
    3) read dropIfgram info
    4) read and prepare DEM for background

    Parameters: inps     : Namespace of input arguments (updated in place and returned)
                metadata : dict, attributes of the data file (inps.file)
    Returns:    inps     : Namespace, updated with dsetFamilyList, multilook settings,
                           file_ref_yx, dropDatasetList and DEM background layers
    """
    inps.dsetFamilyList = sorted(list(set(i.split('-')[0] for i in inps.dset)))

    # Update multilook parameters with new num and col number
    if inps.multilook and inps.multilook_num == 1:
        # Do not auto multilook mask and lookup table file
        auto_multilook = True
        for dsFamily in inps.dsetFamilyList:
            if any(i in dsFamily.lower() for i in ['mask', 'coord']):
                auto_multilook = False
        if auto_multilook:
            inps.multilook, inps.multilook_num = check_multilook_input(
                inps.pix_box, inps.fig_row_num, inps.fig_col_num)
        if inps.msk is not None:
            inps.msk = multilook_data(inps.msk, inps.multilook_num, inps.multilook_num)

    # Reference pixel for timeseries and ifgramStack
    inps.file_ref_yx = None
    if inps.key in ['ifgramStack'] and 'REF_Y' in metadata.keys():
        ref_y, ref_x = int(metadata['REF_Y']), int(metadata['REF_X'])
        length, width = int(metadata['LENGTH']), int(metadata['WIDTH'])
        # only use the reference pixel if it falls within the file extent
        if 0 <= ref_y < length and 0 <= ref_x < width:
            inps.file_ref_yx = [ref_y, ref_x]
            vprint('consider reference pixel in y/x: {}'.format(inps.file_ref_yx))

    # shrink the reference-pixel marker as the number of subplots grows.
    # BUGFIX: check the larger threshold first; the original
    # `if > 10: ... elif > 100:` order made the `> 100` branch unreachable.
    if inps.dsetNum > 100:
        inps.ref_marker_size /= 20.
    elif inps.dsetNum > 10:
        inps.ref_marker_size /= 10.

    # Check dropped interferograms
    inps.dropDatasetList = []
    if inps.key == 'ifgramStack' and inps.disp_title:
        obj = ifgramStack(inps.file)
        obj.open(print_msg=False)
        dropDate12List = obj.get_drop_date12_list()
        for i in inps.dsetFamilyList:
            inps.dropDatasetList += ['{}-{}'.format(i, j) for j in dropDate12List]
        vprint("mark interferograms with 'dropIfgram=False' in red colored title")

    # Read DEM for shaded-relief background (only when its size matches the data file)
    if inps.dem_file:
        dem_metadata = read_attribute(inps.dem_file)
        if all(dem_metadata[i] == metadata[i] for i in ['LENGTH', 'WIDTH']):
            vprint('reading DEM: {} ... '.format(os.path.basename(inps.dem_file)))
            dem = read(inps.dem_file,
                       datasetName='height',
                       box=inps.pix_box,
                       print_msg=False)[0]
            if inps.multilook:
                dem = multilook_data(dem, inps.multilook_num, inps.multilook_num)
            (inps.dem_shade,
             inps.dem_contour,
             inps.dem_contour_seq) = pp.prepare_dem_background(
                dem=dem, inps=inps, print_msg=inps.print_msg)
        else:
            inps.dem_file = None
            inps.transparency = 1.0
            vprint('Input DEM file has different size than data file, ignore it.')
    return inps
def write_ifgram_stack(outfile, unwStack, cohStack, connCompStack, ampStack=None,
                       box=None, xstep=1, ystep=1):
    """Write ifgramStack HDF5 file from stack VRT files.

    Parameters: outfile       : str, path of the output HDF5 file, opened in append
                                mode — assumes the date / dropIfgram / unwrapPhase /
                                bperp / coherence / connectComponent (and magnitude,
                                if ampStack is given) datasets are pre-allocated
                                (TODO confirm against the caller that creates the file)
                unwStack      : str, path of the unwrapped-phase stack VRT file
                cohStack      : str, path of the coherence stack VRT file
                connCompStack : str, path of the connected-component stack VRT file
                ampStack      : str or None, path of the magnitude stack VRT file
                box           : tuple of 4 int or None, (x0, y0, x1, y1) subset in pixels
                xstep/ystep   : int, multilook factor in x / y direction
    Returns:    outfile       : str, path of the written HDF5 file
    """
    print('-' * 50)
    stackFiles = [unwStack, cohStack, connCompStack, ampStack]
    # str(i) so that a None ampStack does not break the width calculation
    max_digit = max([len(os.path.basename(str(i))) for i in stackFiles])
    for stackFile in stackFiles:
        if stackFile is not None:
            print('open {f:<{w}} with gdal ...'.format(f=os.path.basename(stackFile), w=max_digit))

    dsUnw = gdal.Open(unwStack, gdal.GA_ReadOnly)
    dsCoh = gdal.Open(cohStack, gdal.GA_ReadOnly)
    dsComp = gdal.Open(connCompStack, gdal.GA_ReadOnly)
    if ampStack is not None:
        dsAmp = gdal.Open(ampStack, gdal.GA_ReadOnly)
    else:
        dsAmp = None

    # extract NoDataValue (from the last */date2_date1.vrt file for example)
    ds = gdal.Open(dsUnw.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueUnw = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for unwrapPhase : {:<5} and convert to 0.'.format(noDataValueUnw))

    ds = gdal.Open(dsCoh.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueCoh = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for coherence : {:<5} and convert to 0.'.format(noDataValueCoh))

    ds = gdal.Open(dsComp.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueComp = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for connectComponent: {:<5} and convert to 0.'.format(noDataValueComp))
    ds = None

    if dsAmp is not None:
        ds = gdal.Open(dsAmp.GetFileList()[-1], gdal.GA_ReadOnly)
        noDataValueAmp = ds.GetRasterBand(1).GetNoDataValue()
        print('grab NoDataValue for magnitude : {:<5} and convert to 0.'.format(noDataValueAmp))
        ds = None

    # sort the order of interferograms based on date1_date2 with date1 < date2
    # (VRT bands are stored as date2_date1; build a date12 -> band index map)
    nPairs = dsUnw.RasterCount
    d12BandDict = {}
    for ii in range(nPairs):
        bnd = dsUnw.GetRasterBand(ii + 1)
        d12 = bnd.GetMetadata("unwrappedPhase")["Dates"]
        d12 = sorted(d12.split("_"))
        d12 = '{}_{}'.format(d12[0], d12[1])
        d12BandDict[d12] = ii + 1
    d12List = sorted(d12BandDict.keys())
    print('number of interferograms: {}'.format(len(d12List)))

    # box to gdal arguments
    # link: https://gdal.org/python/osgeo.gdal.Band-class.html#ReadAsArray
    if box is not None:
        kwargs = dict(xoff=box[0], yoff=box[1],
                      win_xsize=box[2] - box[0],
                      win_ysize=box[3] - box[1])
    else:
        kwargs = dict()

    print('writing data to HDF5 file {} with a mode ...'.format(outfile))
    with h5py.File(outfile, "a") as f:
        prog_bar = ptime.progressBar(maxValue=nPairs)
        for ii in range(nPairs):
            # write pairs in the chronologically sorted date12 order,
            # reading the matching (possibly unsorted) VRT band
            d12 = d12List[ii]
            bndIdx = d12BandDict[d12]
            prog_bar.update(ii + 1, suffix='{}'.format(d12))

            f["date"][ii, 0] = d12.split("_")[0].encode("utf-8")
            f["date"][ii, 1] = d12.split("_")[1].encode("utf-8")
            f["dropIfgram"][ii] = True

            # unwrapped phase
            bnd = dsUnw.GetRasterBand(bndIdx)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            data[data == noDataValueUnw] = 0  #assign pixel with no-data to 0
            data *= -1.0  #date2_date1 -> date1_date2
            f["unwrapPhase"][ii, :, :] = data

            # perpendicular baseline (sign flipped for the same date-order reason)
            bperp = float(bnd.GetMetadata("unwrappedPhase")["perpendicularBaseline"])
            bperp *= -1.0  #date2_date1 -> date1_date2
            f["bperp"][ii] = bperp

            # coherence
            bnd = dsCoh.GetRasterBand(bndIdx)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            data[data == noDataValueCoh] = 0  #assign pixel with no-data to 0
            f["coherence"][ii, :, :] = data

            # connected components
            bnd = dsComp.GetRasterBand(bndIdx)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            data[data == noDataValueComp] = 0  #assign pixel with no-data to 0
            f["connectComponent"][ii, :, :] = data

            # magnitude (optional)
            if dsAmp is not None:
                bnd = dsAmp.GetRasterBand(bndIdx)
                data = bnd.ReadAsArray(**kwargs)
                data = multilook_data(data, ystep, xstep, method='nearest')
                data[data == noDataValueAmp] = 0  #assign pixel with no-data to 0
                f["magnitude"][ii, :, :] = data

        prog_bar.close()

        # add MODIFICATION_TIME metadata to each 3D dataset
        for dsName in ['unwrapPhase', 'coherence', 'connectComponent']:
            f[dsName].attrs['MODIFICATION_TIME'] = str(time.time())

    # NOTE(review): "HD5" (vs "HDF5") typo in the message — left as-is (runtime string)
    print('finished writing to HD5 file: {}'.format(outfile))

    # release the GDAL dataset handles
    dsUnw = None
    dsCoh = None
    dsComp = None
    dsAmp = None
    return outfile
def read_data4figure(i_start, i_end, inps, metadata):
    """Read multiple datasets for one figure into 3D matrix based on i_start/end.

    Parameters: i_start  : int, index of the first dataset (inclusive) in inps.dset
                i_end    : int, index of the last dataset (exclusive) in inps.dset
                inps     : Namespace of input arguments (vlim/dlim updated in place)
                metadata : dict, attributes of the data file
    Returns:    data     : 3D np.ndarray (or np.ma.MaskedArray when masking applies)
                           in size of (i_end - i_start, box length, box width)
    """
    data = np.zeros((i_end - i_start,
                     inps.pix_box[3] - inps.pix_box[1],
                     inps.pix_box[2] - inps.pix_box[0]))

    # fast reading for single dataset type: one read() call for all 2D slices
    if (len(inps.dsetFamilyList) == 1
            and inps.key in ['timeseries', 'giantTimeseries', 'ifgramStack',
                             'HDFEOS', 'geometry', 'slc']):
        dset_list = [inps.dset[i] for i in range(i_start, i_end)]
        data = read(inps.file, datasetName=dset_list, box=inps.pix_box)[0]

        if inps.key == 'slc':
            # display the magnitude of the complex SLC data
            data = np.abs(data)

        if inps.key == 'ifgramStack':
            # reference pixel info in unwrapPhase: subtract the per-pair phase
            # at the reference pixel, skipping zero (no-data) pixels
            if inps.dsetFamilyList[0] == 'unwrapPhase' and inps.file_ref_yx:
                ref_y, ref_x = inps.file_ref_yx
                ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
                ref_data = read(inps.file,
                                datasetName=dset_list,
                                box=ref_box,
                                print_msg=False)[0]
                for i in range(data.shape[0]):
                    mask = data[i, :, :] != 0.
                    data[i, mask] -= ref_data[i]

    # slow reading with one 2D matrix at a time
    else:
        vprint('reading data ...')
        prog_bar = ptime.progressBar(maxValue=i_end - i_start, print_msg=inps.print_msg)
        for i in range(i_start, i_end):
            d = read(inps.file,
                     datasetName=inps.dset[i],
                     box=inps.pix_box,
                     print_msg=False)[0]
            data[i - i_start, :, :] = d
            prog_bar.update(i - i_start + 1, suffix=inps.dset[i].split('/')[-1])
        prog_bar.close()

    # ref_date for timeseries: subtract the 2D slice of the chosen reference date
    if inps.ref_date:
        vprint('consider input reference date: ' + inps.ref_date)
        ref_data = read(inps.file,
                        datasetName=inps.ref_date,
                        box=inps.pix_box,
                        print_msg=False)[0]
        data -= ref_data

    # v/dlim, adjust data if all subplots share the same unit
    # This could be:
    # 1) the same type OR
    # 2) velocity or timeseries OR
    # 3) data/model output from load_gbis.py OR
    # 4) horizontal/vertical output from asc_desc2horz_vert.py
    if (len(inps.dsetFamilyList) == 1
            or all(d in inps.dsetFamilyList for d in ['horizontal', 'vertical'])
            or inps.dsetFamilyList == ['data', 'model', 'residual']
            or inps.key in ['velocity', 'timeseries', 'inversion']):
        data, inps = update_data_with_plot_inps(data, metadata, inps)
        # auto display limits from multilooked data (faster), except for
        # un-referenced unwrapPhase and bperp where a common range is meaningless
        if (not inps.vlim
                and not (inps.dsetFamilyList[0].startswith('unwrap') and not inps.file_ref_yx)
                and inps.dsetFamilyList[0] not in ['bperp']):
            data_mli = multilook_data(data, 10, 10)
            inps.vlim = [np.nanmin(data_mli), np.nanmax(data_mli)]
            del data_mli
        inps.dlim = [np.nanmin(data), np.nanmax(data)]

    # multilook the data to be displayed to save memory
    if inps.multilook:
        data = multilook_data(data, inps.multilook_num, inps.multilook_num)

    # mask: apply the common 2D mask to every slice
    if inps.msk is not None:
        vprint('masking data')
        msk = np.tile(inps.msk, (data.shape[0], 1, 1))
        data = np.ma.masked_where(msk == 0., data)
    if inps.zero_mask:
        vprint('masking pixels with zero value')
        data = np.ma.masked_where(data == 0., data)
    return data
def write_geometry(outfile, demFile, incAngleFile, azAngleFile=None, waterMaskFile=None,
                   box=None, xstep=1, ystep=1):
    """Write geometry HDF5 file from list of VRT files.

    Parameters: outfile       : str, path of the output HDF5 file, opened in append
                                mode — assumes height / slantRangeDistance /
                                incidenceAngle (and azimuthAngle / waterMask, when
                                the matching inputs are given) datasets exist, and
                                that the STARTING_RANGE attribute is set
                demFile       : str, path of the DEM VRT file
                incAngleFile  : str, path of the incidence-angle VRT file
                azAngleFile   : str or None, path of the azimuth-angle VRT file
                waterMaskFile : str or None, path of the water-mask VRT file
                box           : tuple of 4 int or None, (x0, y0, x1, y1) subset in pixels
                xstep/ystep   : int, multilook factor in x / y direction
    Returns:    outfile       : str, path of the written HDF5 file
    """
    print('-' * 50)
    # box to gdal arguments
    # link: https://gdal.org/python/osgeo.gdal.Band-class.html#ReadAsArray
    if box is not None:
        kwargs = dict(xoff=box[0], yoff=box[1],
                      win_xsize=box[2] - box[0],
                      win_ysize=box[3] - box[1])
    else:
        kwargs = dict()

    print('writing data to HDF5 file {} with a mode ...'.format(outfile))
    with h5py.File(outfile, 'a') as f:
        # height: read as float32 so no-data pixels can be set to NaN
        ds = gdal.Open(demFile, gdal.GA_ReadOnly)
        bnd = ds.GetRasterBand(1)
        data = np.array(bnd.ReadAsArray(**kwargs), dtype=np.float32)
        data = multilook_data(data, ystep, xstep, method='nearest')
        data[data == bnd.GetNoDataValue()] = np.nan
        f['height'][:, :] = data

        # slantRangeDistance: constant value from the file attribute
        f['slantRangeDistance'][:, :] = float(f.attrs['STARTING_RANGE'])

        # incidenceAngle
        ds = gdal.Open(incAngleFile, gdal.GA_ReadOnly)
        bnd = ds.GetRasterBand(1)
        data = bnd.ReadAsArray(**kwargs)
        data = multilook_data(data, ystep, xstep, method='nearest')
        data[data == bnd.GetNoDataValue()] = np.nan
        f['incidenceAngle'][:, :] = data

        # azimuthAngle (optional)
        if azAngleFile is not None:
            ds = gdal.Open(azAngleFile, gdal.GA_ReadOnly)
            bnd = ds.GetRasterBand(1)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            data[data == bnd.GetNoDataValue()] = np.nan
            # azimuth angle of the line-of-sight vector:
            # ARIA: vector from target to sensor measured from the east in counterclockwise direction
            # ISCE: vector from sensor to target measured from the north in counterclockwise direction
            # convert ARIA format to ISCE format, which is used in mintpy
            data -= 90
            f['azimuthAngle'][:, :] = data

        # waterMask (optional)
        if waterMaskFile is not None:
            # read
            ds = gdal.Open(waterMaskFile, gdal.GA_ReadOnly)
            bnd = ds.GetRasterBand(1)
            water_mask = bnd.ReadAsArray(**kwargs)
            water_mask = multilook_data(water_mask, ystep, xstep, method='nearest')
            water_mask[water_mask == bnd.GetNoDataValue()] = False

            # assign False to invalid pixels based on incAngle data
            # (re-read the incidence angle to locate its no-data pixels)
            ds = gdal.Open(incAngleFile, gdal.GA_ReadOnly)
            bnd = ds.GetRasterBand(1)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            water_mask[data == bnd.GetNoDataValue()] = False

            # write
            f['waterMask'][:, :] = water_mask

    # NOTE(review): "HD5" (vs "HDF5") typo in the message — left as-is (runtime string)
    print('finished writing to HD5 file: {}'.format(outfile))
    return outfile
def read_timeseries_data(inps):
    """Read data of time-series files.

    Parameters: inps : Namespace of input arguments
    Returns:    ts_data : list of 3D np.array in size of (num_date, length, width)
                mask : 2D np.array in size of (length, width)
                inps : Namespace of input arguments, updated with dlim/vlim/ylim
                       and display-unit fields
    """
    ## read list of 3D time-series
    ts_data = []
    for fname in inps.file:
        msg = f'reading timeseries from file {fname}'
        msg += f' with step of {inps.multilook_num} by {inps.multilook_num}' if inps.multilook_num > 1 else ''
        vprint(msg)
        data, atr = readfile.read(fname,
                                  datasetName=inps.date_list,
                                  box=inps.pix_box,
                                  xstep=inps.multilook_num,
                                  ystep=inps.multilook_num)

        # spatial referencing: only when the requested ref pixel differs from
        # the one already applied in the file (REF_Y/X attributes)
        if inps.ref_yx and inps.ref_yx != (int(atr.get('REF_Y', -1)), int(atr.get('REF_X', -1))):
            (ry, rx) = subset_and_multilook_yx(inps.ref_yx, inps.pix_box, inps.multilook_num)
            ref_phase = data[:, ry, rx]
            data -= np.tile(ref_phase.reshape(-1, 1, 1), (1, data.shape[-2], data.shape[-1]))
            vprint('reference to pixel: {}'.format(inps.ref_yx))

        # temporal referencing: subtract the 2D slice of the reference date
        if inps.ref_idx is not None:
            vprint('reference to date: {}'.format(inps.date_list[inps.ref_idx]))
            data -= np.tile(data[inps.ref_idx, :, :], (inps.num_date, 1, 1))

        # Display Unit
        (data,
         inps.disp_unit,
         inps.unit_fac) = pp.scale_data2disp_unit(data,
                                                  metadata=atr,
                                                  disp_unit=inps.disp_unit)
        ts_data.append(data)

    ## mask file: input mask file + non-zero ts pixels - ref_point
    mask = pp.read_mask(inps.file[0],
                        mask_file=inps.mask_file,
                        datasetName='displacement',
                        box=inps.pix_box,
                        xstep=inps.multilook_num,
                        ystep=inps.multilook_num,
                        print_msg=inps.print_msg)[0]
    if mask is None:
        # no mask file available: start from an all-True mask
        mask = np.ones(ts_data[0].shape[-2:], np.bool_)
    ts_stack = np.nansum(ts_data[0], axis=0)
    mask[np.isnan(ts_stack)] = False
    # keep all-zero value for unwrapError time-series
    if atr['UNIT'] not in ['cycle']:
        mask[ts_stack == 0.] = False
    del ts_stack

    # do not mask the reference point (same condition as the referencing above)
    if inps.ref_yx and inps.ref_yx != (int(atr.get('REF_Y', -1)), int(atr.get('REF_X', -1))):
        (ry, rx) = subset_and_multilook_yx(inps.ref_yx, inps.pix_box, inps.multilook_num)
        mask[ry, rx] = True

    ## default vlim
    inps.dlim = [np.nanmin(ts_data[0]), np.nanmax(ts_data[0])]
    if not inps.vlim:
        inps.cmap_lut, inps.vlim = pp.auto_adjust_colormap_lut_and_disp_limit(
            ts_data[0], num_multilook=10, print_msg=inps.print_msg)
    vprint('data range: {} {}'.format(inps.dlim, inps.disp_unit))
    vprint('display range: {} {}'.format(inps.vlim, inps.disp_unit))

    ## default ylim for the point time-series plot
    num_file = len(inps.file)
    if not inps.ylim:
        # multilook for a faster min/max estimate; exclude dates flagged in ex_flag
        ts_data_mli = multilook_data(np.squeeze(ts_data[-1]), 4, 4)
        if inps.zero_first:
            ts_data_mli -= np.tile(ts_data_mli[inps.zero_idx, :, :], (inps.num_date, 1, 1))
        ymin, ymax = (np.nanmin(ts_data_mli[inps.ex_flag != 0]),
                      np.nanmax(ts_data_mli[inps.ex_flag != 0]))
        ybuffer = (ymax - ymin) * 0.05
        inps.ylim = [ymin - ybuffer, ymax + ybuffer]
        if inps.offset:
            # widen the range so offset-shifted curves from multiple files fit
            inps.ylim[1] += inps.offset * (num_file - 1)
        del ts_data_mli

    return ts_data, mask, inps
def read_timeseries_data(inps):
    """Read data of time-series files.

    Parameters: inps : Namespace of input arguments
    Returns:    ts_data : list of 3D np.array in size of (num_date, length, width)
                mask : 2D np.array in size of (length, width)
                inps : Namespace of input arguments, updated with dlim/vlim/ylim
                       and display-unit fields
    """
    # read list of 3D time-series
    ts_data = []
    for fname in inps.file:
        vprint('reading timeseries from file {} ...'.format(fname))
        data, atr = readfile.read(fname, datasetName=inps.date_list, box=inps.pix_box)

        # spatial referencing (best effort); narrowed from a bare `except:`:
        # TypeError when inps.ref_yx is None, IndexError when it is outside the box
        try:
            ref_phase = data[:, inps.ref_yx[0] - inps.pix_box[1],
                             inps.ref_yx[1] - inps.pix_box[0]]
            data -= np.tile(ref_phase.reshape(-1, 1, 1), (1, data.shape[-2], data.shape[-1]))
            vprint('reference to pixel: {}'.format(inps.ref_yx))
        except (TypeError, IndexError):
            pass

        # temporal referencing: subtract the 2D slice of the reference date
        vprint('reference to date: {}'.format(inps.date_list[inps.ref_idx]))
        data -= np.tile(data[inps.ref_idx, :, :], (inps.num_date, 1, 1))

        # Display Unit
        (data,
         inps.disp_unit,
         inps.unit_fac) = pp.scale_data2disp_unit(data,
                                                  metadata=atr,
                                                  disp_unit=inps.disp_unit)
        ts_data.append(data)

    # Mask file: input mask file + non-zero ts pixels - ref_point
    mask = np.ones(ts_data[0].shape[-2:], np.bool_)
    msk = pp.read_mask(inps.file[0],
                       mask_file=inps.mask_file,
                       datasetName='displacement',
                       box=inps.pix_box,
                       print_msg=inps.print_msg)[0]
    # guard: read_mask() can return None when no mask file is found;
    # the original unguarded `mask[msk == 0.]` would crash in that case
    if msk is not None:
        mask[msk == 0.] = False
    del msk
    ts_stack = np.sum(ts_data[0], axis=0)
    mask[ts_stack == 0.] = False
    mask[np.isnan(ts_stack)] = False
    del ts_stack

    # do not mask the reference point (same best-effort guard as above)
    try:
        mask[inps.ref_yx[0] - inps.pix_box[1],
             inps.ref_yx[1] - inps.pix_box[0]] = True
    except (TypeError, IndexError):
        pass

    # default vlim
    inps.dlim = [np.nanmin(ts_data[0]), np.nanmax(ts_data[0])]
    if not inps.vlim:
        # multilook only when needed (the original computed this unconditionally);
        # exclude dates flagged in ex_flag from the min/max
        ts_data_mli = multilook_data(np.squeeze(ts_data[0]), 10, 10)
        inps.vlim = [np.nanmin(ts_data_mli[inps.ex_flag != 0]),
                     np.nanmax(ts_data_mli[inps.ex_flag != 0])]
        del ts_data_mli
    vprint('data range: {} {}'.format(inps.dlim, inps.disp_unit))
    vprint('display range: {} {}'.format(inps.vlim, inps.disp_unit))

    # default ylim for the point time-series plot
    num_file = len(inps.file)
    if not inps.ylim:
        ts_data_mli = multilook_data(np.squeeze(ts_data[-1]), 4, 4)
        if inps.zero_first:
            ts_data_mli -= np.tile(ts_data_mli[inps.zero_idx, :, :], (inps.num_date, 1, 1))
        ymin, ymax = (np.nanmin(ts_data_mli[inps.ex_flag != 0]),
                      np.nanmax(ts_data_mli[inps.ex_flag != 0]))
        ybuffer = (ymax - ymin) * 0.05
        inps.ylim = [ymin - ybuffer, ymax + ybuffer]
        if inps.offset:
            # widen the range so offset-shifted curves from multiple files fit
            inps.ylim[1] += inps.offset * (num_file - 1)
        del ts_data_mli

    return ts_data, mask, inps
def estimate_phase_elevation_ratio(dem, ts_data, inps):
    """Estimate phase/elevation ratio for each acquisition of timeseries.

    Parameters: dem     : 2D array in size of (          length, width)
                ts_data : 3D array in size of (num_date, length, width)
                inps    : Namespace with mask_file, date_list, poly_order,
                          num_multilook and threshold attributes
    Returns:    X       : 2D array in size of (poly_num+1, num_date),
                          polynomial coefficients per date; dates whose |corr|
                          with the DEM is below inps.threshold are zeroed out
    """
    num_date = ts_data.shape[0]

    # prepare phase and elevation data: mask out invalid pixels in both
    print('reading mask from file: ' + inps.mask_file)
    mask = readfile.read(inps.mask_file, datasetName='mask')[0]
    dem = mask_matrix(np.array(dem), mask)
    ts_data = mask_matrix(np.array(ts_data), mask)

    # display
    # 1. effect of multilooking --> narrow phase range --> better ratio estimation
    debug_mode = False
    if debug_mode:
        import matplotlib.pyplot as plt
        #d_index = np.argmax(topo_trop_corr)
        d_index = 47  # hard-coded date index for the debug figure
        data = ts_data[d_index, :, :]
        title = inps.date_list[d_index]
        fig = plt.figure()
        plt.plot(dem[~np.isnan(dem)],
                 data[~np.isnan(dem)],
                 '.', label='Number of Looks = 1')
        mli_dem = multilook_data(dem, 8, 8)
        mli_data = multilook_data(data, 8, 8)
        plt.plot(mli_dem[~np.isnan(mli_dem)],
                 mli_data[~np.isnan(mli_dem)],
                 '.', label='Number of Looks = 8')
        plt.legend()
        plt.xlabel('Elevation (m)')
        plt.ylabel('Range Change (m)')
        plt.title(title)
        out_file = 'phase_elevation_ratio_{}.png'.format(title)
        plt.savefig(out_file, bbox_inches='tight', transparent=True, dpi=300)
        print('save to {}'.format(out_file))
        #plt.show()

    print('----------------------------------------------------------')
    print('Empirical tropospheric delay correction based on phase/elevation ratio (Doin et al., 2009)')
    print('polynomial order: {}'.format(inps.poly_order))

    # multilook for the estimation only — the correction itself is applied at full resolution
    if inps.num_multilook > 1:
        print('number of multilook: {} (multilook data for estimation only)'.format(inps.num_multilook))
        mask = multilook_data(mask, inps.num_multilook, inps.num_multilook)
        dem = multilook_data(dem, inps.num_multilook, inps.num_multilook)
        ts_data = multilook_data(ts_data, inps.num_multilook, inps.num_multilook)

    if inps.threshold > 0.:
        print('correlation threshold: {}'.format(inps.threshold))

    # flatten to valid (non-NaN elevation) pixels only
    mask_nan = ~np.isnan(dem)
    dem = dem[mask_nan]
    ts_data = ts_data[:, mask_nan]

    # calculate correlation coefficient between DEM and each date's phase
    print('----------------------------------------------------------')
    print('calculate correlation of DEM with each acquisition')
    topo_trop_corr = np.zeros(num_date, np.float32)
    for i in range(num_date):
        phase = ts_data[i, :]
        cc = 0.
        # skip all-zero dates (e.g. the reference date) — corr stays 0
        if np.count_nonzero(phase) > 0:
            comp_data = np.vstack((dem, phase))
            cc = np.corrcoef(comp_data)[0, 1]
        topo_trop_corr[i] = cc
        print('{}: {:>5.2f}'.format(inps.date_list[i], cc))
    topo_trop_corr = np.abs(topo_trop_corr)
    print('average correlation magnitude: {:>5.2f}'.format(np.nanmean(topo_trop_corr)))

    # estimate ratio parameter via least squares (pseudo-inverse of design matrix)
    print('----------------------------------------------------------')
    print('estimate phase/elevation ratio')
    A = design_matrix(dem=dem, poly_order=inps.poly_order)
    X = np.dot(np.linalg.pinv(A), ts_data.T)
    X = np.array(X, dtype=np.float32)
    # zero out dates with weak phase-elevation correlation
    X[:, topo_trop_corr < inps.threshold] = 0.
    return X
def estimate_phase_elevation_ratio(dem, ts_data, inps):
    """Estimate the phase/elevation ratio for every acquisition of the time series.

    Parameters: dem     : 2D array in size of (          length, width)
                ts_data : 3D array in size of (num_date, length, width)
                inps    : Namespace
    Returns:    X       : 2D array in size of (poly_num+1, num_date)
    """
    num_date = ts_data.shape[0]

    # mask out invalid pixels in both elevation and phase
    print('reading mask from file: '+inps.mask_file)
    msk = readfile.read(inps.mask_file, datasetName='mask')[0]
    dem = mask_matrix(np.array(dem), msk)
    ts_data = mask_matrix(np.array(ts_data), msk)

    # optional debug figure showing that multilooking narrows the phase
    # range --> better ratio estimation
    debug_mode = False
    if debug_mode:
        import matplotlib.pyplot as plt
        dbg_idx = 47
        dbg_data = ts_data[dbg_idx, :, :]
        dbg_title = inps.date_list[dbg_idx]
        plt.figure()
        valid = ~np.isnan(dem)
        plt.plot(dem[valid], dbg_data[valid], '.', label='Number of Looks = 1')
        dem_mli = multilook_data(dem, 8, 8)
        data_mli = multilook_data(dbg_data, 8, 8)
        valid_mli = ~np.isnan(dem_mli)
        plt.plot(dem_mli[valid_mli], data_mli[valid_mli], '.', label='Number of Looks = 8')
        plt.legend()
        plt.xlabel('Elevation (m)')
        plt.ylabel('Range Change (m)')
        plt.title(dbg_title)
        fig_file = 'phase_elevation_ratio_{}.png'.format(dbg_title)
        plt.savefig(fig_file, bbox_inches='tight', transparent=True, dpi=300)
        print('save to {}'.format(fig_file))

    print('----------------------------------------------------------')
    print('Empirical tropospheric delay correction based on phase/elevation ratio (Doin et al., 2009)')
    print('polynomial order: {}'.format(inps.poly_order))

    # multilook for the estimation only
    if inps.num_multilook > 1:
        print('number of multilook: {} (multilook data for estimation only)'.format(inps.num_multilook))
        nl = inps.num_multilook
        msk = multilook_data(msk, nl, nl)
        dem = multilook_data(dem, nl, nl)
        ts_data = multilook_data(ts_data, nl, nl)

    if inps.threshold > 0.:
        print('correlation threshold: {}'.format(inps.threshold))

    # keep only pixels with a valid (non-NaN) elevation
    valid_px = ~np.isnan(dem)
    dem = dem[valid_px]
    ts_data = ts_data[:, valid_px]

    # per-date correlation between phase and elevation
    print('----------------------------------------------------------')
    print('calculate correlation of DEM with each acquisition')
    corr_coef = np.zeros(num_date, np.float32)
    for ii in range(num_date):
        phs = ts_data[ii, :]
        # all-zero dates (e.g. the reference date) keep a correlation of 0
        coef = np.corrcoef(np.vstack((dem, phs)))[0, 1] if np.count_nonzero(phs) > 0 else 0.
        corr_coef[ii] = coef
        print('{}: {:>5.2f}'.format(inps.date_list[ii], coef))
    corr_coef = np.abs(corr_coef)
    print('average correlation magnitude: {:>5.2f}'.format(np.nanmean(corr_coef)))

    # least-squares fit of the polynomial design matrix to each date's phase
    print('----------------------------------------------------------')
    print('estimate phase/elevation ratio')
    G = design_matrix(dem=dem, poly_order=inps.poly_order)
    X = np.array(np.dot(np.linalg.pinv(G), ts_data.T), dtype=np.float32)
    # zero out dates with weak phase-elevation correlation
    X[:, corr_coef < inps.threshold] = 0.
    return X