def plot_bridge(ax, mask_cc_file, bridges):
    """Plot mask of connected components with bridges info
    Parameters: ax           : matplotlib.axes object
                mask_cc_file : string, path of mask cc file
                bridges      : list of dict
    """
    mask_cc, metadata = readfile.read(mask_cc_file)
    num_bridge = len(bridges)

    # plot 1. mask_cc data
    im = ax.imshow(mask_cc)

    # plot 2. bridge data
    for i in range(num_bridge):
        bridge = bridges[i]
        ax.imshow(np.ma.masked_where(~bridge['mask0'], np.zeros(mask_cc.shape)),
                  cmap='gray', alpha=0.3, vmin=0, vmax=1)
        ax.imshow(np.ma.masked_where(~bridge['mask1'], np.zeros(mask_cc.shape)),
                  cmap='gray', alpha=0.3, vmin=0, vmax=1)
        ax.plot([bridge['x0'], bridge['x1']],
                [bridge['y0'], bridge['y1']],
                '-', ms=5, mfc='none')

    ax = pp.auto_flip_direction(metadata, ax=ax, print_msg=False)
    return ax, im
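# Example usage for plot_bridge() (a minimal sketch; the bridge dict keys follow
# the conventions used above, while the file name, mask expressions, and
# coordinate values are hypothetical):
#
#     mask_cc = readfile.read('maskConnComp.h5')[0]
#     bridges = [{'mask0': mask_cc == 1, 'mask1': mask_cc == 2,
#                 'x0': 100, 'y0': 200, 'x1': 150, 'y1': 250}]
#     fig, ax = plt.subplots(figsize=[6, 8])
#     ax, im = plot_bridge(ax, 'maskConnComp.h5', bridges)
#     fig.savefig('maskConnComp_bridge.png', bbox_inches='tight', dpi=300)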
def flip_map():
    global inps, atr
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        inps.flip_lr = False
def water_mask2conn_comp_mask(water_mask_file, ref_yx, out_file='maskConnComp.h5',
                              min_num_pixel=5e4, display=False):
    """Generate connected component mask file from water mask file
    Parameters: water_mask_file : str, path of water mask file
                ref_yx          : tuple of 2 int, row/col number of reference point
                out_file        : str, filename of output connected components mask
                min_num_pixel   : float, min number of pixels to be identified as conn comp
                display         : bool, display generated conn comp mask
    Returns:    out_file        : str, filename of output connected components mask
    """
    print('-' * 50)
    print('generate connected component mask from water mask file: ', water_mask_file)
    water_mask, atr = readfile.read(water_mask_file)
    mask_cc = np.zeros(water_mask.shape, dtype=np.int16)

    # first conn comp - reference conn comp
    num_cc = 1
    label_mask = ndimage.label(water_mask)[0]
    mask_cc += label_mask == label_mask[ref_yx[0], ref_yx[1]]

    # all the other conn comps
    water_mask ^= mask_cc == mask_cc[ref_yx[0], ref_yx[1]]
    mask_ccs = ut.get_all_conn_components(water_mask, min_num_pixel=min_num_pixel)
    if mask_ccs:
        for mask_cci in mask_ccs:
            num_cc += 1
            mask_cc += mask_cci * num_cc

    # write file
    atr['FILE_TYPE'] = 'mask'
    atr['REF_Y'] = str(ref_yx[0])
    atr['REF_X'] = str(ref_yx[1])
    writefile.write(mask_cc, out_file=out_file, metadata=atr)

    # plot
    out_img = '{}.png'.format(os.path.splitext(out_file)[0])
    fig, ax = plt.subplots(figsize=[6, 8])
    im = ax.imshow(mask_cc)

    # colorbar
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", "3%", pad="3%")
    cbar = plt.colorbar(im, cax=cax, ticks=np.arange(num_cc + 1))

    ax = pp.auto_flip_direction(atr, ax=ax, print_msg=False)
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('save figure to {}'.format(out_img))

    if display:
        plt.show()
    return out_file
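# Example usage for water_mask2conn_comp_mask() (a minimal sketch; the water
# mask file name and reference pixel below are hypothetical):
#
#     out_file = water_mask2conn_comp_mask('waterMask.h5',
#                                          ref_yx=(120, 340),
#                                          out_file='maskConnComp.h5',
#                                          min_num_pixel=5e4,
#                                          display=False)
#
# The reference conn comp always gets label 1; any remaining land region with
# more than min_num_pixel pixels gets the next integer label.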
def read_init_info(inps):
    # Time Series Info
    ts_file0 = inps.timeseries_file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open()

    if not inps.file_label:
        inps.file_label = [str(i) for i in list(range(len(inps.timeseries_file)))]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info
    inps.date_list = obj.dateList
    if inps.start_date:
        inps.date_list = [i for i in inps.date_list if int(i) >= int(inps.start_date)]
    if inps.end_date:
        inps.date_list = [i for i in inps.date_list if int(i) <= int(inps.end_date)]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)
    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    if obj.metadata['REF_DATE'] in inps.date_list:
        inps.ref_idx = inps.date_list.index(obj.metadata['REF_DATE'])
    else:
        inps.ref_idx = 0
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    if not inps.init_idx:
        if inps.ref_idx < inps.num_date / 2.:
            inps.init_idx = -3
        else:
            inps.init_idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Read Error List
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fileContent[:, 1].astype(np.float32) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./INPUTS/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)

    # Out message
    data_box = (0, 0, obj.width, obj.length)
    print('data   coverage in y/x: ' + str(data_box))
    print('subset coverage in y/x: ' + str(inps.pix_box))
    print('data   coverage in lat/lon: ' + str(inps.coord.box_pixel2geo(data_box)))
    print('subset coverage in lat/lon: ' + str(inps.geo_box))
    print('------------------------------------------------------------------------')

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1],
                                           print_msg=False)[0:2]
    if not inps.ref_yx:
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0], inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0], inps.yx[1],
                                         print_msg=False)[0:2]
    except Exception:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_v = radian;
    # otherwise set disp_unit_v = disp_unit
    inps.disp_unit_v = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(inps.disp_unit))
        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_v = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_v)
    return inps, atr
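# Sketch of the range2phase conversion computed above (a standalone numpy
# illustration; the wavelength value is a hypothetical C-band number, not read
# from any file): displacement d in the display unit maps to phase via
# phi = range2phase * d, with the -4*pi/wavelength factor rescaled per unit.
#
#     import numpy as np
#     wavelength = 0.0555                              # [m], hypothetical
#     range2phase = -4. * np.pi / wavelength / 100.    # [rad per cm], 'cm' unit
#     # 1 cm of range displacement ~ -2.26 rad of phase here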
def main(iargs=None):
    # Actual code.
    inps = cmd_line_parse(iargs)

    # Time Series Info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is ' + k + ': ' + inps.timeseries_file)
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    obj = timeseries(inps.timeseries_file)
    obj.open()
    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['GIANT_TS']:
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = obj.dateList
    date_num = len(dateList)
    inps.dates, inps.yearList = ptime.date_list2vector(dateList)

    # Read exclude dates
    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []
        if input_ex_date:
            for ex_date in input_ex_date:
                if os.path.isfile(ex_date):
                    ex_date = ptime.read_date_list(ex_date)
                else:
                    ex_date = [ptime.yyyymmdd(ex_date)]
                inps.ex_date_list += list(set(ex_date) - set(inps.ex_date_list))

            # delete dates not existed in input file
            inps.ex_date_list = sorted(list(set(inps.ex_date_list).intersection(dateList)))
            inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
            inps.ex_idx_list = sorted([dateList.index(i) for i in inps.ex_date_list])
            print('exclude date:' + str(inps.ex_date_list))

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        if inps.ex_date_list:
            inps.zero_idx = min(list(set(range(date_num)) - set(inps.ex_idx_list)))
        else:
            inps.zero_idx = 0

    # File Size
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])
    print('data size in [y0,y1,x0,x1]: [%d, %d, %d, %d]' % (0, length, 0, width))
    try:
        ullon = float(atr['X_FIRST'])
        ullat = float(atr['Y_FIRST'])
        lon_step = float(atr['X_STEP'])
        lat_step = float(atr['Y_STEP'])
        lrlon = ullon + width * lon_step
        lrlat = ullat + length * lat_step
        print('data size in [lat0,lat1,lon0,lon1]: [%.4f, %.4f, %.4f, %.4f]' %
              (lrlat, ullat, ullon, lrlon))
    except Exception:
        pass

    # Initial Pixel Coord
    if inps.lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.lalo[1] - ullon) / lon_step + 0.5)
        inps.yx = [y, x]

    if inps.ref_lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.ref_lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.ref_lalo[1] - ullon) / lon_step + 0.5)
        inps.ref_yx = [y, x]

    # Display Unit
    if inps.disp_unit == 'cm':
        inps.unit_fac = 100.0
    elif inps.disp_unit == 'm':
        inps.unit_fac = 1.0
    elif inps.disp_unit == 'dm':
        inps.unit_fac = 10.0
    elif inps.disp_unit == 'mm':
        inps.unit_fac = 1000.0
    elif inps.disp_unit == 'km':
        inps.unit_fac = 0.001
    else:
        raise ValueError('Un-recognized unit: ' + inps.disp_unit)

    if k in ['GIANT_TS']:
        print('data unit: mm')
        inps.unit_fac *= 0.001
    else:
        print('data unit: m')
    print('display unit: ' + inps.disp_unit)

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        inps.flip_lr = False

    # Mask file
    if not inps.mask_file:
        if os.path.basename(inps.timeseries_file).startswith('geo_'):
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']
        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except Exception:
            inps.mask_file = None
    try:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except Exception:
        mask = None
        print('No mask used.')

    # Initial Map
    d_v = readfile.read(inps.timeseries_file,
                        datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(inps.timeseries_file,
                                     datasetName=inps.ref_date)[0] * inps.unit_fac
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim
    print('Initial data range: ' + str(data_lim))
    print('Display data range: ' + str(inps.ylim_mat))

    # Fig 1 - Cumulative Displacement Map
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    fig_v = plt.figure('Cumulative Displacement')

    # Axes 1
    #ax_v = fig_v.add_subplot(111)
    #ax_v.set_position([0.125, 0.25, 0.75, 0.65])
    # This works on OSX. Original worked on Linux.
    # rect[left, bottom, width, height]
    ax_v = fig_v.add_axes([0.125, 0.25, 0.75, 0.65])
    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
        ax_v = pp.plot_dem_yx(ax_v, dem)
    img = ax_v.imshow(d_v, cmap=inps.colormap, clim=inps.ylim_mat,
                      interpolation='nearest')

    # Reference Pixel
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        ax_v.plot(inps.ref_yx[1], inps.ref_yx[0], 'ks', ms=6)
    else:
        try:
            ax_v.plot(int(atr['REF_X']), int(atr['REF_Y']), 'ks', ms=6)
        except Exception:
            pass

    # Initial Pixel
    if inps.yx:
        ax_v.plot(inps.yx[1], inps.yx[0], 'ro', markeredgecolor='black')

    ax_v.set_xlim(0, np.shape(d_v)[1])
    ax_v.set_ylim(np.shape(d_v)[0], 0)

    # Status Bar
    def format_coord(x, y):
        col = int(x + 0.5)
        row = int(y + 0.5)
        if 0 <= col < width and 0 <= row < length:
            z = d_v[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return 'x=%.0f, y=%.0f, value=%.4f, lon=%.4f, lat=%.4f' % (x, y, z, lon, lat)
            except Exception:
                return 'x=%.0f, y=%.0f, value=%.4f' % (x, y, z)
    ax_v.format_coord = format_coord

    # Title and Axis Label
    ax_v.set_title('N = %d, Time = %s' %
                   (inps.epoch_num, inps.dates[inps.epoch_num].strftime('%Y-%m-%d')))
    if 'Y_FIRST' not in atr.keys():
        ax_v.set_xlabel('Range')
        ax_v.set_ylabel('Azimuth')

    # Flip axis
    if inps.flip_lr:
        ax_v.invert_xaxis()
        print('flip map left and right')
    if inps.flip_ud:
        ax_v.invert_yaxis()
        print('flip map up and down')

    # Colorbar
    cbar = fig_v.colorbar(img, orientation='vertical')
    cbar.set_label('Displacement [%s]' % inps.disp_unit)

    # Axes 2 - Time Slider
    ax_time = fig_v.add_axes([0.125, 0.1, 0.6, 0.07],
                             facecolor='lightgoldenrodyellow', yticks=[])
    tslider = Slider(ax_time, 'Years', inps.yearList[0], inps.yearList[-1],
                     valinit=inps.yearList[inps.epoch_num])
    tslider.ax.bar(inps.yearList, np.ones(len(inps.yearList)),
                   facecolor='black', width=0.01, ecolor=None)
    tslider.ax.set_xticks(np.round(np.linspace(inps.yearList[0], inps.yearList[-1],
                                               num=5) * 100) / 100)

    def time_slider_update(val):
        """Update Displacement Map using Slider"""
        timein = tslider.val
        idx_nearest = np.argmin(np.abs(np.array(inps.yearList) - timein))
        ax_v.set_title('N = %d, Time = %s' %
                       (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
        d_v = h5[dateList[idx_nearest]][:] * inps.unit_fac
        if inps.ref_date:
            d_v -= inps.ref_d_v
        if mask is not None:
            d_v = mask_matrix(d_v, mask)
        if inps.ref_yx:
            d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        img.set_data(d_v)
        fig_v.canvas.draw()
    tslider.on_changed(time_slider_update)

    # Fig 2 - Time Series Displacement - Point
    fig_ts = plt.figure('Time series - point', figsize=inps.fig_size)
    ax_ts = fig_ts.add_subplot(111)

    # Read Error List
    inps.error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fileContent[:, 1].astype(np.float32) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = np.array([e_ts[i] for i in inps.ex_idx_list])
            inps.error_ts = np.array([e_ts[i] for i in range(date_num)
                                      if i not in inps.ex_idx_list])

    def plot_timeseries_errorbar(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([dis_ts[i] for i in range(date_num)
                             if i not in inps.ex_idx_list])
            # Plot excluded dates
            (_, caps, _) = ax.errorbar(inps.ex_dates, ex_d_ts, yerr=inps.ex_error_ts,
                                       fmt='-o', color='gray', ms=inps.marker_size,
                                       lw=0, alpha=1, mfc='gray',
                                       elinewidth=inps.edge_width, ecolor='black',
                                       capsize=inps.marker_size * 0.5)
            for cap in caps:
                cap.set_markeredgewidth(inps.edge_width)
        # Plot kept dates
        (_, caps, _) = ax.errorbar(dates, d_ts, yerr=inps.error_ts, fmt='-o',
                                   ms=inps.marker_size, lw=0, alpha=1,
                                   elinewidth=inps.edge_width, ecolor='black',
                                   capsize=inps.marker_size * 0.5)
        for cap in caps:
            cap.set_markeredgewidth(inps.edge_width)
        return ax

    def plot_timeseries_scatter(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([dis_ts[i] for i in range(date_num)
                             if i not in inps.ex_idx_list])
            # Plot excluded dates
            ax.scatter(inps.ex_dates, ex_d_ts, s=inps.marker_size**2,
                       color='gray')  # color='crimson'
        # Plot kept dates
        ax.scatter(dates, d_ts, s=inps.marker_size**2)
        return ax

    def update_timeseries(ax_ts, y, x):
        """Plot point time series displacement at pixel [y, x]"""
        d_ts = read_timeseries_yx(inps.timeseries_file, y, x,
                                  ref_yx=inps.ref_yx) * inps.unit_fac
        # for date in dateList:
        #     d = h5[k].get(date)[y, x]
        #     if inps.ref_yx:
        #         d -= h5[k].get(date)[inps.ref_yx[0], inps.ref_yx[1]]
        #     d_ts.append(d * inps.unit_fac)

        if inps.zero_first:
            d_ts -= d_ts[inps.zero_idx]

        ax_ts.cla()
        if inps.error_file:
            ax_ts = plot_timeseries_errorbar(ax_ts, d_ts, inps)
        else:
            ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)

        if inps.ylim:
            ax_ts.set_ylim(inps.ylim)
        for tick in ax_ts.yaxis.get_major_ticks():
            tick.label.set_fontsize(inps.font_size)

        # Title
        title_ts = 'Y = %d, X = %d' % (y, x)
        try:
            lat = ullat + y * lat_step
            lon = ullon + x * lon_step
            title_ts += ', lat = %.4f, lon = %.4f' % (lat, lon)
        except Exception:
            pass
        if inps.disp_title:
            ax_ts.set_title(title_ts)

        ax_ts = pp.auto_adjust_xaxis_date(ax_ts, inps.yearList,
                                          fontSize=inps.font_size)[0]
        ax_ts.set_xlabel('Time', fontsize=inps.font_size)
        ax_ts.set_ylabel('Displacement [%s]' % inps.disp_unit,
                         fontsize=inps.font_size)
        fig_ts.canvas.draw()

        # Print to terminal
        print('\n---------------------------------------')
        print(title_ts)
        print(d_ts)

        # Slope estimation
        if inps.ex_date_list:
            inps.yearList_kept = [inps.yearList[i] for i in range(date_num)
                                  if i not in inps.ex_idx_list]
            d_ts_kept = [d_ts[i] for i in range(date_num)
                         if i not in inps.ex_idx_list]
            d_slope = stats.linregress(np.array(inps.yearList_kept),
                                       np.array(d_ts_kept))
        else:
            d_slope = stats.linregress(np.array(inps.yearList), np.array(d_ts))
        print('linear velocity: %.2f +/- %.2f [%s/yr]' %
              (d_slope[0], d_slope[4], inps.disp_unit))
        return d_ts

    # Initial point time series plot
    if inps.yx:
        d_ts = update_timeseries(ax_ts, inps.yx[0], inps.yx[1])
    else:
        d_ts = np.zeros(len(inps.yearList))
        ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)

    def plot_timeseries_event(event):
        """Event function to get y/x from button press"""
        if event.inaxes != ax_v:
            return
        ii = int(event.ydata + 0.5)
        jj = int(event.xdata + 0.5)
        d_ts = update_timeseries(ax_ts, ii, jj)

    # Output
    if inps.save_fig and inps.yx:
        print('save info for pixel ' + str(inps.yx))
        if not inps.fig_base:
            inps.fig_base = 'y%d_x%d' % (inps.yx[0], inps.yx[1])

        # TXT - point time series
        outName = inps.fig_base + '_ts.txt'
        header_info = 'timeseries_file=' + inps.timeseries_file
        header_info += '\ny=%d, x=%d' % (inps.yx[0], inps.yx[1])
        try:
            lat = ullat + inps.yx[0] * lat_step
            lon = ullon + inps.yx[1] * lon_step
            header_info += '\nlat=%.6f, lon=%.6f' % (lat, lon)
        except Exception:
            pass
        if inps.ref_yx:
            header_info += '\nreference pixel: y=%d, x=%d' % (inps.ref_yx[0],
                                                              inps.ref_yx[1])
        else:
            header_info += '\nreference pixel: y=%s, x=%s' % (atr['REF_Y'],
                                                              atr['REF_X'])
        header_info += '\nunit=m/yr'
        np.savetxt(outName,
                   list(zip(np.array(dateList),
                            np.array(d_ts) / inps.unit_fac)),
                   fmt='%s', delimiter=' ', header=header_info)
        print('save time series displacement in meter to ' + outName)

        # Figure - point time series
        outName = inps.fig_base + '_ts.pdf'
        fig_ts.savefig(outName, bbox_inches='tight', transparent=True,
                       dpi=inps.fig_dpi)
        print('save time series plot to ' + outName)

        # Figure - map
        outName = inps.fig_base + '_' + dateList[inps.epoch_num] + '.png'
        fig_v.savefig(outName, bbox_inches='tight', transparent=True,
                      dpi=inps.fig_dpi)
        print('save map plot to ' + outName)

    # Final linking of the canvas to the plots.
    cid = fig_v.canvas.mpl_connect('button_press_event', plot_timeseries_event)
    if inps.disp_fig:
        plt.show()
    fig_v.canvas.mpl_disconnect(cid)
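# Example invocation of main() (a minimal sketch; 'timeseries.h5' is a
# hypothetical input file, and the signature main(iargs=None) ->
# cmd_line_parse(iargs) suggests a list of CLI-style arguments is accepted;
# any further flags depend on the parser definition, which is not shown here):
#
#     main(['timeseries.h5'])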
def get_common_region_int_ambiguity(ifgram_file, cc_mask_file, water_mask_file=None,
                                    num_sample=100, dsNameIn='unwrapPhase'):
    """Solve the phase unwrapping integer ambiguity for the common regions among all interferograms
    Parameters: ifgram_file     : str, path of interferogram stack file
                cc_mask_file    : str, path of common connected components file
                water_mask_file : str, path of water mask file
                num_sample      : int, number of pixel sampled for each region
                dsNameIn        : str, dataset name of the unwrap phase to be corrected
    Returns:    common_regions  : list of skimage.measure._regionprops._RegionProperties object
                    modified by adding two more variables:
                    sample_coords : 2D np.ndarray in size of (num_sample, 2) in int64 format
                    int_ambiguity : 1D np.ndarray in size of (num_ifgram,) in int format
    """
    print('-' * 50)
    print('calculating the integer ambiguity for the common regions defined in', cc_mask_file)

    # stack info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = matrix(ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsNameIn,
                                              dropIfgram=True).reshape(num_ifgram, -1)

    # prepare common label
    print('read common mask from', cc_mask_file)
    cc_mask = readfile.read(cc_mask_file)[0]
    if water_mask_file is not None and os.path.isfile(water_mask_file):
        water_mask = readfile.read(water_mask_file)[0]
        print('refine common mask based on water mask file', water_mask_file)
        cc_mask[water_mask == 0] = 0

    label_img, num_label = connectComponent.get_large_label(cc_mask, min_area=2.5e3,
                                                            print_msg=True)
    common_regions = measure.regionprops(label_img)
    print('number of common regions:', num_label)

    # add sample_coords / int_ambiguity
    print('number of samples per region:', num_sample)
    print('solving the phase-unwrapping integer ambiguity for {}'.format(dsNameIn))
    print('\tbased on the closure phase of interferogram triplets (Yunjun et al., 2019)')
    print('\tusing the L1-norm regularized least squares approximation (LASSO) ...')
    for i in range(num_label):
        common_reg = common_regions[i]
        # sample_coords
        idx = sorted(np.random.choice(common_reg.area, num_sample, replace=False))
        common_reg.sample_coords = common_reg.coords[idx, :].astype(int)

        # solve for int_ambiguity
        U = np.zeros((num_ifgram, num_sample))
        if common_reg.label == label_img[stack_obj.refY, stack_obj.refX]:
            print('{}/{} skip calculation for the reference region'.format(i + 1, num_label))
        else:
            prog_bar = ptime.progressBar(maxValue=num_sample,
                                         prefix='{}/{}'.format(i + 1, num_label))
            for j in range(num_sample):
                # read unwrap phase
                y, x = common_reg.sample_coords[j, :]
                unw = ifginv.read_unwrap_phase(stack_obj,
                                               box=(x, y, x + 1, y + 1),
                                               ref_phase=ref_phase,
                                               unwDatasetName=dsNameIn,
                                               dropIfgram=True,
                                               print_msg=False).reshape(num_ifgram, -1)

                # calculate closure_int
                closure_pha = np.dot(C, unw)
                closure_int = matrix(np.round((closure_pha - ut.wrap(closure_pha)) / (2. * np.pi)))

                # solve for U
                U[:, j] = np.round(l1regls(-C, closure_int,
                                           alpha=1e-2, show_progress=0)).flatten()
                prog_bar.update(j + 1, every=5)
            prog_bar.close()
        # add int_ambiguity
        common_reg.int_ambiguity = np.median(U, axis=1)
        common_reg.date12_list = date12_list

    # sort regions by size to facilitate the region matching later
    common_regions.sort(key=lambda x: x.area, reverse=True)

    # plot sample result
    fig_size = pp.auto_figure_size(label_img.shape, disp_cbar=False)
    fig, ax = plt.subplots(figsize=fig_size)
    ax.imshow(label_img, cmap='jet')
    for common_reg in common_regions:
        ax.plot(common_reg.sample_coords[:, 1],
                common_reg.sample_coords[:, 0], 'k.', ms=2)
    pp.auto_flip_direction(stack_obj.metadata, ax, print_msg=False)
    out_img = 'common_region_sample.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('saved common regions and sample pixels to file', out_img)

    return common_regions
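# Illustration of the closure-integer step above (a self-contained numpy sketch
# with made-up phase values, not the MintPy API): for a single triplet of
# interferograms ij, jk, ik, the closure phase is unw_ij + unw_jk - unw_ik;
# subtracting its wrapped value and dividing by 2*pi isolates the integer
# number of cycles, which l1regls() then attributes to individual interferograms.
#
#     import numpy as np
#     C = np.array([[1., 1., -1.]])                 # design matrix, one triplet
#     unw = np.array([2.1, 3.0, 5.1 + 2 * np.pi])   # unw_ik off by one cycle
#     closure_pha = C.dot(unw)
#     wrap = lambda p: (p + np.pi) % (2 * np.pi) - np.pi
#     closure_int = np.round((closure_pha - wrap(closure_pha)) / (2. * np.pi))
#     # closure_int -> [-1.], i.e. one integer cycle to be distributed among
#     # the three interferograms by the L1-regularized inversion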
def detect_unwrap_error(ifgram_file, mask_file, mask_cc_file='maskConnComp.h5',
                        unwDatasetName='unwrapPhase', cutoff=1., min_num_pixel=1e4):
    """Detect unwrapping error based on phase closure and extract coherent conn comps
    based on their histogram distribution

    Check: https://en.wikipedia.org/wiki/Otsu%27s_method
    from skimage.filters import threshold_otsu

    Parameters: ifgram_file   : string, path of ifgram stack file
                mask_file     : string, path of mask file, e.g. waterMask.h5, maskConnComp.h5
                mask_cc_file  : string, path of mask file for coherent conn comps
                cutoff        : float, cutoff value for the mean number of nonzero phase closure
                                to be selected as coherent conn comps candidate
                min_num_pixel : float, min number of pixels left after morphology operation
                                to be determined as coherent conn comps
    Returns:    mask_cc_file  : string, path of mask file for coherent conn comps
    """
    print('-' * 50)
    print('detect unwrapping error based on phase closure')
    obj = ifgramStack(ifgram_file)
    obj.open(print_msg=False)
    C = obj.get_design_matrix4triplet(obj.get_date12_list(dropIfgram=False))

    num_nonzero_closure = get_nonzero_phase_closure(ifgram_file,
                                                    unwDatasetName=unwDatasetName)

    # get histogram of num_nonzero_phase_closure
    mask = readfile.read(mask_file)[0]
    mask *= num_nonzero_closure != 0.

    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=[12, 4])
    num4disp = np.array(num_nonzero_closure, dtype=np.float32)
    num4disp[mask == 0] = np.nan
    im = ax[0].imshow(num4disp)
    ax[0].set_xlabel('Range [pix.]')
    ax[0].set_ylabel('Azimuth [pix.]')
    ax[0] = pp.auto_flip_direction(obj.metadata, ax=ax[0], print_msg=False)
    cbar = fig.colorbar(im, ax=ax[0])
    cbar.set_label('number of non-zero phase closure')

    print('2. extract coherent conn comps with unwrap error based on histogram distribution')
    max_nonzero_closure = int(np.max(num_nonzero_closure[mask]))
    bin_value, bin_edge = ax[1].hist(num_nonzero_closure[mask].flatten(),
                                     range=(0, max_nonzero_closure),
                                     log=True,
                                     bins=max_nonzero_closure)[0:2]
    ax[1].set_xlabel('number of non-zero phase closure')
    ax[1].set_ylabel('number of pixels')

    if 'Closure' not in unwDatasetName:
        print('eliminate pixels with number of nonzero phase closure < 5% of total phase closure number')
        print('\twhich can be corrected using phase closure alone.')
        bin_value[:int(C.shape[0] * 0.05)] = 0.
    bin_value_thres = ut.median_abs_deviation_threshold(bin_value, cutoff=cutoff)
    print('median abs deviation cutoff value: {}'.format(cutoff))

    plt.plot([0, max_nonzero_closure], [bin_value_thres, bin_value_thres])
    out_img = 'numUnwErr_stat.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('save unwrap error detection result to {}'.format(out_img))

    # histogram --> candidates of coherent conn comps --> mask_cc
    # find pixel clusters sharing similar number of non-zero phase closure
    print('searching connected components with more than {} pixels'.format(min_num_pixel))
    bin_label, n_bins = ndimage.label(bin_value > bin_value_thres)

    mask_cc = np.zeros(num_nonzero_closure.shape, dtype=np.int16)
    # first conn comp - reference conn comp with zero non-zero phase closure
    num_cc = 1
    mask_cc1 = num_nonzero_closure == 0.
    mask_cc1s = ut.get_all_conn_components(mask_cc1, min_num_pixel=min_num_pixel)
    for mask_cc1 in mask_cc1s:
        mask_cc += mask_cc1

    # other conn comps - target conn comps to be corrected for unwrap error
    for i in range(n_bins):
        idx = np.where(bin_label == i + 1)[0]
        mask_cci0 = np.multiply(num_nonzero_closure >= bin_edge[idx[0]],
                                num_nonzero_closure < bin_edge[idx[-1] + 1])
        mask_ccis = ut.get_all_conn_components(mask_cci0, min_num_pixel=min_num_pixel)
        if mask_ccis:
            for mask_cci in mask_ccis:
                num_cc += 1
                mask_cc += mask_cci * num_cc

                fig, ax = plt.subplots(nrows=1, ncols=2, figsize=[8, 4])
                im = ax[0].imshow(mask_cci0)
                im = ax[1].imshow(mask_cci)
                fig.savefig('mask_cc{}.png'.format(num_cc),
                            bbox_inches='tight', transparent=True, dpi=300)

    # save to hdf5 file
    num_bridge = num_cc - 1
    atr = dict(obj.metadata)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 1
    writefile.write(mask_cc, out_file=mask_cc_file, metadata=atr)

    # plot and save figure to img file
    out_img = '{}.png'.format(os.path.splitext(mask_cc_file)[0])
    fig, ax = plt.subplots(figsize=[6, 8])
    im = ax.imshow(mask_cc)
    ax = pp.auto_flip_direction(atr, ax=ax, print_msg=False)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", "3%", pad="3%")
    cbar = plt.colorbar(im, cax=cax, ticks=np.arange(num_bridge + 2))
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('save to {}'.format(out_img))

    return mask_cc_file
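# Sketch of the median-absolute-deviation thresholding used above (a standalone
# numpy illustration of the concept only; the actual implementation lives in
# ut.median_abs_deviation_threshold and may differ in scaling and details):
#
#     import numpy as np
#     def mad_threshold(data, cutoff=1.):
#         data = np.asarray(data, dtype=np.float64)
#         med = np.median(data)
#         mad = np.median(np.abs(data - med))
#         return med + cutoff * mad
#
#     bin_value = np.array([0., 0., 50., 800., 30., 5., 0.])  # made-up counts
#     thres = mad_threshold(bin_value, cutoff=1.)
#     # histogram bins with counts above `thres` mark clusters of pixels that
#     # share a similar number of non-zero phase closures, i.e. candidate
#     # coherent conn comps to be corrected for unwrapping error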