def search_gps(SNWE, start_date=None, end_date=None, site_list_file=None, print_msg=True): """Search available GPS sites within the geo bounding box from UNR website Parameters: SNWE : tuple of 4 float, indicating (South, North, West, East) in degrees start_date : string in YYYYMMDD format end_date : string in YYYYMMDD format site_list_file : string. Returns: site_names : 1D np.array of string for GPS station names site_lats : 1D np.array for lat site_lons : 1D np.array for lon """ # download site list file if it's not found in current directory if site_list_file is None: site_list_file = os.path.basename(unr_site_list_file) if not os.path.isfile(site_list_file): dload_site_list(print_msg=print_msg) txt_data = np.loadtxt(site_list_file, dtype=bytes, skiprows=1, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)).astype(str) site_names = txt_data[:, 0] site_lats, site_lons = txt_data[:, 1:3].astype(np.float32).T site_lons -= np.round(site_lons / (360.)) * 360. t_start = np.array([ dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 7].astype(str) ]) t_end = np.array([ dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 8].astype(str) ]) # limit on space idx = ((site_lats >= SNWE[0]) * (site_lats <= SNWE[1]) * (site_lons >= SNWE[2]) * (site_lons <= SNWE[3])) # limit on time if start_date: t0 = ptime.date_list2vector([start_date])[0][0] idx *= t_end >= t0 if end_date: t1 = ptime.date_list2vector([end_date])[0][0] idx *= t_start <= t1 return site_names[idx], site_lats[idx], site_lons[idx]
def read_timeseries_yx(y, x, ts_file, lookup_file=None, ref_y=None, ref_x=None):
    """Read the displacement time-series of a single pixel given its row/column.

    Parameters: y/x         : int, row/column number of interest
                ts_file     : string, filename of time-series HDF5 file
                lookup_file : string, filename of lookup table file
                ref_y/x     : int, row/column number of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects,
                        i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # acquisition dates
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    dates = np.array(ptime.date_list2vector(ts_obj.dateList)[0])

    # displacement at the pixel of interest
    dis = readfile.read(ts_file, box=(x, y, x + 1, y + 1))[0]

    # subtract the reference pixel, if one was given
    if ref_y is not None:
        dis -= readfile.read(ts_file, box=(ref_x, ref_y, ref_x + 1, ref_y + 1))[0]

    # shift so that the first acquisition is at zero
    dis -= dis[0]
    return dates, dis
def read_timeseries_yx(y, x, ts_file, ref_y=None, ref_x=None, win_size=1):
    """Read time-series of one pixel with input y/x.

    Parameters: y/x      : int, row/column number of interest
                ts_file  : string, filename of time-series HDF5 file
                ref_y/x  : int, row/column number of reference pixel
                win_size : int, window size centered at y/x; when > 1 the
                           nan-mean over the window is returned instead of
                           the single pixel value
    Returns:    dates : 1D np.array of datetime.datetime objects,
                        i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # read date
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = np.array(ptime.date_list2vector(obj.dateList)[0])

    # read displacement
    print('input y / x: {} / {}'.format(y, x))
    if win_size == 1:
        dis = readfile.read(ts_file, box=(x, y, x + 1, y + 1))[0]
    else:
        # FIX: original read the single pixel and then threw the result away
        # whenever win_size != 1; read only the window in that case.
        buf = int(win_size / 2)
        box_win = (x - buf, y - buf, x + buf + 1, y + buf + 1)
        dis_win = readfile.read(ts_file, box=box_win)[0]
        dis = np.nanmean(dis_win.reshape((obj.numDate, -1)), axis=1)

    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
        dis -= readfile.read(ts_file, box=ref_box)[0]

    # start at zero
    dis -= dis[0]
    return dates, dis
def plot_bar4date_rms(inps):
    """Bar plot of the RMS value per acquisition date, saved as a PDF.

    Highlights the reference date and any excluded dates, and draws the
    minimum-RMS threshold as a dashed line.

    Parameters: inps : namespace with rmsFile, dateList, rmsList, min_rms,
                       refDateIndex, exIdxList, fig_size, tick_year_num,
                       exDateFile, refDateFile, template_file attributes
    Returns:    inps : the same namespace, with figName (and possibly
                       rmsList/exRmsList, rescaled to mm) updated
    """
    inps.figName = os.path.splitext(inps.rmsFile)[0] + '.pdf'
    # only re-draw when the figure is out of date w.r.t. its inputs
    if ut.update_file(inps.figName,
                      [inps.exDateFile, inps.refDateFile, inps.template_file],
                      check_readable=False):
        if inps.fig_size:
            fig = plt.figure(figsize=inps.fig_size)
        else:
            fig = plt.figure()
        ax = fig.add_subplot(111)
        font_size = 12

        dates, datevector = ptime.date_list2vector(inps.dateList)
        try:
            bar_width = ut.most_common(np.diff(dates).tolist()) * 3 / 4
        # FIX: was a bare `except:`; narrow to Exception so that e.g.
        # KeyboardInterrupt is not silently swallowed
        except Exception:
            # fall back to the smallest acquisition interval
            bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
        x_list = [i - bar_width / 2 for i in dates]

        # rescale m -> mm for display
        inps.rmsList = [i * 1000. for i in inps.rmsList]
        min_rms = inps.min_rms * 1000.
        # Plot all dates
        ax.bar(x_list, inps.rmsList, bar_width.days)
        # Plot reference date
        ax.bar(x_list[inps.refDateIndex],
               inps.rmsList[inps.refDateIndex],
               bar_width.days,
               label='Reference date')
        # Plot exclude dates
        if inps.exIdxList:
            ex_x_list = [x_list[i] for i in inps.exIdxList]
            inps.exRmsList = [inps.rmsList[i] for i in inps.exIdxList]
            ax.bar(ex_x_list,
                   inps.exRmsList,
                   bar_width.days,
                   color='darkgray',
                   label='Exclude date(s)')
        # Plot min_rms line
        ax, xmin, xmax = pp.auto_adjust_xaxis_date(
            ax, datevector, font_size, every_year=inps.tick_year_num)
        ax.plot(np.array([xmin, xmax]), np.array([min_rms, min_rms]), '--k')

        # axis format
        ax = pp.auto_adjust_yaxis(ax, inps.rmsList + [min_rms], font_size, ymin=0.0)
        ax.set_xlabel('Time [years]', fontsize=font_size)
        ax.set_ylabel('Root Mean Square [mm]', fontsize=font_size)
        ax.yaxis.set_ticks_position('both')
        ax.tick_params(labelsize=font_size)
        plt.legend(fontsize=font_size)

        # save figure
        fig.savefig(inps.figName, bbox_inches='tight', transparent=True)
        print('save figure to file: ' + inps.figName)
    return inps
def velocity2timeseries(date_list, vel=0.03, display=False):
    """Simulate a displacement time-series from a constant (linear) velocity.

    Inputs:  date_list - list of string in YYYYMMDD or YYMMDD format
             vel       - float, velocity in meter per year
             display   - bool, display simulation or not
    Output:  ts - 2D np.array in size of (date_num, 1), displacement time-series in m
    Example: date_list = pnet.read_baseline_file('bl_list.txt')[0]
             ts0 = velocity2timeseries(date_list, vel=0.03, display=True)
    """
    # temporal baselines are in days; convert to years before scaling by velocity
    tbase_list = ptime.date_list2tbase(date_list)[0]
    ts = (vel / 365.25 * np.array(tbase_list)).reshape(-1, 1)

    if display:
        dates = ptime.date_list2vector(date_list)[0]
        # quick-look scatter plot in centimeters
        marker_size = 5
        plt.figure()
        plt.scatter(dates, ts * 100.0, s=marker_size**2)
        plt.xlabel('Time (years)')
        plt.ylabel('LOS Displacement (cm)')
        plt.title('Displacement time-series with velocity = ' + str(vel) + ' m/yr')
        plt.show()
    return ts
def exclude_dates():
    """Resolve the global inps.ex_date_list (single dates and/or date-list
    files) into a sorted list of YYYYMMDD dates that exist in the global
    dateList, and update inps.ex_date_list / inps.ex_dates / inps.ex_idx_list
    accordingly.
    """
    global inps, dateList
    if not inps.ex_date_list:
        return
    raw_items = list(inps.ex_date_list)
    inps.ex_date_list = []
    if raw_items:
        collected = []
        for item in raw_items:
            # each item is either a text file holding dates, or one date string
            if os.path.isfile(item):
                new_dates = ptime.read_date_list(item)
            else:
                new_dates = [ptime.yyyymmdd(item)]
            collected += [d for d in new_dates if d not in collected]
        # keep only dates that actually exist in the input file
        inps.ex_date_list = sorted(set(collected).intersection(dateList))
        inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
        inps.ex_idx_list = sorted(dateList.index(d) for d in inps.ex_date_list)
        print(('exclude date:' + str(inps.ex_date_list)))
def read_timeseries_lalo(lat, lon, ts_file, lookup_file=None, ref_lat=None, ref_lon=None):
    """Read the displacement time-series of a single pixel given its lat/lon.

    Parameters: lat/lon     : float, latitude/longitude
                ts_file     : string, filename of time-series HDF5 file
                lookup_file : string, filename of lookup table file
                ref_lat/lon : float, latitude/longitude of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects,
                        i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # acquisition dates
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    dates = np.array(ptime.date_list2vector(ts_obj.dateList)[0])

    # geo -> radar coordinates, then read that single pixel
    coord = coordinate(ts_obj.metadata, lookup_file=lookup_file)
    y, x = coord.geo2radar(lat, lon)[0:2]
    dis = readfile.read(ts_file, box=(x, y, x + 1, y + 1))[0]

    # subtract the reference pixel, if one was given
    if ref_lat is not None:
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        dis -= readfile.read(ts_file, box=(ref_x, ref_y, ref_x + 1, ref_y + 1))[0]

    # shift so that the first acquisition is at zero
    dis -= dis[0]
    return dates, dis
def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False):
    """Read the site's east/north/up displacement time-series from its data file.

    Parameters: start_date : string in YYYYMMDD format, start of the time range
                end_date   : string in YYYYMMDD format, end of the time range
                print_msg  : bool, print progress message
                display    : bool, show a quick scatter plot of the 3 components
    Returns:    (dates, dis_e, dis_n, dis_u, std_e, std_n, std_u) — also stored
                as attributes on self
    """
    # download the data file first if it is not present locally
    if not os.path.isfile(self.file):
        self.dload_site(print_msg=print_msg)

    # read dates, dis_e, dis_n, dis_u
    if print_msg:
        print(
            'reading time and displacement in east/north/vertical direction'
        )
    data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str)
    # column 1 is the date in "%y%b%d" form (e.g. 07JAN01)
    self.dates = np.array(
        [dt(*time.strptime(i, "%y%b%d")[0:5]) for i in data[:, 1]])
    #self.dates = np.array([ptime.decimal_year2datetime(i) for i in data[:, 2]])

    # displacement columns (8, 10, 12) and their std-dev columns (14, 15, 16)
    # — assumed to match the UNR env file layout; TODO confirm against the format spec
    (self.dis_e,
     self.dis_n,
     self.dis_u,
     self.std_e,
     self.std_n,
     self.std_u) = data[:, (8, 10, 12, 14, 15, 16)].astype(np.float32).T

    # cut out the specified time range
    t_flag = np.ones(len(self.dates), np.bool_)
    if start_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        t_flag[self.dates < t0] = 0
    if end_date:
        t1 = ptime.date_list2vector([end_date])[0][0]
        t_flag[self.dates > t1] = 0
    self.dates = self.dates[t_flag]
    self.dis_e = self.dis_e[t_flag]
    self.dis_n = self.dis_n[t_flag]
    self.dis_u = self.dis_u[t_flag]
    self.std_e = self.std_e[t_flag]
    self.std_n = self.std_n[t_flag]
    self.std_u = self.std_u[t_flag]

    if display:
        import matplotlib.pyplot as plt
        # one row per component, sharing the time axis
        fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True)
        ax[0].scatter(self.dates, self.dis_e, s=2**2, label='East')
        ax[1].scatter(self.dates, self.dis_n, s=2**2, label='North')
        ax[2].scatter(self.dates, self.dis_u, s=2**2, label='Up')
        plt.show()
    return (self.dates, self.dis_e, self.dis_n, self.dis_u,
            self.std_e, self.std_n, self.std_u)
def manual_select_pairs_to_remove(stackFile):
    """Manually select interferograms to remove.

    Shows the interferogram network and lets the user click pairs of
    acquisition dates; each valid pair is highlighted in red and recorded.

    Parameters: stackFile : string, path of the interferogram stack HDF5 file
    Returns:    date12_click : list of string in YYYYMMDD_YYYYMMDD format,
                               or None if the user declines to proceed
    """
    print('\n-------------------------------------------------------------')
    print('Manually select interferograms to remove')
    print('1) click two dates/points to select one pair of interferogram')
    print('2) repeat until you select all pairs you would like to remove')
    print('3) close the figure to continue the program ...')
    print('-------------------------------------------------------------\n')
    obj = ifgramStack(stackFile)
    obj.open()
    date12ListAll = obj.date12List
    pbase = obj.get_perp_baseline_timeseries(dropIfgram=False)
    dateList = obj.dateList
    # matplotlib date numbers, for nearest-neighbor matching of clicks
    datesNum = mdates.date2num(np.array(ptime.date_list2vector(dateList)[0]))

    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))

    # Display the network
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pp.plot_network(ax, date12ListAll, dateList, list(pbase),
                         date12List_drop=date12ListDropped)
    print('display the network of interferogram of file: ' + stackFile)

    # clicked dates and the pairs formed from them (closed over by onclick)
    date_click = []
    date12_click = []

    def onclick(event):
        # snap the click to the nearest acquisition in (time, pbase) space
        idx = nearest_neighbor(event.xdata, event.ydata, datesNum, pbase)
        print('click at ' + dateList[idx])
        date_click.append(dateList[idx])
        # every second distinct click completes a candidate pair
        if len(date_click) % 2 == 0 and date_click[-2] != date_click[-1]:
            [mDate, sDate] = sorted(date_click[-2:])
            mIdx = dateList.index(mDate)
            sIdx = dateList.index(sDate)
            date12 = mDate + '_' + sDate
            if date12 in date12ListAll:
                print('select date12: ' + date12)
                date12_click.append(date12)
                # highlight the selected pair in red
                ax.plot([datesNum[mIdx], datesNum[sIdx]],
                        [pbase[mIdx], pbase[sIdx]], 'r', lw=4)
            else:
                print(date12 + ' is not existed in input file')
        plt.draw()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    plt.show()

    # final confirmation before returning the selection
    if not ut.yes_or_no('Proceed to drop the ifgrams/date12?'):
        date12_click = None
    return date12_click
def print_timseries_date_stat(dateList):
    """Print basic statistics of the acquisition dates.

    Parameters: dateList : list of string in YYYYMMDD format
    """
    # decimal-year representation of the acquisition dates
    datevector = ptime.date_list2vector(dateList)[1]
    print('Start Date: ' + dateList[0])
    print('End Date: ' + dateList[-1])
    print('Number of acquisitions : %d' % len(dateList))
    # FIX: corrected typo in the printed unit ('yeras' -> 'years')
    print('Std. of acquisition times : %.2f years' % std(datevector))
    print('----------------------')
    print('List of dates:')
    print(dateList)
    print('----------------------')
    print('List of dates in years')
    print(datevector)
    return
def read_exclude_date(input_ex_date, dateListAll):
    """Resolve the user-input excluded dates against the full date list.

    Parameters: input_ex_date : list of string (dates and/or date-list files)
                dateListAll   : list of string, all dates in YYYYMMDD format
    Returns:    ex_date_list : list of string, excluded dates in YYYYMMDD format
                ex_dates     : list of datetime.datetime objects for those dates
                ex_flag      : 1D np.array of bool, False at excluded dates
    """
    # default: nothing excluded, every date flagged as kept
    ex_dates = []
    ex_flag = np.ones((len(dateListAll)), np.bool_)

    ex_date_list = ptime.read_date_list(input_ex_date, date_list_all=dateListAll)
    if ex_date_list:
        ex_dates = ptime.date_list2vector(ex_date_list)[0]
        for date_str in ex_date_list:
            ex_flag[dateListAll.index(date_str)] = False
        vprint('exclude date:' + str(ex_date_list))
    return ex_date_list, ex_dates, ex_flag
def read_timeseries_info():
    """Reads basic information about timeseries file being viewed.

    Populates the module globals atr, k, h5, dateList and the attributes
    inps.num_date / inps.dates / inps.tims.
    """
    # FIX: the original `global` statement listed attribute references
    # (inps.tims, inps.num_date), which is a SyntaxError in Python; declaring
    # the bare name `inps` is sufficient since attribute assignment mutates
    # the object in place.
    global atr, k, h5, dateList, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print(('input file is '+k+': '+inps.timeseries_file))
    if k not in ['timeseries', 'giantTimeseries']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['giantTimeseries']:
        # giant stores dates as proleptic-Gregorian ordinal numbers
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    inps.num_date = len(dateList)
    inps.dates, inps.tims = ptime.date_list2vector(dateList)
def read_timeseries_info():
    """Read basic metadata and the date list of the time-series file.

    Fills the module globals atr, k, h5, dateList, tims, date_num and sets
    inps.dates.
    """
    global atr, k, h5, dateList, tims, date_num, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print(('input file is ' + k + ': ' + inps.timeseries_file))
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    if k == 'GIANT_TS':
        # GIAnT stores dates as ordinal numbers
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    date_num = len(dateList)
    inps.dates, tims = ptime.date_list2vector(dateList)
def main(iargs=None):
    """Compute the spatial average of the input file and optionally plot it."""
    inps = cmd_line_parse(iargs)
    print('\n*************** Spatial Average ******************')
    mean_list, date_list = ut.spatial_average(inps.file,
                                              datasetName=inps.datasetName,
                                              maskFile=inps.mask_file,
                                              saveList=True)

    atr = readfile.read_attribute(inps.file)
    # a time plot only makes sense for a time-series file
    if inps.disp_fig and atr['FILE_TYPE'] == 'timeseries':
        dates, datevector = ptime.date_list2vector(date_list)
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(dates, mean_list, '-o')
        ax.set_title('Spatial Average', fontsize=12)
        ax = pp.auto_adjust_xaxis_date(ax, datevector)[0]
        ax.set_xlabel('Time [years]', fontsize=12)
        ax.set_ylabel('Mean', fontsize=12)
        plt.show()
    return
def read_exclude_date(input_ex_date, dateListAll):
    """Resolve the user-input excluded dates against the full date list.

    Parameters: input_ex_date : iterable of string, each item a date or the
                                path of a text file of dates
                dateListAll   : list of string, all dates in YYYYMMDD format
    Returns:    ex_date_list : list of string, excluded dates in YYYYMMDD format
                ex_dates     : list of datetime.datetime objects for those dates
                ex_flag      : 1D np.array of bool, False at excluded dates
    """
    # default value
    ex_date_list = []
    ex_dates = []
    ex_flag = np.ones((len(dateListAll)), np.bool_)

    # FIX: removed the duplicated `if input_ex_date:` truthiness check —
    # list() of a truthy iterable is still truthy, so one test suffices.
    if input_ex_date:
        input_ex_date = list(input_ex_date)
        for ex_date in input_ex_date:
            # each item is either a text file of dates or a single date
            if os.path.isfile(ex_date):
                ex_date = ptime.read_date_list(ex_date)
            else:
                ex_date = [ptime.yyyymmdd(ex_date)]
            ex_date_list += list(set(ex_date) - set(ex_date_list))

        # delete dates not existed in input file
        ex_date_list = sorted(list(set(ex_date_list).intersection(dateListAll)))
        ex_dates = ptime.date_list2vector(ex_date_list)[0]
        for i in ex_date_list:
            ex_flag[dateListAll.index(i)] = False
        print('exclude date:' + str(ex_date_list))
    return ex_date_list, ex_dates, ex_flag
def plot_coherence_history(ax, date12List, cohList, plot_dict=None):
    """Plot min/max Coherence of all interferograms for each date.

    Parameters: ax         : matplotlib axes object
                date12List : list of string in YYYYMMDD_YYYYMMDD format
                cohList    : list of float, coherence of each interferogram
                plot_dict  : dict with optional keys fontsize / linewidth /
                             markercolor / markersize / disp_title / every_year
    Returns:    ax : matplotlib axes object
    """
    # Figure Setting.
    # FIX: the original used a mutable default argument (plot_dict={}) and
    # mutated it, sharing state across calls (and with the caller's dict);
    # work on a local copy instead.
    plot_dict = dict(plot_dict) if plot_dict else {}
    plot_dict.setdefault('fontsize', 12)
    plot_dict.setdefault('linewidth', 2)
    plot_dict.setdefault('markercolor', 'orange')
    plot_dict.setdefault('markersize', 16)
    plot_dict.setdefault('disp_title', True)
    plot_dict.setdefault('every_year', 1)

    # Get date list
    date12List = ptime.yyyymmdd_date12(date12List)
    m_dates = [date12.split('_')[0] for date12 in date12List]
    s_dates = [date12.split('_')[1] for date12 in date12List]
    dateList = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    dates, datevector = ptime.date_list2vector(dateList)

    # bar width: 3/4 of the most common acquisition interval
    bar_width = ut.most_common(np.diff(dates).tolist())*3/4
    x_list = [i-bar_width/2 for i in dates]

    coh_mat = pnet.coherence_matrix(date12List, cohList)

    ax.bar(x_list, np.nanmax(coh_mat, axis=0), bar_width.days, label='Max Coherence')
    ax.bar(x_list, np.nanmin(coh_mat, axis=0), bar_width.days, label='Min Coherence')

    if plot_dict['disp_title']:
        ax.set_title('Coherence History of All Related Interferograms')

    # axis format
    ax = auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'],
                                every_year=plot_dict['every_year'])[0]
    ax.set_ylim([0.0, 1.0])
    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Coherence', fontsize=plot_dict['fontsize'])
    ax.legend(loc='lower right')
    return ax
def plot_perp_baseline_hist(ax, dateList, pbaseList, plot_dict=None, dateList_drop=None):
    """Plot Perpendicular Spatial Baseline History.

    Inputs
        ax            : matplotlib axes object
        dateList      : list of string, date in YYYYMMDD format
        pbaseList     : list of float, perp baseline
        plot_dict     : dictionary with the following optional items:
                        fontsize
                        linewidth
                        markercolor
                        markersize
                        disp_title : bool, show figure title or not, default: True
                        every_year : int, number of years for the major tick on xaxis
        dateList_drop : list of string, date dropped in YYYYMMDD format
                        e.g. ['20080711', '20081011']
    Output:
        ax : matplotlib axes object
    """
    # Figure Setting.
    # FIX: original used mutable default arguments (plot_dict={},
    # dateList_drop=[]) and mutated plot_dict, leaking state across calls;
    # fill defaults on a local copy instead.
    plot_dict = dict(plot_dict) if plot_dict else {}
    plot_dict.setdefault('fontsize', 12)
    plot_dict.setdefault('linewidth', 2)
    plot_dict.setdefault('markercolor', 'orange')
    plot_dict.setdefault('markersize', 16)
    plot_dict.setdefault('disp_title', True)
    plot_dict.setdefault('every_year', 1)
    dateList_drop = dateList_drop if dateList_drop is not None else []
    transparency = 0.7

    # Date Convert
    dateList = ptime.yyyymmdd(dateList)
    dates, datevector = ptime.date_list2vector(dateList)

    # Get index of date used and dropped
    idx_keep = list(range(len(dateList)))
    idx_drop = []
    for i in dateList_drop:
        idx = dateList.index(i)
        idx_keep.remove(idx)
        idx_drop.append(idx)

    # Plot date used
    if idx_keep:
        x_list = [dates[i] for i in idx_keep]
        y_list = [pbaseList[i] for i in idx_keep]
        ax.plot(x_list, y_list, '-ko',
                alpha=transparency,
                lw=plot_dict['linewidth'],
                ms=plot_dict['markersize'],
                mfc=plot_dict['markercolor'])

    # Plot date dropped
    if idx_drop:
        x_list = [dates[i] for i in idx_drop]
        y_list = [pbaseList[i] for i in idx_drop]
        ax.plot(x_list, y_list, 'ko',
                alpha=transparency,
                ms=plot_dict['markersize'],
                mfc='gray')

    if plot_dict['disp_title']:
        ax.set_title('Perpendicular Baseline History',
                     fontsize=plot_dict['fontsize'])

    # axis format
    ax = auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'],
                                every_year=plot_dict['every_year'])[0]
    ax = auto_adjust_yaxis(ax, pbaseList, plot_dict['fontsize'])
    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Perpendicular Baseline [m]', fontsize=plot_dict['fontsize'])
    return ax
def read_init_info(inps):
    """Read metadata of the input time-series file(s) and initialize all the
    display-related attributes on the input namespace.

    Parameters: inps : argparse.Namespace from the command-line parser
    Returns:    inps : the same namespace, fully populated
                atr  : dict, metadata of the (first) time-series file
    """
    # Time Series Info
    ts_file0 = inps.timeseries_file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open()

    # default file labels: "0", "1", ... one per input file
    if not inps.file_label:
        inps.file_label = [
            str(i) for i in list(range(len(inps.timeseries_file)))
        ]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info: optionally trimmed to [start_date, end_date]
    inps.date_list = obj.dateList
    if inps.start_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) >= int(inps.start_date)
        ]
    if inps.end_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) <= int(inps.end_date)
        ]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)
    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    if obj.metadata['REF_DATE'] in inps.date_list:
        inps.ref_idx = inps.date_list.index(obj.metadata['REF_DATE'])
    else:
        inps.ref_idx = 0
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    if not inps.init_idx:
        # start near the far end of the series, away from the reference date
        if inps.ref_idx < inps.num_date / 2.:
            inps.init_idx = -3
        else:
            inps.init_idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Read Error List
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file,
                                       dtype=bytes).astype(str)
        # NOTE(review): np.float is removed in NumPy >= 1.24 — consider float/np.float64
        inps.error_ts = error_fileContent[:, 1].astype(
            np.float) * inps.unit_fac
        if inps.ex_date_list:
            # split the error list into excluded / kept acquisitions
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    # NOTE(review): min(0, ...) is always <= 0, so zero_idx can never pick a
    # later kept acquisition — possibly intended to be the first kept index; confirm
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./INPUTS/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)

    # Out message
    data_box = (0, 0, obj.width, obj.length)
    print('data coverage in y/x: ' + str(data_box))
    print('subset coverage in y/x: ' + str(inps.pix_box))
    print('data coverage in lat/lon: ' + str(inps.coord.box_pixel2geo(data_box)))
    print('subset coverage in lat/lon: ' + str(inps.geo_box))
    print(
        '------------------------------------------------------------------------'
    )

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        # NOTE(review): item assignment below fails if ref_lalo is the tuple
        # assigned just above (tuples are immutable) — confirm callers pass a list
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0],
                                           inps.ref_lalo[1],
                                           print_msg=False)[0:2]
    if not inps.ref_yx:
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0],
                                       inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0],
                                         inps.yx[1],
                                         print_msg=False)[0:2]
    except:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_v = radian;
    # otherwise set disp_unit_v = disp_unit
    inps.disp_unit_v = inps.disp_unit
    if inps.wrap:
        # conversion factor from range change to phase, scaled per display unit
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(
                inps.disp_unit))
        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_v = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_v)
    return inps, atr
def plot_network(ax, date12List, dateList, pbaseList, plot_dict={}, date12List_drop=[], print_msg=True):
    """Plot Temporal-Perp baseline Network
    Inputs
        ax : matplotlib axes object
        date12List : list of string for date12 in YYYYMMDD_YYYYMMDD format
        dateList   : list of string, for date in YYYYMMDD format
        pbaseList  : list of float, perp baseline, len=number of acquisition
        plot_dict  : dictionary with the following items:
                     fontsize
                     linewidth
                     markercolor
                     markersize
                     cohList : list of float, coherence value of each
                               interferogram, len = number of ifgrams
                     disp_min/max : float, min/max range of the color display
                                    based on cohList
                     colormap  : string, colormap name
                     coh_thres : float, coherence of where to cut the colormap
                                 for display
                     disp_title : bool, show figure title or not, default: True
                     disp_drop  : bool, show dropped interferograms or not,
                                  default: True
        date12List_drop : list of string, date12 of dropped interferograms
        print_msg : bool, print progress message
    Output
        ax : matplotlib axes object

    NOTE(review): plot_dict={} and date12List_drop=[] are mutable default
    arguments that this function mutates — state leaks across calls; confirm
    and consider None defaults.
    """
    # Figure Setting: fill missing display options with defaults
    if not 'fontsize' in plot_dict.keys():
        plot_dict['fontsize'] = 12
    if not 'linewidth' in plot_dict.keys():
        plot_dict['linewidth'] = 2
    if not 'markercolor' in plot_dict.keys():
        plot_dict['markercolor'] = 'orange'
    if not 'markersize' in plot_dict.keys():
        plot_dict['markersize'] = 16
    # For colorful display of coherence
    if not 'cohList' in plot_dict.keys():
        plot_dict['cohList'] = None
    if not 'cbar_label' in plot_dict.keys():
        plot_dict['cbar_label'] = 'Average Spatial Coherence'
    if not 'disp_min' in plot_dict.keys():
        plot_dict['disp_min'] = 0.2
    if not 'disp_max' in plot_dict.keys():
        plot_dict['disp_max'] = 1.0
    if not 'colormap' in plot_dict.keys():
        plot_dict['colormap'] = 'RdBu'
    if not 'disp_title' in plot_dict.keys():
        plot_dict['disp_title'] = True
    if not 'coh_thres' in plot_dict.keys():
        plot_dict['coh_thres'] = None
    if not 'disp_drop' in plot_dict.keys():
        plot_dict['disp_drop'] = True
    if not 'every_year' in plot_dict.keys():
        plot_dict['every_year'] = 1
    cohList = plot_dict['cohList']
    disp_min = plot_dict['disp_min']
    disp_max = plot_dict['disp_max']
    coh_thres = plot_dict['coh_thres']
    transparency = 0.7

    # Date Convert
    dateList = ptime.yyyymmdd(sorted(dateList))
    dates, datevector = ptime.date_list2vector(dateList)
    tbaseList = ptime.date_list2tbase(dateList)[0]

    ## maxBperp and maxBtemp: per-interferogram baseline differences
    date12List = ptime.yyyymmdd_date12(date12List)
    ifgram_num = len(date12List)
    pbase12 = np.zeros(ifgram_num)
    tbase12 = np.zeros(ifgram_num)
    for i in range(ifgram_num):
        m_date, s_date = date12List[i].split('_')
        m_idx = dateList.index(m_date)
        s_idx = dateList.index(s_date)
        pbase12[i] = pbaseList[s_idx] - pbaseList[m_idx]
        tbase12[i] = tbaseList[s_idx] - tbaseList[m_idx]
    if print_msg:
        print('max perpendicular baseline: {:.2f} m'.format(np.max(np.abs(pbase12))))
        print('max temporal baseline: {} days'.format(np.max(tbase12)))

    ## Keep/Drop - date12
    date12List_keep = sorted(list(set(date12List) - set(date12List_drop)))
    idx_date12_keep = [date12List.index(i) for i in date12List_keep]
    idx_date12_drop = [date12List.index(i) for i in date12List_drop]
    if not date12List_drop:
        plot_dict['disp_drop'] = False

    ## Keep/Drop - date: a date is "kept" if any kept ifgram uses it
    m_dates = [i.split('_')[0] for i in date12List_keep]
    s_dates = [i.split('_')[1] for i in date12List_keep]
    dateList_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    dateList_drop = sorted(list(set(dateList) - set(dateList_keep)))
    idx_date_keep = [dateList.index(i) for i in dateList_keep]
    idx_date_drop = [dateList.index(i) for i in dateList_drop]

    # Ploting
    # ax=fig.add_subplot(111)
    # Colorbar when conherence is colored
    if cohList is not None:
        data_min = min(cohList)
        data_max = max(cohList)
        # Normalize
        normalization = False
        if normalization:
            # NOTE(review): (data_min-data_min) is always 0 -> ZeroDivisionError
            # if this dead branch is ever enabled; likely meant (data_max-data_min)
            cohList = [(coh-data_min) / (data_min-data_min) for coh in cohList]
            disp_min = data_min
            disp_max = data_max

        if print_msg:
            print('showing coherence')
            print(('colormap: '+plot_dict['colormap']))
            print(('display range: '+str([disp_min, disp_max])))
            print(('data range: '+str([data_min, data_max])))

        splitColormap = True
        if splitColormap:
            # Use lower/upper part of colormap to emphasis dropped interferograms
            # NOTE(review): `if not coh_thres` also treats an explicit 0.0 as unset
            if not coh_thres:
                # Find proper cut percentage so that all keep pairs are blue and drop pairs are red
                cohList_keep = [cohList[i] for i in idx_date12_keep]
                cohList_drop = [cohList[i] for i in idx_date12_drop]
                if cohList_drop:
                    coh_thres = max(cohList_drop)
                else:
                    coh_thres = min(cohList_keep)
            if coh_thres < disp_min:
                disp_min = 0.0
                if print_msg:
                    print('data range exceed orginal display range, set new display range to: [0.0, %f]' % (disp_max))
            # number of colormap samples (out of 200) below the cut
            c1_num = np.ceil(200.0 * (coh_thres - disp_min) / (disp_max - disp_min)).astype('int')
            coh_thres = c1_num / 200.0 * (disp_max-disp_min) + disp_min
            cmap = plt.get_cmap(plot_dict['colormap'])
            colors1 = cmap(np.linspace(0.0, 0.3, c1_num))
            colors2 = cmap(np.linspace(0.6, 1.0, 200 - c1_num))
            cmap = LinearSegmentedColormap.from_list('truncate_RdBu', np.vstack((colors1, colors2)))
            if print_msg:
                print(('color jump at '+str(coh_thres)))
        else:
            cmap = plt.get_cmap(plot_dict['colormap'])

        # colorbar on the right side of the axes
        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "3%", pad="3%")
        norm = mpl.colors.Normalize(vmin=disp_min, vmax=disp_max)
        cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
        cbar.set_label(plot_dict['cbar_label'], fontsize=plot_dict['fontsize'])

        # plot low coherent ifgram first and high coherence ifgram later
        cohList_keep = [cohList[date12List.index(i)] for i in date12List_keep]
        date12List_keep = [x for _, x in sorted(zip(cohList_keep, date12List_keep))]

    # Dot - SAR Acquisition
    if idx_date_keep:
        x_list = [dates[i] for i in idx_date_keep]
        y_list = [pbaseList[i] for i in idx_date_keep]
        ax.plot(x_list, y_list, 'ko', alpha=0.7,
                ms=plot_dict['markersize'], mfc=plot_dict['markercolor'])
    if idx_date_drop:
        x_list = [dates[i] for i in idx_date_drop]
        y_list = [pbaseList[i] for i in idx_date_drop]
        ax.plot(x_list, y_list, 'ko', alpha=0.7,
                ms=plot_dict['markersize'], mfc='gray')

    ## Line - Pair/Interferogram
    # interferograms dropped
    if plot_dict['disp_drop']:
        for date12 in date12List_drop:
            date1, date2 = date12.split('_')
            idx1 = dateList.index(date1)
            idx2 = dateList.index(date2)
            x = np.array([dates[idx1], dates[idx2]])
            y = np.array([pbaseList[idx1], pbaseList[idx2]])
            if cohList is not None:
                # map coherence into [0, 1] for colormap lookup
                coh = cohList[date12List.index(date12)]
                coh_idx = (coh - disp_min) / (disp_max - disp_min)
                ax.plot(x, y, '--', lw=plot_dict['linewidth'],
                        alpha=transparency, c=cmap(coh_idx))
            else:
                ax.plot(x, y, '--', lw=plot_dict['linewidth'],
                        alpha=transparency, c='k')

    # interferograms kept
    for date12 in date12List_keep:
        date1, date2 = date12.split('_')
        idx1 = dateList.index(date1)
        idx2 = dateList.index(date2)
        x = np.array([dates[idx1], dates[idx2]])
        y = np.array([pbaseList[idx1], pbaseList[idx2]])
        if cohList is not None:
            coh = cohList[date12List.index(date12)]
            coh_idx = (coh - disp_min) / (disp_max - disp_min)
            ax.plot(x, y, '-', lw=plot_dict['linewidth'],
                    alpha=transparency, c=cmap(coh_idx))
        else:
            ax.plot(x, y, '-', lw=plot_dict['linewidth'],
                    alpha=transparency, c='k')

    if plot_dict['disp_title']:
        ax.set_title('Interferogram Network', fontsize=plot_dict['fontsize'])

    # axis format
    ax = auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'],
                                every_year=plot_dict['every_year'])[0]
    ax = auto_adjust_yaxis(ax, pbaseList, plot_dict['fontsize'])
    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Perp Baseline [m]', fontsize=plot_dict['fontsize'])

    # Legend
    if plot_dict['disp_drop']:
        solid_line = mlines.Line2D([], [], color='k', ls='solid',
                                   label='Interferograms')
        dash_line = mlines.Line2D([], [], color='k', ls='dashed',
                                  label='Interferograms dropped')
        ax.legend(handles=[solid_line, dash_line])
    return ax
def main(iargs=None):
    """Interactive time-series viewer: cumulative displacement map + per-pixel time series plot.

    Parameters: iargs : list of str, command line arguments (None -> sys.argv)
    Side effects: opens matplotlib figures (or Agg backend if --nodisplay),
                  optionally saves TXT/PDF/PNG outputs for the selected pixel.
    """
    inps = cmd_line_parse(iargs)

    # Time Series Info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is ' + k + ': ' + inps.timeseries_file)
    if not k in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    obj = timeseries(inps.timeseries_file)
    obj.open()
    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['GIANT_TS']:
        # GIANT stores dates as ordinal numbers; convert to YYYYMMDD strings
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = obj.dateList
    date_num = len(dateList)
    inps.dates, inps.yearList = ptime.date_list2vector(dateList)

    # Read exclude dates: each entry is either a date-list file or a date string
    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []
        if input_ex_date:
            for ex_date in input_ex_date:
                if os.path.isfile(ex_date):
                    ex_date = ptime.read_date_list(ex_date)
                else:
                    ex_date = [ptime.yyyymmdd(ex_date)]
                inps.ex_date_list += list(set(ex_date) - set(inps.ex_date_list))
            # delete dates not existed in input file
            inps.ex_date_list = sorted(
                list(set(inps.ex_date_list).intersection(dateList)))
            inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
            inps.ex_idx_list = sorted(
                [dateList.index(i) for i in inps.ex_date_list])
            print('exclude date:' + str(inps.ex_date_list))

    # Zero displacement for 1st acquisition (first non-excluded date)
    if inps.zero_first:
        if inps.ex_date_list:
            inps.zero_idx = min(
                list(set(range(date_num)) - set(inps.ex_idx_list)))
        else:
            inps.zero_idx = 0

    # File Size
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])
    print('data size in [y0,y1,x0,x1]: [%d, %d, %d, %d]' %
          (0, length, 0, width))
    try:
        # geo-coded file: compute lat/lon coverage (Y_STEP is negative)
        ullon = float(atr['X_FIRST'])
        ullat = float(atr['Y_FIRST'])
        lon_step = float(atr['X_STEP'])
        lat_step = float(atr['Y_STEP'])
        lrlon = ullon + width * lon_step
        lrlat = ullat + length * lat_step
        print('data size in [lat0,lat1,lon0,lon1]: [%.4f, %.4f, %.4f, %.4f]' %
              (lrlat, ullat, ullon, lrlon))
    except:
        pass

    # Initial Pixel Coord: nearest-pixel rounding of lat/lon input
    if inps.lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.lalo[1] - ullon) / lon_step + 0.5)
        inps.yx = [y, x]

    if inps.ref_lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.ref_lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.ref_lalo[1] - ullon) / lon_step + 0.5)
        inps.ref_yx = [y, x]

    # Display Unit: scale factor from meter to the requested unit
    if inps.disp_unit == 'cm':
        inps.unit_fac = 100.0
    elif inps.disp_unit == 'm':
        inps.unit_fac = 1.0
    elif inps.disp_unit == 'dm':
        inps.unit_fac = 10.0
    elif inps.disp_unit == 'mm':
        inps.unit_fac = 1000.0
    elif inps.disp_unit == 'km':
        inps.unit_fac = 0.001
    else:
        raise ValueError('Un-recognized unit: ' + inps.disp_unit)
    if k in ['GIANT_TS']:
        print('data unit: mm')
        inps.unit_fac *= 0.001  # GIANT data is in mm, not m
    else:
        print('data unit: m')
    print('display unit: ' + inps.disp_unit)

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        # BUGFIX: was `inps.left_lr = False`, leaving inps.flip_lr unset and
        # crashing the later `if inps.flip_lr:` check with AttributeError.
        inps.flip_lr = False

    # Mask file: default candidates depend on whether the file is geocoded
    if not inps.mask_file:
        if os.path.basename(inps.timeseries_file).startswith('geo_'):
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']
        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except:
            inps.mask_file = None
    try:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except:
        mask = None
        print('No mask used.')

    # Initial Map
    d_v = readfile.read(
        inps.timeseries_file,
        datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(
            inps.timeseries_file,
            datasetName=inps.ref_date)[0] * inps.unit_fac
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]
    if not inps.ylim_mat:
        inps.ylim_mat = data_lim
    print('Initial data range: ' + str(data_lim))
    print('Display data range: ' + str(inps.ylim_mat))

    # Fig 1 - Cumulative Displacement Map
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    fig_v = plt.figure('Cumulative Displacement')

    # Axes 1
    #ax_v = fig_v.add_subplot(111)
    # ax_v.set_position([0.125,0.25,0.75,0.65])
    # This works on OSX. Original worked on Linux.
    # rect[left, bottom, width, height]
    ax_v = fig_v.add_axes([0.125, 0.25, 0.75, 0.65])
    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
        ax_v = pp.plot_dem_yx(ax_v, dem)
    img = ax_v.imshow(d_v, cmap=inps.colormap,
                      clim=inps.ylim_mat, interpolation='nearest')

    # Reference Pixel
    if inps.ref_yx:
        # NOTE(review): d_v was already referenced above, so this second
        # subtraction is a no-op (d_v[ref] is 0 here); kept for safety.
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        ax_v.plot(inps.ref_yx[1], inps.ref_yx[0], 'ks', ms=6)
    else:
        try:
            ax_v.plot(int(atr['REF_X']), int(atr['REF_Y']), 'ks', ms=6)
        except:
            pass

    # Initial Pixel
    if inps.yx:
        ax_v.plot(inps.yx[1], inps.yx[0], 'ro', markeredgecolor='black')

    ax_v.set_xlim(0, np.shape(d_v)[1])
    ax_v.set_ylim(np.shape(d_v)[0], 0)

    # Status Bar
    def format_coord(x, y):
        """Return status-bar text (value and, if geocoded, lat/lon) for cursor pos."""
        col = int(x + 0.5)
        row = int(y + 0.5)
        if 0 <= col < width and 0 <= row < length:
            z = d_v[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return 'x=%.0f, y=%.0f, value=%.4f, lon=%.4f, lat=%.4f' % (
                    x, y, z, lon, lat)
            except:
                return 'x=%.0f, y=%.0f, value=%.4f' % (x, y, z)
        # ROBUSTNESS: avoid returning None when the cursor is outside the data
        return 'x=%.0f, y=%.0f' % (x, y)
    ax_v.format_coord = format_coord

    # Title and Axis Label
    ax_v.set_title(
        'N = %d, Time = %s' %
        (inps.epoch_num, inps.dates[inps.epoch_num].strftime('%Y-%m-%d')))
    if not 'Y_FIRST' in atr.keys():
        ax_v.set_xlabel('Range')
        ax_v.set_ylabel('Azimuth')

    # Flip axis
    if inps.flip_lr:
        ax_v.invert_xaxis()
        print('flip map left and right')
    if inps.flip_ud:
        ax_v.invert_yaxis()
        print('flip map up and down')

    # Colorbar
    cbar = fig_v.colorbar(img, orientation='vertical')
    cbar.set_label('Displacement [%s]' % inps.disp_unit)

    # Axes 2 - Time Slider
    ax_time = fig_v.add_axes([0.125, 0.1, 0.6, 0.07],
                             facecolor='lightgoldenrodyellow', yticks=[])
    tslider = Slider(ax_time, 'Years',
                     inps.yearList[0], inps.yearList[-1],
                     valinit=inps.yearList[inps.epoch_num])
    tslider.ax.bar(inps.yearList, np.ones(len(inps.yearList)),
                   facecolor='black', width=0.01, ecolor=None)
    tslider.ax.set_xticks(
        np.round(
            np.linspace(inps.yearList[0], inps.yearList[-1], num=5) * 100) / 100)

    def time_slider_update(val):
        """Update Displacement Map using Slider"""
        timein = tslider.val
        idx_nearest = np.argmin(np.abs(np.array(inps.yearList) - timein))
        ax_v.set_title(
            'N = %d, Time = %s' %
            (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
        # NOTE(review): assumes each date is a top-level HDF5 dataset; verify
        # against the file layout (modern files store one 3D 'timeseries' cube).
        d_v = h5[dateList[idx_nearest]][:] * inps.unit_fac
        if inps.ref_date:
            d_v -= inps.ref_d_v
        if mask is not None:
            d_v = mask_matrix(d_v, mask)
        if inps.ref_yx:
            d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        img.set_data(d_v)
        fig_v.canvas.draw()
    tslider.on_changed(time_slider_update)

    # Fig 2 - Time Series Displacement - Point
    fig_ts = plt.figure('Time series - point', figsize=inps.fig_size)
    ax_ts = fig_ts.add_subplot(111)

    # Read Error List
    inps.error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file,
                                       dtype=bytes).astype(str)
        # BUGFIX: np.float was removed in NumPy >= 1.24; builtin float is identical
        inps.error_ts = error_fileContent[:, 1].astype(float) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = np.array([e_ts[i] for i in inps.ex_idx_list])
            inps.error_ts = np.array([
                e_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])

    def plot_timeseries_errorbar(ax, dis_ts, inps):
        """Plot time series with error bars; excluded dates shown in gray."""
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            (_, caps, _) = ax.errorbar(inps.ex_dates, ex_d_ts,
                                       yerr=inps.ex_error_ts,
                                       fmt='-o', color='gray',
                                       ms=inps.marker_size, lw=0, alpha=1,
                                       mfc='gray',
                                       elinewidth=inps.edge_width,
                                       ecolor='black',
                                       capsize=inps.marker_size * 0.5)
            for cap in caps:
                cap.set_markeredgewidth(inps.edge_width)
        # Plot kept dates
        (_, caps, _) = ax.errorbar(dates, d_ts, yerr=inps.error_ts,
                                   fmt='-o', ms=inps.marker_size,
                                   lw=0, alpha=1,
                                   elinewidth=inps.edge_width,
                                   ecolor='black',
                                   capsize=inps.marker_size * 0.5)
        for cap in caps:
            cap.set_markeredgewidth(inps.edge_width)
        return ax

    def plot_timeseries_scatter(ax, dis_ts, inps):
        """Scatter-plot time series; excluded dates shown in gray."""
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            ax.scatter(inps.ex_dates, ex_d_ts,
                       s=inps.marker_size**2, color='gray')  # color='crimson'
        # Plot kept dates
        ax.scatter(dates, d_ts, s=inps.marker_size**2)
        return ax

    def update_timeseries(ax_ts, y, x):
        """Plot point time series displacement at pixel [y, x]"""
        # BUGFIX: original called read_timeseries_yx(ts_file, y, x, ref_yx=...)
        # which (a) passes arguments in the wrong order, (b) uses a keyword
        # that does not exist (signature is ref_y/ref_x), and (c) multiplies
        # the returned (dates, dis) tuple by a float -> TypeError.
        if inps.ref_yx:
            ref_y, ref_x = inps.ref_yx[0], inps.ref_yx[1]
        else:
            ref_y, ref_x = None, None
        d_ts = read_timeseries_yx(y, x, inps.timeseries_file,
                                  ref_y=ref_y, ref_x=ref_x)[1] * inps.unit_fac
        if inps.zero_first:
            d_ts -= d_ts[inps.zero_idx]
        ax_ts.cla()
        if inps.error_file:
            ax_ts = plot_timeseries_errorbar(ax_ts, d_ts, inps)
        else:
            ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)
        if inps.ylim:
            ax_ts.set_ylim(inps.ylim)
        for tick in ax_ts.yaxis.get_major_ticks():
            tick.label.set_fontsize(inps.font_size)

        # Title
        title_ts = 'Y = %d, X = %d' % (y, x)
        try:
            lat = ullat + y * lat_step
            lon = ullon + x * lon_step
            title_ts += ', lat = %.4f, lon = %.4f' % (lat, lon)
        except:
            pass
        if inps.disp_title:
            ax_ts.set_title(title_ts)

        ax_ts = pp.auto_adjust_xaxis_date(ax_ts, inps.yearList,
                                          fontSize=inps.font_size)[0]
        ax_ts.set_xlabel('Time', fontsize=inps.font_size)
        ax_ts.set_ylabel('Displacement [%s]' % inps.disp_unit,
                         fontsize=inps.font_size)
        fig_ts.canvas.draw()

        # Print to terminal
        print('\n---------------------------------------')
        print(title_ts)
        print(d_ts)

        # Slope estimation, on kept dates only
        if inps.ex_date_list:
            inps.yearList_kept = [
                inps.yearList[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ]
            d_ts_kept = [
                d_ts[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ]
            d_slope = stats.linregress(np.array(inps.yearList_kept),
                                       np.array(d_ts_kept))
        else:
            d_slope = stats.linregress(np.array(inps.yearList),
                                       np.array(d_ts))
        print('linear velocity: %.2f +/- %.2f [%s/yr]' %
              (d_slope[0], d_slope[4], inps.disp_unit))
        return d_ts

    # Initial point time series plot
    if inps.yx:
        d_ts = update_timeseries(ax_ts, inps.yx[0], inps.yx[1])
    else:
        d_ts = np.zeros(len(inps.yearList))
        ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)

    def plot_timeseries_event(event):
        """Event function to get y/x from button press"""
        if event.inaxes != ax_v:
            return
        ii = int(event.ydata + 0.5)
        jj = int(event.xdata + 0.5)
        d_ts = update_timeseries(ax_ts, ii, jj)

    # Output
    if inps.save_fig and inps.yx:
        print('save info for pixel ' + str(inps.yx))
        if not inps.fig_base:
            inps.fig_base = 'y%d_x%d' % (inps.yx[0], inps.yx[1])

        # TXT - point time series
        outName = inps.fig_base + '_ts.txt'
        header_info = 'timeseries_file=' + inps.timeseries_file
        header_info += '\ny=%d, x=%d' % (inps.yx[0], inps.yx[1])
        try:
            lat = ullat + inps.yx[0] * lat_step
            lon = ullon + inps.yx[1] * lon_step
            header_info += '\nlat=%.6f, lon=%.6f' % (lat, lon)
        except:
            pass
        if inps.ref_yx:
            header_info += '\nreference pixel: y=%d, x=%d' % (inps.ref_yx[0],
                                                              inps.ref_yx[1])
        else:
            header_info += '\nreference pixel: y=%s, x=%s' % (atr['REF_Y'],
                                                              atr['REF_X'])
        header_info += '\nunit=m/yr'
        np.savetxt(outName,
                   list(zip(np.array(dateList),
                            np.array(d_ts) / inps.unit_fac)),
                   fmt='%s', delimiter=' ', header=header_info)
        print('save time series displacement in meter to ' + outName)

        # Figure - point time series
        outName = inps.fig_base + '_ts.pdf'
        fig_ts.savefig(outName, bbox_inches='tight',
                       transparent=True, dpi=inps.fig_dpi)
        print('save time series plot to ' + outName)

        # Figure - map
        outName = inps.fig_base + '_' + dateList[inps.epoch_num] + '.png'
        fig_v.savefig(outName, bbox_inches='tight',
                      transparent=True, dpi=inps.fig_dpi)
        print('save map plot to ' + outName)

    # Final linking of the canvas to the plots.
    cid = fig_v.canvas.mpl_connect('button_press_event', plot_timeseries_event)
    if inps.disp_fig:
        plt.show()
    fig_v.canvas.mpl_disconnect(cid)
def plot_rms_bar(ax, date_list, rms, cutoff=3., font_size=12,
                 tick_year_num=1, legend_loc='best',
                 disp_legend=True, disp_side_plot=True, disp_thres_text=True,
                 ylabel=r'Residual Phase $\hat \phi_{resid}$ RMS [mm]'):
    """ Bar plot Phase Residual RMS
    Parameters: ax : Axes object
                date_list : list of string in YYYYMMDD format
                rms   : 1D np.array of float for RMS value in mm
                cutoff : cutoff value of MAD outlier detection
                tick_year_num : int, number of years per major tick
                legend_loc : 'upper right' or (0.5, 0.5)
    Returns:    ax : Axes object
    """
    dates, datevector = ptime.date_list2vector(date_list)
    # BUGFIX: convert to np.array so the boolean-mask indexing below
    # (dates[ex_idx]) works; a plain list raises TypeError.
    dates = np.array(dates)
    try:
        # bar width = 3/4 of the most common acquisition interval
        bar_width = min(ut.most_common(np.diff(dates).tolist(), k=2)) * 3 / 4
    except:
        bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
    rms = np.array(rms)

    # Plot all dates
    ax.bar(dates, rms, bar_width.days, color=pp.mplColors[0])

    # Plot reference date (date with minimum RMS)
    ref_idx = np.argmin(rms)
    ax.bar(dates[ref_idx], rms[ref_idx], bar_width.days,
           color=pp.mplColors[1], label='Reference date')

    # Plot exclude dates (outliers by median-absolute-deviation threshold)
    rms_threshold = ut.median_abs_deviation_threshold(rms, center=0.,
                                                      cutoff=cutoff)
    ex_idx = rms > rms_threshold
    if ex_idx.any():
        ax.bar(dates[ex_idx], rms[ex_idx], bar_width.days,
               color='darkgray', label='Exclude date')

    # Plot rms_threshold line
    (ax, xmin, xmax) = pp.auto_adjust_xaxis_date(ax, datevector, font_size,
                                                 every_year=tick_year_num)
    ax.plot(np.array([xmin, xmax]),
            np.array([rms_threshold, rms_threshold]),
            '--k', label='RMS threshold')

    # axis format
    ax = pp.auto_adjust_yaxis(ax, np.append(rms, rms_threshold),
                              font_size, ymin=0.0)
    ax.set_xlabel('Time [years]', fontsize=font_size)
    ax.set_ylabel(ylabel, fontsize=font_size)
    ax.tick_params(which='both', direction='in', labelsize=font_size,
                   bottom=True, top=True, left=True, right=True)

    # 2nd axes for circles
    if disp_side_plot:
        divider = make_axes_locatable(ax)
        ax2 = divider.append_axes("right", "10%", pad="2%")
        ax2.plot(np.ones(rms.shape, np.float32) * 0.5, rms,
                 'o', mfc='none', color=pp.mplColors[0])
        ax2.plot(np.ones(rms.shape, np.float32)[ref_idx] * 0.5, rms[ref_idx],
                 'o', mfc='none', color=pp.mplColors[1])
        if ex_idx.any():
            ax2.plot(np.ones(rms.shape, np.float32)[ex_idx] * 0.5, rms[ex_idx],
                     'o', mfc='none', color='darkgray')
        ax2.plot(np.array([0, 1]),
                 np.array([rms_threshold, rms_threshold]), '--k')

        ax2.set_ylim(ax.get_ylim())
        ax2.set_xlim([0, 1])
        ax2.tick_params(which='both', direction='in', labelsize=font_size,
                        bottom=True, top=True, left=True, right=True)
        ax2.get_xaxis().set_ticks([])
        ax2.get_yaxis().set_ticklabels([])

    if disp_legend:
        ax.legend(loc=legend_loc, frameon=False, fontsize=font_size)

    # rms_threshold text: place above or below the line, whichever fits
    if disp_thres_text:
        ymin, ymax = ax.get_ylim()
        yoff = (ymax - ymin) * 0.1
        if (rms_threshold - ymin) > 0.5 * (ymax - ymin):
            yoff *= -1.
        ax.annotate('Median Abs Dev * {}'.format(cutoff),
                    xy=(xmin + (xmax - xmin) * 0.05, rms_threshold + yoff),
                    color='k', xycoords='data', fontsize=font_size)
    return ax