def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None,
                         ref_site=None, gps_comp='enu2los', horz_az_angle=-90.):
    dates, dis = self.read_gps_los_displacement(
        geom_obj,
        start_date=start_date,
        end_date=end_date,
        ref_site=ref_site,
        gps_comp=gps_comp,
        horz_az_angle=horz_az_angle)[:2]

    # displacement -> velocity, ONLY IF:
    # 1. num of observations > 2, AND
    # 2. time overlap > 1/4
    dis2vel = True
    if len(dates) <= 2:
        dis2vel = False
    elif start_date and end_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        t1 = ptime.date_list2vector([end_date])[0][0]
        if dates[-1] - dates[0] <= (t1 - t0) / 4:
            dis2vel = False

    if dis2vel:
        date_list = [dt.datetime.strftime(i, '%Y%m%d') for i in dates]
        A = time_func.get_design_matrix4time_func(date_list)
        self.velocity = np.dot(np.linalg.pinv(A), dis)[1]
    else:
        self.velocity = np.nan

    return self.velocity, dis
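
# A minimal, self-contained sketch of the displacement -> velocity step above,
# using plain numpy in place of MintPy's time_func helpers. The dates, rate, and
# noise level below are synthetic, for illustration only.
import datetime as dt
import numpy as np

dates = [dt.datetime(2020, 1, 1) + dt.timedelta(days=12 * i) for i in range(30)]
tbase = np.array([(d - dates[0]).days / 365.25 for d in dates])  # time in years
dis = 0.03 * tbase + np.random.normal(0, 0.002, tbase.size)      # 3 cm/yr + noise

A = np.column_stack([np.ones_like(tbase), tbase])  # linear model: offset + rate
m = np.dot(np.linalg.pinv(A), dis)                 # least squares via pseudo-inverse
velocity = m[1]                                    # slope term, in m/yr
print('estimated velocity: {:.4f} m/yr'.format(velocity))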
def search_gps(SNWE, start_date=None, end_date=None, site_list_file=None,
               min_num_solution=50, print_msg=True):
    """Search available GPS sites within the geo bounding box from the UNR website.
    Parameters: SNWE             : tuple of 4 float, indicating (South, North, West, East) in degrees
                start_date       : string in YYYYMMDD format
                end_date         : string in YYYYMMDD format
                site_list_file   : string, path of the local site list file
                min_num_solution : int, minimum number of solutions available
    Returns:    site_names : 1D np.array of string for GPS station names
                site_lats  : 1D np.array for lat
                site_lons  : 1D np.array for lon
    """
    # download the site list file if it is not found in the current directory
    if site_list_file is None:
        site_list_file = os.path.basename(unr_site_list_file)
        if not os.path.isfile(site_list_file):
            dload_site_list(print_msg=print_msg)

    txt_data = np.loadtxt(site_list_file,
                          dtype=bytes,
                          skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)).astype(str)
    site_names = txt_data[:, 0]
    site_lats, site_lons = txt_data[:, 1:3].astype(np.float32).T
    site_lons -= np.round(site_lons / (360.)) * 360.    # wrap longitude into [-180, 180)

    t_start = np.array([dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 7].astype(str)])
    t_end = np.array([dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 8].astype(str)])
    num_solution = txt_data[:, 10].astype(np.int16)

    # limit on space
    idx = ((site_lats >= SNWE[0]) * (site_lats <= SNWE[1]) *
           (site_lons >= SNWE[2]) * (site_lons <= SNWE[3]))

    # limit on time
    if start_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        idx *= t_end >= t0
    if end_date:
        t1 = ptime.date_list2vector([end_date])[0][0]
        idx *= t_start <= t1

    # limit on the number of solutions
    if min_num_solution is not None:
        idx *= num_solution >= min_num_solution

    return site_names[idx], site_lats[idx], site_lons[idx]
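
# Sketch of the spatial/temporal masking logic used in search_gps(), on synthetic
# site records (no download, no UNR file; all values are made up for illustration).
import datetime as dt
import numpy as np

site_lats = np.array([33.1, 35.2, 36.0, 38.5], dtype=np.float32)
site_lons = np.array([-116.5, -118.0, 243.0, -120.2], dtype=np.float32)
site_lons -= np.round(site_lons / 360.) * 360.     # wrap longitude into [-180, 180)
t_start = np.array([dt.datetime(y, 1, 1) for y in (1998, 2005, 2010, 2016)])
t_end = np.array([dt.datetime(y, 1, 1) for y in (2012, 2021, 2022, 2023)])

SNWE = (34.0, 37.0, -120.0, -115.0)
t0, t1 = dt.datetime(2014, 1, 1), dt.datetime(2020, 1, 1)

idx = ((site_lats >= SNWE[0]) * (site_lats <= SNWE[1]) *
       (site_lons >= SNWE[2]) * (site_lons <= SNWE[3]))
idx *= (t_end >= t0) * (t_start <= t1)             # keep sites overlapping [t0, t1]
print('selected site indices:', np.where(idx)[0])  # -> [1 2]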
def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False):
    """Read GPS displacement time-series (defined by start/end_date).
    Parameters: start/end_date : str in YYYYMMDD format
    Returns:    dates     : 1D np.ndarray of datetime.datetime objects
                dis_e/n/u : 1D np.ndarray of displacement in meters in np.float32
                std_e/n/u : 1D np.ndarray of displacement STD in meters in np.float32
    """
    # download the data file if it does not exist
    if not os.path.isfile(self.file):
        self.dload_site(print_msg=print_msg)

    # read dates, dis_e, dis_n, dis_u
    if print_msg:
        print('reading time and displacement in east/north/vertical direction')
    data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str)

    self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") for i in data[:, 1]])
    #self.dates = np.array([ptime.decimal_year2datetime(i) for i in data[:, 2]])
    self.date_list = [x.strftime('%Y%m%d') for x in self.dates]

    (self.dis_e,
     self.dis_n,
     self.dis_u,
     self.std_e,
     self.std_n,
     self.std_u) = data[:, (8, 10, 12, 14, 15, 16)].astype(np.float32).T

    # cut out the specified time range
    t_flag = np.ones(len(self.dates), np.bool_)
    if start_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        t_flag[self.dates < t0] = 0
    if end_date:
        t1 = ptime.date_list2vector([end_date])[0][0]
        t_flag[self.dates > t1] = 0
    self.dates = self.dates[t_flag]
    self.dis_e = self.dis_e[t_flag]
    self.dis_n = self.dis_n[t_flag]
    self.dis_u = self.dis_u[t_flag]
    self.std_e = self.std_e[t_flag]
    self.std_n = self.std_n[t_flag]
    self.std_u = self.std_u[t_flag]

    if display:
        import matplotlib.pyplot as plt
        fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True)
        ax[0].scatter(self.dates, self.dis_e, s=2**2, label='East')
        ax[1].scatter(self.dates, self.dis_n, s=2**2, label='North')
        ax[2].scatter(self.dates, self.dis_u, s=2**2, label='Up')
        plt.show()

    return (self.dates, self.dis_e, self.dis_n, self.dis_u,
            self.std_e, self.std_n, self.std_u)
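
# Sketch of the UNR date parsing above ("%y%b%d", e.g. "10OCT20" = 2010-Oct-20).
# Both the dt.datetime.strptime() form used here and the dt(*time.strptime(...))
# form used in other copies of this reader produce the same datetime objects.
import datetime as dt
import time

date_str = '10OCT20'
d1 = dt.datetime.strptime(date_str, '%y%b%d')
d2 = dt.datetime(*time.strptime(date_str, '%y%b%d')[0:5])
assert d1 == d2
print(d1.strftime('%Y%m%d'))  # 20101020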
def velocity2timeseries(date_list, vel=0.03, display=False):
    '''Simulate displacement time-series from a linear velocity.
    Inputs:  date_list - list of string in YYYYMMDD or YYMMDD format
             vel       - float, velocity in meters per year
             display   - bool, display the simulation or not
    Output:  ts - 2D np.array in size of (date_num, 1), displacement time-series in m
    Example: date_list = pnet.read_baseline_file('bl_list.txt')[0]
             ts0 = velocity2timeseries(date_list, vel=0.03, display=True)
    '''
    tbase_list = ptime.date_list2tbase(date_list)[0]
    ts = vel / 365.25 * np.array(tbase_list)
    ts = ts.reshape(-1, 1)

    if display:
        dates = ptime.date_list2vector(date_list)[0]
        ## display
        marker_size = 5
        plt.figure()
        plt.scatter(dates, ts * 100.0, s=marker_size**2)
        plt.xlabel('Time (years)')
        plt.ylabel('LOS Displacement (cm)')
        plt.title('Displacement time-series with velocity = ' + str(vel) + ' m/yr')
        plt.show()
    return ts
def read_timeseries_yx(y, x, ts_file, ref_y=None, ref_x=None, win_size=1):
    """Read the time-series of one pixel with input y/x.
    Parameters: y/x      : int, row/column number of interest
                ts_file  : string, filename of the time-series HDF5 file
                ref_y/x  : int, row/column number of the reference pixel
                win_size : int, window size centered at the point of interest
    Returns:    dates : 1D np.array of datetime.datetime objects, i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meters
    """
    # read dates
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    print('input y / x: {} / {}'.format(y, x))
    box = (x, y, x + 1, y + 1)
    dis = readfile.read(ts_file, box=box)[0]
    if win_size != 1:
        buf = int(win_size / 2)
        box_win = (x - buf, y - buf, x + buf + 1, y + buf + 1)
        dis_win = readfile.read(ts_file, box=box_win)[0]
        dis = np.nanmean(dis_win.reshape((obj.numDate, -1)), axis=1)

    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
        dis -= readfile.read(ts_file, box=ref_box)[0]

    # start at zero
    dis -= dis[0]
    return dates, dis
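
# Sketch of the win_size averaging above, with a synthetic 3-D cube standing in for
# the HDF5 read (readfile.read returns the same kind of (num_date, length, width) array).
import numpy as np

num_date, length, width = 10, 50, 60
ts = np.random.rand(num_date, length, width).astype(np.float32)

y, x, win_size = 20, 30, 3
buf = win_size // 2
dis_win = ts[:, y - buf:y + buf + 1, x - buf:x + buf + 1]
dis = np.nanmean(dis_win.reshape(num_date, -1), axis=1)  # mean over the 3x3 window
dis -= dis[0]                                            # start at zero
print(dis.shape)                                         # (10,)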
def exclude_dates():
    global inps, dateList

    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []

        if input_ex_date:
            for ex_date in input_ex_date:
                if os.path.isfile(ex_date):
                    ex_date = ptime.read_date_list(ex_date)
                else:
                    ex_date = [ptime.yyyymmdd(ex_date)]
                inps.ex_date_list += list(set(ex_date) - set(inps.ex_date_list))

            # drop dates that do not exist in the input file
            inps.ex_date_list = sorted(list(set(inps.ex_date_list).intersection(dateList)))
            inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
            inps.ex_idx_list = sorted([dateList.index(i) for i in inps.ex_date_list])
            print(('exclude date:' + str(inps.ex_date_list)))
def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False):
    # download the data file if it does not exist
    if not os.path.isfile(self.file):
        self.dload_site(print_msg=print_msg)

    # read dates, dis_e, dis_n, dis_u
    if print_msg:
        print('reading time and displacement in east/north/vertical direction')
    data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str)
    self.dates = np.array([dt(*time.strptime(i, "%y%b%d")[0:5]) for i in data[:, 1]])
    #self.dates = np.array([ptime.decimal_year2datetime(i) for i in data[:, 2]])

    (self.dis_e,
     self.dis_n,
     self.dis_u,
     self.std_e,
     self.std_n,
     self.std_u) = data[:, (8, 10, 12, 14, 15, 16)].astype(np.float32).T

    # cut out the specified time range
    t_flag = np.ones(len(self.dates), np.bool_)
    if start_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        t_flag[self.dates < t0] = 0
    if end_date:
        t1 = ptime.date_list2vector([end_date])[0][0]
        t_flag[self.dates > t1] = 0
    self.dates = self.dates[t_flag]
    self.dis_e = self.dis_e[t_flag]
    self.dis_n = self.dis_n[t_flag]
    self.dis_u = self.dis_u[t_flag]
    self.std_e = self.std_e[t_flag]
    self.std_n = self.std_n[t_flag]
    self.std_u = self.std_u[t_flag]

    if display:
        import matplotlib.pyplot as plt
        fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True)
        ax[0].scatter(self.dates, self.dis_e, s=2**2, label='East')
        ax[1].scatter(self.dates, self.dis_n, s=2**2, label='North')
        ax[2].scatter(self.dates, self.dis_u, s=2**2, label='Up')
        plt.show()

    return (self.dates, self.dis_e, self.dis_n, self.dis_u,
            self.std_e, self.std_n, self.std_u)
def manual_select_pairs_to_remove(stackFile):
    """Manually select interferograms to remove"""
    print('\n-------------------------------------------------------------')
    print('Manually select interferograms to remove')
    print('1) click two dates/points to select one pair of interferogram')
    print('2) repeat until you select all pairs you would like to remove')
    print('3) close the figure to continue the program ...')
    print('-------------------------------------------------------------\n')
    obj = ifgramStack(stackFile)
    obj.open()
    date12ListAll = obj.date12List
    pbase = obj.get_perp_baseline_timeseries(dropIfgram=False)
    dateList = obj.dateList
    datesNum = mdates.date2num(np.array(ptime.date_list2vector(dateList)[0]))

    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))

    # display the network
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pp.plot_network(ax, date12ListAll, dateList, list(pbase),
                         date12List_drop=date12ListDropped)
    print('display the network of interferograms of file: ' + stackFile)

    date_click = []
    date12_click = []

    def onclick(event):
        idx = nearest_neighbor(event.xdata, event.ydata, datesNum, pbase)
        print('click at ' + dateList[idx])
        date_click.append(dateList[idx])
        if len(date_click) % 2 == 0 and date_click[-2] != date_click[-1]:
            [mDate, sDate] = sorted(date_click[-2:])
            mIdx = dateList.index(mDate)
            sIdx = dateList.index(sDate)
            date12 = mDate + '_' + sDate
            if date12 in date12ListAll:
                print('select date12: ' + date12)
                date12_click.append(date12)
                ax.plot([datesNum[mIdx], datesNum[sIdx]],
                        [pbase[mIdx], pbase[sIdx]], 'r', lw=4)
            else:
                print(date12 + ' does not exist in the input file')
        plt.draw()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    plt.show()

    if not ut.yes_or_no('Proceed to drop the ifgrams/date12?'):
        date12_click = None
    return date12_click
def print_timseries_date_stat(dateList):
    datevector = ptime.date_list2vector(dateList)[1]
    print('Start Date: {}'.format(dateList[0]))
    print('End   Date: {}'.format(dateList[-1]))
    print('Number of dates  : {}'.format(len(dateList)))
    print('STD of datetimes : {:.2f} years'.format(np.std(datevector)))
    #print('----------------------')
    #print('List of dates:\n{}'.format(dateList))
    #print('----------------------')
    #print('List of dates in years:\n{}'.format(datevector))
    return
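
# ptime.date_list2vector() is used throughout this module. Below is a minimal
# re-implementation sketch of its presumed behavior (datetime objects plus decimal
# years) for readers without MintPy at hand; the exact leap-year handling in
# MintPy may differ from this 365.25-day approximation.
import datetime as dt

def date_list2vector_sketch(date_list):
    dates = [dt.datetime.strptime(d, '%Y%m%d') for d in date_list]
    datevector = [d.year + (d.timetuple().tm_yday - 1) / 365.25 for d in dates]
    return dates, datevector

dates, datevector = date_list2vector_sketch(['20200101', '20200701', '20210101'])
print(datevector)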
def estimate_S1AB_bias(mintpy_dir, dates, ts_dis):
    """Estimate the bias between Sentinel-1 A and B.
    Parameters: mintpy_dir - str, path of the mintpy working directory
                dates      - list of datetime.datetime objects
                ts_dis     - 2D np.ndarray in size of (num_date, num_pixel) in float32
    Returns:    bias       - 1D np.ndarray in size of (num_pixel) in float32
                flagA/B    - 1D np.ndarray in size of (num_date) in bool
                dates_fit  - list of datetime.datetime objects
                ts_fitA/B  - 1D np.ndarray in size of (num_date_fit) in float32
    """
    num_date = len(dates)
    ts_dis = ts_dis.reshape(num_date, -1)

    # dates/flags for S1A/B
    date_listA = np.loadtxt(os.path.join(mintpy_dir, 'S1A_date.txt'), dtype=str).tolist()
    date_listB = np.loadtxt(os.path.join(mintpy_dir, 'S1B_date.txt'), dtype=str).tolist()
    date_list = sorted(date_listA + date_listB)
    min_date = date_listB[0]
    flagA = np.array([x in date_listA and x >= min_date for x in date_list], dtype=np.bool_)
    flagB = np.array([x in date_listB and x >= min_date for x in date_list], dtype=np.bool_)

    # update date_list to the shared time period only
    date_listA = np.array(date_list)[flagA].tolist()
    date_listB = np.array(date_list)[flagB].tolist()

    # fit
    model = dict(polynomial=1)
    mA = time_func.estimate_time_func(model, date_listA, ts_dis[flagA, :], ref_date=date_listA[0])[1]
    mB = time_func.estimate_time_func(model, date_listB, ts_dis[flagB, :], ref_date=date_listB[0])[1]

    # grab the bias/offset from the fitted time-series
    date_list_fit = ptime.get_date_range(min_date, date_list[-1], dstep=1)
    dates_fit = ptime.date_list2vector(date_list_fit)[0]
    GA_fit = time_func.get_design_matrix4time_func(date_list_fit, model, ref_date=date_listA[0])
    GB_fit = time_func.get_design_matrix4time_func(date_list_fit, model, ref_date=date_listB[0])
    ts_fitA = np.matmul(GA_fit, mA)
    ts_fitB = np.matmul(GB_fit, mB)
    bias = np.median(ts_fitB - ts_fitA, axis=0)

    return bias, flagA, flagB, dates_fit, ts_fitA, ts_fitB
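
# Self-contained sketch of the A/B bias estimate above: fit one linear model per
# sub-track and take the median offset between the two fitted lines. Pure numpy,
# synthetic data; the real code builds time_func design matrices from YYYYMMDD dates.
import numpy as np

t = np.arange(0, 3, 6 / 365.25)            # ~6-day sampling over 3 years
flagA = np.arange(t.size) % 2 == 0         # interleaved A/B acquisitions
flagB = ~flagA
ts = 0.02 * t + np.random.normal(0, 1e-3, t.size)
ts[flagB] += 0.004                         # inject a 4 mm bias on the B dates

def linear_fit(tt, yy):
    G = np.column_stack([np.ones_like(tt), tt])
    return np.linalg.lstsq(G, yy, rcond=None)[0]

mA, mB = linear_fit(t[flagA], ts[flagA]), linear_fit(t[flagB], ts[flagB])
G_all = np.column_stack([np.ones_like(t), t])
bias = np.median(G_all.dot(mB) - G_all.dot(mA))
print('estimated bias: {:.4f} m'.format(bias))  # ~0.004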
def print_timseries_date_stat(dateList):
    datevector = ptime.date_list2vector(dateList)[1]
    print('Start Date: ' + dateList[0])
    print('End   Date: ' + dateList[-1])
    print('Number of acquisitions    : %d' % len(dateList))
    print('Std. of acquisition times : %.2f years' % np.std(datevector))
    print('----------------------')
    print('List of dates:')
    print(dateList)
    print('----------------------')
    print('List of dates in years:')
    print(datevector)
    return
def search_gps(SNWE, start_date=None, end_date=None, site_list_file=None, print_msg=True):
    """Search available GPS sites within the geo bounding box from the UNR website.
    Parameters: SNWE           : tuple of 4 float, indicating (South, North, West, East) in degrees
                start_date     : string in YYYYMMDD format
                end_date       : string in YYYYMMDD format
                site_list_file : string, path of the local site list file
    Returns:    site_names : 1D np.array of string for GPS station names
                site_lats  : 1D np.array for lat
                site_lons  : 1D np.array for lon
    """
    # download the site list file if it is not found in the current directory
    if site_list_file is None:
        site_list_file = os.path.basename(unr_site_list_file)
        if not os.path.isfile(site_list_file):
            dload_site_list(print_msg=print_msg)

    txt_data = np.loadtxt(site_list_file,
                          dtype=bytes,
                          skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)).astype(str)
    site_names = txt_data[:, 0]
    site_lats, site_lons = txt_data[:, 1:3].astype(np.float32).T
    site_lons -= np.round(site_lons / (360.)) * 360.    # wrap longitude into [-180, 180)
    t_start = np.array([dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 7].astype(str)])
    t_end = np.array([dt(*time.strptime(i, "%Y-%m-%d")[0:5]) for i in txt_data[:, 8].astype(str)])

    # limit on space
    idx = ((site_lats >= SNWE[0]) * (site_lats <= SNWE[1]) *
           (site_lons >= SNWE[2]) * (site_lons <= SNWE[3]))

    # limit on time
    if start_date:
        t0 = ptime.date_list2vector([start_date])[0][0]
        idx *= t_end >= t0
    if end_date:
        t1 = ptime.date_list2vector([end_date])[0][0]
        idx *= t_start <= t1

    return site_names[idx], site_lats[idx], site_lons[idx]
def velo_disp(inps):
    """Calculate displacement for the startDate_endDate period based on the linear
    assumption and velocity.h5"""
    data, atr = readfile.read('geo_velocity.h5')

    # velocity to displacement
    dt1, dt2 = ptime.date_list2vector([inps.startDate, inps.endDate])[0]
    data *= (dt2 - dt1).days / 365.25

    # displacement to phase
    range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
    data *= range2phase

    # write atr
    atr['PROCESSOR'] = 'roipac'
    atr['FILE_TYPE'] = '.unw'
    atr['UNIT'] = 'radian'
    out_file = 'geo_' + '{}_{}.unw'.format(inps.startDate, inps.endDate)
    writefile.write(data, out_file=out_file, metadata=atr)
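
# Sketch of the unit chain in velo_disp(): velocity [m/yr] -> displacement [m] over
# the date span -> phase [radian] at the radar wavelength. Pure numpy with made-up
# values; the wavelength below is a typical Sentinel-1 C-band value, for illustration.
import datetime as dt
import numpy as np

vel = 0.05                                   # 5 cm/yr LOS velocity
dt1, dt2 = dt.datetime(2017, 3, 1), dt.datetime(2019, 9, 1)
wavelength = 0.0555                          # meters

dis = vel * (dt2 - dt1).days / 365.25        # linear displacement, in meters
phase = dis * (-4. * np.pi / wavelength)     # same range2phase convention as above
print('{:.3f} m -> {:.1f} rad'.format(dis, phase))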
def read_timeseries_info():
    global atr, k, h5, dateList, tims, date_num, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print(('input file is ' + k + ': ' + inps.timeseries_file))
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['GIANT_TS']:
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    date_num = len(dateList)
    inps.dates, tims = ptime.date_list2vector(dateList)
def read_timeseries_info():
    """Read basic information about the timeseries file being viewed"""
    global atr, k, h5, dateList, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print(('input file is ' + k + ': ' + inps.timeseries_file))
    if k not in ['timeseries', 'giantTimeseries']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['giantTimeseries']:
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d')
                    for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    inps.num_date = len(dateList)
    inps.dates, inps.tims = ptime.date_list2vector(dateList)
def read_exclude_date(input_ex_date, dateListAll):
    """
    Parameters: input_ex_date : list of string in YYYYMMDD or filenames for the excluded dates
                dateListAll   : list of string in YYYYMMDD for all dates
    Returns:    ex_date_list  : list of string in YYYYMMDD for the excluded dates
                ex_dates      : list of datetime.datetime objects for the excluded dates
                ex_flag       : 1D np.ndarray in size of (num_date), 1/True for kept, 0/False for excluded
    """
    # default values
    ex_date_list = []
    ex_dates = []
    ex_flag = np.ones((len(dateListAll)), np.bool_)

    ex_date_list = ptime.read_date_list(input_ex_date, date_list_all=dateListAll)
    if ex_date_list:
        ex_dates = ptime.date_list2vector(ex_date_list)[0]
        for i in ex_date_list:
            ex_flag[dateListAll.index(i)] = False
        vprint('exclude date:' + str(ex_date_list))

    return ex_date_list, ex_dates, ex_flag
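
# Sketch of the exclude-date bookkeeping above, with plain lists in place of
# ptime.read_date_list() (synthetic dates; True in ex_flag means the date is kept).
import numpy as np

dateListAll = ['20200101', '20200113', '20200125', '20200206']
ex_date_list = ['20200113', '20200206']

ex_flag = np.ones(len(dateListAll), np.bool_)
for i in ex_date_list:
    ex_flag[dateListAll.index(i)] = False
print(ex_flag)  # [ True False  True False]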
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    print('\n*************** Spatial Average ******************')
    mean_list, date_list = ut.spatial_average(inps.file,
                                              datasetName=inps.datasetName,
                                              maskFile=inps.mask_file,
                                              saveList=True)
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    if inps.disp_fig and k == 'timeseries':
        dates, datevector = ptime.date_list2vector(date_list)

        # plot
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(dates, mean_list, '-o')  #, lw=2, ms=16, alpha=0.7, mfc='crimson'
        ax.set_title('Spatial Average', fontsize=12)
        ax = pp.auto_adjust_xaxis_date(ax, datevector)[0]
        ax.set_xlabel('Time [years]', fontsize=12)
        ax.set_ylabel('Mean', fontsize=12)
        plt.show()
    return
def read_timeseries_yx(y, x, ts_file, ref_y=None, ref_x=None, zero_first=True,
                       win_size=1, unit='m', method='mean', print_msg=True):
    """Read the time-series of one pixel with input y/x.
    Parameters: y/x        - int, row/column number of interest
                ts_file    - string, filename of the time-series HDF5 file
                ref_y/x    - int, row/column number of the reference pixel
                zero_first - bool, shift the time-series so that it starts from zero
                win_size   - int, window size centered at the point of interest
                unit       - str, output displacement unit
                method     - str, method to calculate the output displacement and its dispersity
    Returns:    dates      - 1D np.ndarray of datetime.datetime objects, i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis        - 1D np.ndarray of float32, displacement
                dis_std    - 1D np.ndarray of float32, displacement dispersity
    """
    # read dates
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    if print_msg:
        print('input y / x: {} / {}'.format(y, x))
    box = (x, y, x + 1, y + 1)
    dis = readfile.read(ts_file, box=box)[0]
    dis_std = None

    if win_size != 1:
        buf = int(win_size / 2)
        box_win = (x - buf, y - buf, x + buf + 1, y + buf + 1)
        dis_win = readfile.read(ts_file, box=box_win)[0].reshape(obj.numDate, -1)

        if method == 'mean':
            dis = np.nanmean(dis_win, axis=1)
            dis_std = np.nanstd(dis_win, axis=1)
        elif method == 'median':
            dis = np.nanmedian(dis_win, axis=1)
            # MAD over the window pixels for each date, matching the shape of the mean branch
            dis_std = median_abs_deviation(dis_win, axis=1)
        else:
            raise ValueError('un-recognized method: {}'.format(method))

    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
        dis -= readfile.read(ts_file, box=ref_box)[0]

    # start at zero
    if zero_first:
        dis -= dis[0]

    # custom output unit
    # note: dis_std is None for win_size == 1, thus the extra check before scaling
    if unit == 'm':
        pass
    elif unit == 'cm':
        dis *= 100.
        if dis_std is not None:
            dis_std *= 100.
    elif unit == 'mm':
        dis *= 1000.
        if dis_std is not None:
            dis_std *= 1000.
    else:
        raise ValueError('un-supported output unit: {}'.format(unit))

    return dates, dis, dis_std
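
# Sketch of the 'mean' vs 'median' window statistics above. The real code calls
# scipy.stats.median_abs_deviation (default scale=1.0, i.e. the raw MAD); here it
# is approximated with a plain-numpy equivalent on synthetic data.
import numpy as np

num_date = 8
dis_win = np.random.rand(num_date, 9).astype(np.float32)   # 3x3 window, flattened

dis_mean = np.nanmean(dis_win, axis=1)
dis_std = np.nanstd(dis_win, axis=1)
dis_med = np.nanmedian(dis_win, axis=1)
dis_mad = np.nanmedian(np.abs(dis_win - dis_med[:, None]), axis=1)
print(dis_mean.shape, dis_mad.shape)                        # (8,) (8,)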
def read_data(inps):
    """
    Returns: defo: 2D np.array with invalid/masked-out pixels in NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])
    ext = os.path.splitext(inps.file)[1]

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixels with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked-out pixels to NaN
        inps.phase[inps.mask == 0] = np.nan
    else:
        raise ValueError("input file not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change the reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan

    # output filename
    if not inps.outfile:
        out_dir = os.path.dirname(inps.file)
        proj_name = sensor.project_name2sensor_name(out_dir)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])
        inps.outfile = os.path.join(out_dir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
def read_init_info(inps):
    # Time Series Info
    atr = readfile.read_attribute(inps.file[0])
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(inps.file[0])
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(inps.file[0])
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(inps.file[0])
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)
    inps.seconds = atr.get('CENTER_LINE_UTC', 0)

    if not inps.file_label:
        inps.file_label = []
        for fname in inps.file:
            fbase = os.path.splitext(os.path.basename(fname))[0]
            fbase = fbase.replace('timeseries', '')
            inps.file_label.append(fbase)

    # default mask file
    if not inps.mask_file and 'msk' not in inps.file[0]:
        dir_name = os.path.dirname(inps.file[0])
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    ## date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [i for i in inps.date_list if int(i) >= int(inps.start_date)]
    if inps.end_date:
        inps.date_list = [i for i in inps.date_list if int(i) <= int(inps.end_date)]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)

    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # reference date/index
    if not inps.ref_date:
        inps.ref_date = atr.get('REF_DATE', None)
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = None

    # date/index of interest for initial display
    if not inps.idx:
        if (not inps.ref_idx) or (inps.ref_idx < inps.num_date / 2.):
            inps.idx = inps.num_date - 2
        else:
            inps.idx = 2

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr, disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign the plot function
        inps.ts_plot_func = plot_ts_errorbar

        # read the error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(np.float32) * inps.unit_fac

        # update the error list with the excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for the 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file and coordinate object
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    ## size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: ' + str(data_box))
    vprint('subset coverage in y/x: ' + str(inps.pix_box))
    vprint('data   coverage in lat/lon: ' + str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: ' + str(inps.geo_box))
    vprint('------------------------------------------------------------------------')

    # calculate multilook_num
    # ONLY IF:
    #   inps.multilook is True (no --nomultilook input) AND
    #   inps.multilook_num == 1 (no --multilook-num input)
    # Note: inps.multilook is used for this check ONLY
    # Note: multilooking is only applied to the 3D data cubes and their related operations:
    #   e.g. spatial indexing, referencing, etc. All the other variables are in the original grid
    #   so that users get the same result as the non-multilooked version.
    if inps.multilook and inps.multilook_num == 1:
        inps.multilook_num = pp.auto_multilook_num(inps.pix_box, inps.num_date,
                                                   max_memory=inps.maxMemory,
                                                   print_msg=inps.print_msg)

    ## reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        # set longitude to [-180, 180)
        if inps.coord_unit.lower().startswith('deg') and inps.ref_lalo[1] >= 180.:
            inps.ref_lalo[1] -= 360.
        # ref_lalo --> ref_yx if not set in cmd
        if not inps.ref_yx:
            inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1], print_msg=False)[0:2]

    # use REF_Y/X if ref_yx is not set in cmd
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = (int(atr['REF_Y']), int(atr['REF_X']))

    # ref_yx --> ref_lalo if in geo-coord
    # for plotting purposes only
    if inps.ref_yx and 'Y_FIRST' in atr.keys():
        inps.ref_lalo = inps.coord.radar2geo(inps.ref_yx[0], inps.ref_yx[1], print_msg=False)[0:2]

    # do not plot the native reference point if it is out of the coverage due to subset
    if (inps.ref_yx and 'Y_FIRST' in atr.keys()
            and inps.ref_yx == (int(atr.get('REF_Y', -999)), int(atr.get('REF_X', -999)))
            and not (inps.pix_box[0] <= inps.ref_yx[1] < inps.pix_box[2]
                     and inps.pix_box[1] <= inps.ref_yx[0] < inps.pix_box[3])):
        inps.disp_ref_pixel = False
        print('the native REF_Y/X is out of the subset box, thus do not display')

    ## initial pixel coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0], inps.lalo[1], print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0], inps.yx[1], print_msg=False)[0:2]
    except:
        inps.lalo = None

    ## figure settings
    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # auto adjust the transparency value when showing shaded-relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    ## display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)

    ## fit a suite of time functions to the time series
    inps.model, inps.num_param = ts2vel.read_inps2model(inps, date_list=inps.date_list)

    # dense TS for plotting
    inps.date_list_fit = ptime.get_date_range(inps.date_list[0], inps.date_list[-1])
    inps.dates_fit = ptime.date_list2vector(inps.date_list_fit)[0]
    inps.G_fit = time_func.get_design_matrix4time_func(
        date_list=inps.date_list_fit,
        model=inps.model,
        seconds=inps.seconds)

    return inps, atr
def read_init_info(inps):
    # Time Series Info
    ts_file0 = inps.file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)

    if not inps.file_label:
        inps.file_label = [str(i) for i in list(range(len(inps.file)))]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [i for i in inps.date_list if int(i) >= int(inps.start_date)]
    if inps.end_date:
        inps.date_list = [i for i in inps.date_list if int(i) <= int(inps.end_date)]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)

    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    #if atr['REF_DATE'] in inps.date_list:
    #    inps.ref_idx = inps.date_list.index(atr['REF_DATE'])
    #else:
    #    inps.ref_idx = 0
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = 3

    if not inps.idx:
        if inps.ref_idx < inps.num_date / 2.:
            inps.idx = inps.num_date - 3
        else:
            inps.idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr, disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign the plot function
        inps.ts_plot_func = plot_ts_errorbar

        # read the error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(np.float32) * inps.unit_fac

        # update the error list with the excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for the 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: ' + str(data_box))
    vprint('subset coverage in y/x: ' + str(inps.pix_box))
    vprint('data   coverage in lat/lon: ' + str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: ' + str(inps.geo_box))
    vprint('------------------------------------------------------------------------')

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1], print_msg=False)[0:2]
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0], inps.lalo[1], print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0], inps.yx[1], print_msg=False)[0:2]
    except:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # auto adjust the transparency value when showing shaded-relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)

    return inps, atr
def read_data(inps):
    """
    Returns: defo: 2D np.array with invalid/masked-out pixels in NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixels with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked-out pixels to NaN
        inps.phase[inps.mask == 0] = np.nan
    else:
        raise ValueError("input file not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change the reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    inps.height = readfile.read(inps.geom_file, datasetName='height')[0]

    # convert the height from ellipsoid to geoid (mean sea level)
    # ref: https://github.com/vandry/geoidheight
    if inps.ellipsoid2geoid:
        # import the geoid module
        try:
            import geoid
        except ImportError:
            raise ImportError('Can not import geoidheight!')

        # calculate the offset and correct the height
        egm_file = os.path.join(os.path.dirname(geoid.__file__), 'geoids/egm2008-1.pgm')
        gh_obj = geoid.GeoidHeight(egm_file)
        h_offset = gh_obj.get(lat=np.nanmean(inps.lat), lon=np.nanmean(inps.lon))
        inps.height -= h_offset

        # print message
        msg = 'convert height from ellipsoid to geoid'
        msg += '\n\tby subtracting a constant offset of {:.2f} m'.format(h_offset)
        print(msg)

    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan
    inps.height[inps.mask == 0] = np.nan

    # output filename
    if not inps.outfile:
        proj_name = sensor.project_name2sensor_name(inps.file)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])

    if not inps.outdir:
        inps.outdir = os.path.dirname(inps.file)
    inps.outfile = os.path.join(inps.outdir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
                transparent=True, dpi=fig_dpi)
    # plt.show()

    # Backup - Plot velocity with POI displacement time series
    # read ts data
    # load coordinates of the observed wells
    # ll = pd.read_csv('/scratch/hpham/insar/insar_pahrump/GPS_points_PV1.csv')  # only gps sta
    #ll = pd.read_csv('/scratch/hpham/insar/insar_pahrump/GPS_points_PV1_full.csv')
    # print(f'Reading ifile_loc {ifile_loc} \n')
    ll = pd.read_csv(ifile_loc)
    obj = timeseries(ts_file)
    obj.open()
    dates = ptime.date_list2vector(obj.dateList)[0]
    df = pd.DataFrame({'Date': dates})
    for i in range(ll.shape[0]):
        cur_point = str(ll['Name'][i])
        lat, lon = ll['Latitude'][i], ll['Longitude'][i]
        print(f'\nName={cur_point}, lat={lat}, lon={lon}')
        y, x = coord.geo2radar(lat, lon)[0:2]
        print(f'x={x}, y={y}')
        print('y/x: {}/{}'.format(y, x))
        d_ts = np.squeeze(readfile.read(ts_file, box=(x, y, x + 1, y + 1))[0]) * 100.
        # d_ts = d_ts/  #(Unw_Phase * wavelength in cm) / (-4 * PI * cos(rad(incident_angle)))
        # https://forum.step.esa.int/t/subswath-iw2-sentinel-1a/4322/4
        try:
def plot_rms_bar(ax, date_list, rms, cutoff=3., font_size=12,
                 tick_year_num=1, legend_loc='best',
                 disp_legend=True, disp_side_plot=True, disp_thres_text=False,
                 ylabel=r'Residual Phase $\hat \phi_{resid}$ RMS [mm]'):
    """Bar plot of the phase residual RMS.
    Parameters: ax            : Axes object
                date_list     : list of string in YYYYMMDD format
                rms           : 1D np.array of float for the RMS values in mm
                cutoff        : cutoff value of the MAD outlier detection
                tick_year_num : int, number of years per major tick
                legend_loc    : 'upper right' or (0.5, 0.5)
    Returns:    ax            : Axes object
    """
    dates, datevector = ptime.date_list2vector(date_list)
    dates = np.array(dates)
    try:
        bar_width = min(ut.most_common(np.diff(dates).tolist(), k=2)) * 3 / 4
    except:
        bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
    rms = np.array(rms)

    # plot all dates
    ax.bar(dates, rms, bar_width.days, color=pp.mplColors[0])

    # plot the reference date
    ref_idx = np.argmin(rms)
    ax.bar(dates[ref_idx], rms[ref_idx], bar_width.days,
           color=pp.mplColors[1], label='Reference date')

    # plot the excluded dates
    rms_threshold = ut.median_abs_deviation_threshold(rms, center=0., cutoff=cutoff)
    ex_idx = rms > rms_threshold
    if not np.all(ex_idx == False):
        ax.bar(dates[ex_idx], rms[ex_idx], bar_width.days,
               color='darkgray', label='Exclude date')

    # plot the rms_threshold line
    (ax, xmin, xmax) = pp.auto_adjust_xaxis_date(ax, datevector, font_size,
                                                 every_year=tick_year_num)
    ax.plot(np.array([xmin, xmax]), np.array([rms_threshold, rms_threshold]),
            '--k', label='Median Abs Dev * {}'.format(cutoff))

    # axis format
    ax = pp.auto_adjust_yaxis(ax, np.append(rms, rms_threshold), font_size, ymin=0.0)
    ax.set_xlabel('Time [years]', fontsize=font_size)
    ax.set_ylabel(ylabel, fontsize=font_size)
    ax.tick_params(which='both', direction='in', labelsize=font_size,
                   bottom=True, top=True, left=True, right=True)

    # 2nd axes for circles
    if disp_side_plot:
        divider = make_axes_locatable(ax)
        ax2 = divider.append_axes("right", "10%", pad="2%")
        ax2.plot(np.ones(rms.shape, np.float32) * 0.5, rms,
                 'o', mfc='none', color=pp.mplColors[0])
        ax2.plot(np.ones(rms.shape, np.float32)[ref_idx] * 0.5, rms[ref_idx],
                 'o', mfc='none', color=pp.mplColors[1])
        if not np.all(ex_idx == False):
            ax2.plot(np.ones(rms.shape, np.float32)[ex_idx] * 0.5, rms[ex_idx],
                     'o', mfc='none', color='darkgray')
        ax2.plot(np.array([0, 1]), np.array([rms_threshold, rms_threshold]), '--k')

        ax2.set_ylim(ax.get_ylim())
        ax2.set_xlim([0, 1])
        ax2.tick_params(which='both', direction='in', labelsize=font_size,
                        bottom=True, top=True, left=True, right=True)
        ax2.get_xaxis().set_ticks([])
        ax2.get_yaxis().set_ticklabels([])

    if disp_legend:
        ax.legend(loc=legend_loc, frameon=False, fontsize=font_size)

    # rms_threshold text
    if disp_thres_text:
        ymin, ymax = ax.get_ylim()
        yoff = (ymax - ymin) * 0.1
        if (rms_threshold - ymin) > 0.5 * (ymax - ymin):
            yoff *= -1.
        ax.annotate('Median Abs Dev * {}'.format(cutoff),
                    xy=(xmin + (xmax - xmin) * 0.05, rms_threshold + yoff),
                    color='k', xycoords='data', fontsize=font_size)
    return ax
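
# Sketch of the ut.median_abs_deviation_threshold() outlier rule used above, as I
# understand it: with center=0 the deviation is just |rms|, scaled to a Gaussian-
# equivalent sigma (factor 1.4826) and multiplied by the cutoff. Assumed behavior,
# plain numpy, synthetic values.
import numpy as np

rms = np.array([2.1, 1.8, 2.5, 2.0, 9.7, 2.2])   # mm, one obvious outlier
cutoff = 3.
mad = np.median(np.abs(rms - 0.)) * 1.4826       # MAD about center=0, ~sigma
rms_threshold = mad * cutoff
print('threshold: {:.2f} mm, exclude: {}'.format(rms_threshold, rms > rms_threshold))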
def vtec2iono_ramp_timeseries(date_list, vtec_list, geom_file, iono_file,
                              sub_tec_ratio=None, ds_dict_ext=None, update_mode=True):
    """Convert zenith TEC to the 2D slant range delay (ramp due to the incidence angle
    variation) and write it to an HDF5 time-series file.
    Parameters: date_list     - list of str, dates in YYYYMMDD format
                vtec_list     - list of float32, zenith TEC in TECU
                geom_file     - str, path of the geometry file including incidenceAngle data
                iono_file     - str, path of the output iono ramp time-series file
                sub_tec_ratio - float or str, scaling applied to the input vTEC
                                (a constant number or 'adaptive' based on the day of year)
                ds_dict_ext   - dict, extra dictionary of datasets to be saved into the HDF5 file
                update_mode   - bool
    Returns:    iono_file     - str, path of the output iono ramp time-series file
    """
    top_perc_file = os.path.join(os.path.dirname(mintpy.__file__), 'data', 'top_tec_perc_s1.txt')

    # prepare geometry
    (iono_inc_angle,
     iono_lat,
     iono_lon,
     iono_height) = iono.prep_geometry_iono(geom_file, print_msg=True)

    # prepare date/time
    num_date = len(date_list)
    if len(vtec_list) != num_date:
        msg = 'Input tec_list and date_list have different sizes!'
        msg += '\nFor acquisitions without TEC data, set it to NaN.'
        raise ValueError(msg)

    meta = readfile.read_attribute(geom_file)
    length = int(meta['LENGTH'])
    width = int(meta['WIDTH'])
    freq = SPEED_OF_LIGHT / float(meta['WAVELENGTH'])

    # Note: scaling gives a slightly better RMSE for SenD but a much worse RMSE for SenA
    # and Alos2, thus it is not used by default.
    if sub_tec_ratio is not None:
        if ut.is_number(sub_tec_ratio):
            print('multiply VTEC by {}'.format(sub_tec_ratio))
            vtec_list = (np.array(vtec_list).flatten() * float(sub_tec_ratio)).tolist()
        elif sub_tec_ratio.startswith('adap'):
            dates = ptime.date_list2vector(date_list)[0]
            ydays = np.array([x.timetuple().tm_yday for x in dates])
            fc = np.loadtxt(top_perc_file, dtype=bytes).astype(np.float32)
            print('multiply VTEC adaptively based on the day of the year from: {}'.format(top_perc_file))
            sub_perc = fc[:, 2][ydays]
            vtec_list = (np.array(vtec_list).flatten() * sub_perc).tolist()

    # loop to calculate the range delay (ramp)
    print('calculating ionospheric phase ramp time-series from TEC ...')
    ts_ramp = np.zeros((num_date, length, width), dtype=np.float32)
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i, date_str in enumerate(date_list):
        ts_ramp[i, :, :] = iono.vtec2range_delay(
            vtec_list[i],
            inc_angle=iono_inc_angle,
            freq=freq,
        )
        prog_bar.update(i + 1, suffix=date_str)
    prog_bar.close()

    ## output
    # prepare metadata
    meta['FILE_TYPE'] = 'timeseries'
    meta['UNIT'] = 'm'
    meta['IONO_LAT'] = iono_lat
    meta['IONO_LON'] = iono_lon
    meta['IONO_HEIGHT'] = iono_height
    meta['IONO_INCIDENCE_ANGLE'] = np.nanmean(iono_inc_angle)
    # absolute delay without double referencing
    for key in ['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON', 'REF_DATE']:
        if key in meta.keys():
            meta.pop(key)

    # prepare the data matrix
    ds_dict = {}
    ds_dict['date'] = np.array(date_list, dtype=np.string_)
    ds_dict['vtec'] = np.array(vtec_list, dtype=np.float32)
    ds_dict['timeseries'] = ts_ramp

    # add the extra datasets if specified, e.g. vtec_gim, vtec_top, vtec_sub
    if ds_dict_ext is not None:
        ds_names = ds_dict.keys()
        for ds_name, ds_val in ds_dict_ext.items():
            if ds_name not in ds_names:
                ds_dict[ds_name] = ds_val

    # write to disk
    writefile.write(ds_dict, iono_file, metadata=meta)

    return iono_file
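
# Sketch of the zenith-TEC -> slant-range-delay conversion that iono.vtec2range_delay()
# presumably performs: the standard 40.31/f^2 group-delay term mapped to the slant
# direction with 1/cos(incidence). This is the textbook single-layer model; MintPy's
# actual mapping function may be more elaborate. All values below are illustrative.
import numpy as np

SPEED_OF_LIGHT = 299792458.0
wavelength = 0.0555                         # meters (Sentinel-1 C-band)
freq = SPEED_OF_LIGHT / wavelength          # ~5.4 GHz
vtec = 20.0                                 # zenith TEC in TECU (1 TECU = 1e16 el/m^2)
inc_angle = np.deg2rad(35.0)

zenith_delay = 40.31 * (vtec * 1e16) / freq**2        # meters, one-way
slant_delay = zenith_delay / np.cos(inc_angle)
print('{:.3f} m zenith, {:.3f} m slant'.format(zenith_delay, slant_delay))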
def plot_rms_bar(ax, date_list, rms, cutoff=3., font_size=12,
                 tick_year_num=1, legend_loc='best',
                 disp_legend=True, disp_side_plot=True, disp_thres_text=True,
                 ylabel=r'Residual Phase $\hat \phi_{resid}$ RMS [mm]'):
    """Bar plot of the phase residual RMS.
    Parameters: ax            : Axes object
                date_list     : list of string in YYYYMMDD format
                rms           : 1D np.array of float for the RMS values in mm
                cutoff        : cutoff value of the MAD outlier detection
                tick_year_num : int, number of years per major tick
                legend_loc    : 'upper right' or (0.5, 0.5)
    Returns:    ax            : Axes object
    """
    dates, datevector = ptime.date_list2vector(date_list)
    dates = np.array(dates)
    try:
        bar_width = min(ut.most_common(np.diff(dates).tolist(), k=2)) * 3 / 4
    except:
        bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
    rms = np.array(rms)

    # plot all dates
    ax.bar(dates, rms, bar_width.days, color=pp.mplColors[0])

    # plot the reference date
    ref_idx = np.argmin(rms)
    ax.bar(dates[ref_idx], rms[ref_idx], bar_width.days,
           color=pp.mplColors[1], label='Reference date')

    # plot the excluded dates
    rms_threshold = ut.median_abs_deviation_threshold(rms, center=0., cutoff=cutoff)
    ex_idx = rms > rms_threshold
    if not np.all(ex_idx == False):
        ax.bar(dates[ex_idx], rms[ex_idx], bar_width.days,
               color='darkgray', label='Exclude date')

    # plot the rms_threshold line
    (ax, xmin, xmax) = pp.auto_adjust_xaxis_date(ax, datevector, font_size,
                                                 every_year=tick_year_num)
    ax.plot(np.array([xmin, xmax]), np.array([rms_threshold, rms_threshold]),
            '--k', label='RMS threshold')

    # axis format
    ax = pp.auto_adjust_yaxis(ax, np.append(rms, rms_threshold), font_size, ymin=0.0)
    ax.set_xlabel('Time [years]', fontsize=font_size)
    ax.set_ylabel(ylabel, fontsize=font_size)
    ax.tick_params(which='both', direction='in', labelsize=font_size,
                   bottom=True, top=True, left=True, right=True)

    # 2nd axes for circles
    if disp_side_plot:
        divider = make_axes_locatable(ax)
        ax2 = divider.append_axes("right", "10%", pad="2%")
        ax2.plot(np.ones(rms.shape, np.float32) * 0.5, rms,
                 'o', mfc='none', color=pp.mplColors[0])
        ax2.plot(np.ones(rms.shape, np.float32)[ref_idx] * 0.5, rms[ref_idx],
                 'o', mfc='none', color=pp.mplColors[1])
        if not np.all(ex_idx == False):
            ax2.plot(np.ones(rms.shape, np.float32)[ex_idx] * 0.5, rms[ex_idx],
                     'o', mfc='none', color='darkgray')
        ax2.plot(np.array([0, 1]), np.array([rms_threshold, rms_threshold]), '--k')

        ax2.set_ylim(ax.get_ylim())
        ax2.set_xlim([0, 1])
        ax2.tick_params(which='both', direction='in', labelsize=font_size,
                        bottom=True, top=True, left=True, right=True)
        ax2.get_xaxis().set_ticks([])
        ax2.get_yaxis().set_ticklabels([])

    if disp_legend:
        ax.legend(loc=legend_loc, frameon=False, fontsize=font_size)

    # rms_threshold text
    if disp_thres_text:
        ymin, ymax = ax.get_ylim()
        yoff = (ymax - ymin) * 0.1
        if (rms_threshold - ymin) > 0.5 * (ymax - ymin):
            yoff *= -1.
        ax.annotate('Median Abs Dev * {}'.format(cutoff),
                    xy=(xmin + (xmax - xmin) * 0.05, rms_threshold + yoff),
                    color='k', xycoords='data', fontsize=font_size)
    return ax
def read_data(inps):
    # metadata
    atr = readfile.read_attribute(inps.file)
    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change the reference pixel
    if inps.ref_lalo:
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_y, ref_x = coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1])[0:2]
            inps.ref_yx = [ref_y, ref_x]
        else:
            raise ValueError("input file is not geocoded --> reference point in lat/lon is NOT supported")
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_lat, ref_lon = coord.radar2geo(inps.ref_yx[0], inps.ref_yx[1])[0:2]
            atr['REF_LAT'] = ref_lat
            atr['REF_LON'] = ref_lon
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']
    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0]
        # velocity to displacement
        print('convert velocity to displacement for {}'.format(atr['DATE12']))
        date1, date2 = atr['DATE12'].split('_')
        dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
        data *= (dt2 - dt1).days / 365.25
        # displacement to phase
        print('convert displacement to phase in radian')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = os.path.join(os.path.dirname(inps.file), '{}.unw'.format(atr['DATE12']))

    elif k == 'timeseries':
        # date1 and date2
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)

        # read/prepare data
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]

        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError("No '-' in input dataset! It is required for {}".format(dname))
        else:
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read/prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            slice_name1 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][0]
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')

        # read/prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        elif dname == 'connectComponent':
            atr['FILE_TYPE'] = '.conncomp'
            atr['UNIT'] = '1'
            atr['DATA_TYPE'] = 'byte'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if inps.outfile:
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'

            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # mask
    if inps.mask_file:
        for m_file in inps.mask_file:
            print('mask data based on input file: {}'.format(m_file))
            mask = readfile.read(m_file)[0]
            mask *= ~np.isnan(data)
            data[mask == 0] = np.nan

    # get rid of the starting . if output as an HDF5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile