def get_exclude_date(inps, date_list_all): '''Get inps.ex_date full list Inputs: inps - Namespace, date_list_all - list of string for all available date in YYYYMMDD format Output: inps.ex_date - list of string for exclude date in YYYYMMDD format ''' yy_list_all = ptime.yyyymmdd2years(date_list_all) # 1. template_file if inps.template_file: print 'read option from template file: ' + inps.template_file inps = read_template2inps(inps.template_file, inps) # 2. ex_date input_ex_date = list(inps.ex_date) inps.ex_date = [] if input_ex_date: for ex_date in input_ex_date: if os.path.isfile(ex_date): ex_date = ptime.read_date_list(ex_date) else: ex_date = [ptime.yyyymmdd(ex_date)] inps.ex_date += list(set(ex_date) - set(inps.ex_date)) # delete dates not existed in input file inps.ex_date = list(set(inps.ex_date).intersection(date_list_all)) print 'exclude date:' + str(inps.ex_date) # 3. min_date if inps.min_date: print 'start date: ' + inps.min_date yy_min = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.min_date)) for i in range(len(date_list_all)): date = date_list_all[i] if yy_list_all[i] < yy_min and date not in inps.ex_date: print ' remove date: ' + date inps.ex_date.append(date) # 4. max_date if inps.max_date: print 'end date: ' + inps.max_date yy_max = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.max_date)) for i in range(len(date_list_all)): date = date_list_all[i] if yy_list_all[i] > yy_max and date not in inps.ex_date: print ' remove date: ' + date inps.ex_date.append(date) return inps.ex_date
def correct_lod_file(File, outFile=None): # Check Sensor Type print 'input file: ' + File atr = readfile.read_attribute(File) k = atr['FILE_TYPE'] platform = atr['PLATFORM'] print 'platform: ' + platform if not platform.lower() in ['env', 'envisat']: print 'No need to correct LOD for ' + platform sys.exit(1) # Output Filename if not outFile: ext = os.path.splitext(File)[1] outFile = os.path.splitext(File)[0] + '_LODcor' + ext # Get LOD phase ramp from empirical model width = int(atr['WIDTH']) length = int(atr['FILE_LENGTH']) range_resolution = float(atr['RANGE_PIXEL_SIZE']) r = np.linspace(0, width - 1, width) R = range_resolution * r * (3.87e-7) Ramp = np.tile(R, [length, 1]) yref = int(atr['ref_y']) xref = int(atr['ref_x']) Ramp -= Ramp[yref][xref] # Correct LOD Ramp for Input File if k in multi_group_hdf5_file + multi_dataset_hdf5_file: h5 = h5py.File(File, 'r') epochList = sorted(h5[k].keys()) h5out = h5py.File(outFile, 'w') group = h5out.create_group(k) if k in ['interferograms', 'wrapped']: print 'number of interferograms: ' + str(len(epochList)) wvl = float(atr['WAVELENGTH']) Ramp *= -4 * np.pi / wvl for epoch in epochList: print epoch data = h5[k][epoch].get(epoch)[:] atr = h5[k][epoch].attrs dates = ptime.yyyymmdd(atr['DATE12'].split('-')) dates = ptime.yyyymmdd2years(dates) dt = date[1] - date[0] data -= Ramp * dt gg = group.create_group(epoch) dset = gg.create_dataset(epoch, data=data, compression='gzip') for key, value in atr.iteritems(): gg.attrs[key] = value elif k == 'timeseries': print 'number of acquisitions: ' + str(len(epochList)) tbase = [ float(dy) / 365.25 for dy in ptime.date_list2tbase(epochList)[0] ] for i in range(len(epochList)): epoch = epochList[i] print epoch data = h5[k].get(epoch)[:] data -= Ramp * tbase[i] dset = group.create_dataset(epoch, data=data, compression='gzip') for key, value in atr.iteritems(): group.attrs[key] = value else: print 'No need to correct for LOD for ' + k + ' file' sys.exit(1) h5.close() h5out.close() else: 
data, atr = readfile.read(File) data -= Ramp writefile.write(data, atr, outFile) return outFile
def correct_lod_file(File, rangeDistFile=None, outFile=None):
    '''Correct Envisat Local Oscillator Drift (LOD) using an empirical model
    (Marinkovic and Larsen, 2013).
    Inputs:
        File          - string, path of input file (HDF5 stack/timeseries or .unw)
        rangeDistFile - string, optional file with a 'slantRangeDistance' dataset;
                        when None, range distance is derived from File attributes
        outFile       - string, output path; defaults to <File>_LODcor<ext>
    Output:
        outFile       - string, path of the corrected file
    '''
    # Check Sensor Type - LOD correction only applies to Envisat
    print 'correct Local Oscilator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)'
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model:
    # ramp = slant-range distance (referenced to ref pixel) * 3.87e-7 per year
    if not rangeDistFile:
        print 'calculate range distance from input file attributes'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        range_resolution = float(atr['RANGE_PIXEL_SIZE'])
        rangeDist1D = range_resolution * np.linspace(0, width - 1, width)
        rangeDist = np.tile(rangeDist1D, (length, 1))
    else:
        print 'read range distance from file: %s' % (rangeDistFile)
        rangeDist = readfile.read(rangeDistFile, epoch='slantRangeDistance')[0]
    # reference the ramp to the stack's reference pixel
    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    rangeDist -= rangeDist[yref][xref]
    Ramp = np.array(rangeDist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        epochNum = len(epochList)

        print 'writing >>> %s' % (outFile)
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        prog_bar = ptime.progress_bar(maxValue=epochNum)
        if k in ['interferograms', 'wrapped']:
            # convert ramp from range (m/yr) to phase (rad/yr)
            Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
            print 'number of interferograms: ' + str(epochNum)
            date12List = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs
                # temporal baseline (in years) of each interferogram pair
                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt
                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                # copy per-epoch attributes to the output group
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12List[i])
        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            # temporal baselines in years, relative to the first acquisition
            tbase = [float(dy) / 365.25 for dy in ptime.date_list2tbase(epochList)[0]]
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]
                data -= Ramp * tbase[i]
                dset = group.create_dataset(epoch, data=data, compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)
        prog_bar.close()
        h5.close()
        h5out.close()
    elif k in ['.unw']:
        # single unwrapped interferogram: correct phase in place
        data, atr = readfile.read(File)
        Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
        dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
        dates = ptime.yyyymmdd2years(dates)
        dt = dates[1] - dates[0]
        data -= Ramp * dt
        print 'writing >>> %s' % (outFile)
        writefile.write(data, atr, outFile)
    else:
        print 'No need to correct for LOD for %s file' % (k)
    return outFile
def main(argv):
    """Interactive point-wise time-series viewer.

    Figure 1 shows a velocity / last-epoch / DEM background map; figure 2
    shows the displacement time series of a point chosen via -x/-y (or
    --lalo) options or by clicking on figure 1.  Options are parsed with
    getopt from *argv*.  Note: bare `try/except` blocks below are used as
    existence tests for optionally-set names (getopt-driven locals).
    """
    ## Default settings
    contour_step = 200.0
    contour_sigma = 3.0
    demShade = "yes"
    demContour = "yes"

    # NOTE(review): 'markderSize2' looks like a typo of 'markerSize2'; as
    # written, markerSize2 below is a local, not a global — confirm intent.
    global markerSize, markderSize2, markerColor, markerColor2, rectColor
    global lineWidth, lineWidth2, edgeWidth, fontSize
    # global markerColor_ref, markerColor_ref2

    markerSize = 16
    markerSize2 = 16
    markerColor = "crimson"  # g
    markerColor2 = "lightgray"
    markerColor_ref = "white"
    markerColor_ref2 = "lightgray"
    rectColor = "black"
    lineWidth = 0
    lineWidth2 = 0
    edgeWidth = 1.5
    fontSize = 16

    global unit, radius, saveFig, dispFig, fig_dpi
    fig_dpi = 300
    radius = 0
    saveFig = "no"
    dispFig = "yes"
    unit = "cm"

    dispDisplacement = "no"
    dispOpposite = "no"
    dispContour = "only"
    smoothContour = "no"
    contour_step = 200
    showRef = "yes"
    vel_alpha = 1.0
    zero_start = "yes"

    global ref_xsub, ref_ysub, ref_date
    global h5timeseries_2, dates_2, dateList_2
    global lbound, hbound

    ############### Check Inputs ##################
    if len(sys.argv) < 2:
        Usage()
        sys.exit(1)
    elif len(sys.argv) == 2:
        # single argument: either help flag or the time series file itself
        if argv[0] == "-h":
            Usage()
            sys.exit(1)
        elif os.path.isfile(argv[0]):
            timeSeriesFile = argv[0]
            h5timeseries = h5py.File(timeSeriesFile)
            k = h5timeseries.keys()
            if not "timeseries" in k:
                print "ERROR: Input file is " + k[0] + ".\n\tOnly timeseries is supported.\n"
                sys.exit(1)
        else:
            Usage()
            sys.exit(1)
    elif len(sys.argv) > 2:
        # NOTE(review): 'zoom-lon'/'zoom-lat' lack the trailing '=' so getopt
        # will not accept a value for them, yet the handlers below read `arg`
        # — confirm these long options actually work as intended.
        try:
            opts, args = getopt.getopt(
                argv,
                "f:F:v:a:b:s:m:c:w:u:l:h:D:V:t:T:d:r:x:y:X:Y:o:E:",
                [
                    "save",
                    "nodisplay",
                    "unit=",
                    "exclude=",
                    "ref-date=",
                    "rect-color=",
                    "zero-start=",
                    "zoom-x=",
                    "zoom-y=",
                    "zoom-lon",
                    "zoom-lat",
                    "lalo=",
                    "opposite",
                    "dem-nocontour",
                    "dem-noshade",
                    "displacement",
                    "contour-step=",
                    "contour-smooth=",
                    "LALO=",
                ],
            )
        except getopt.GetoptError:
            Usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt == "-f":
                timeSeriesFile = arg
            elif opt == "-F":
                timeSeriesFile_2 = arg
            elif opt == "-v":
                velocityFile = arg
            elif opt == "-a":
                vmin = float(arg)
            elif opt == "-b":
                vmax = float(arg)
            elif opt == "-s":
                fontSize = int(arg)
            elif opt == "-m":
                markerSize = int(arg)
                markerSize2 = int(arg)
            elif opt == "-c":
                markerColor = arg
            elif opt == "-w":
                lineWidth = int(arg)
            elif opt == "-u":
                unit = arg
            elif opt == "-l":
                lbound = float(arg)
            # NOTE(review): '-h' doubles as lower-bound option here and as the
            # help flag in the single-argument branch above.
            elif opt == "-h":
                hbound = float(arg)
            elif opt == "-D":
                demFile = arg
            elif opt == "-V":
                contour_step = float(arg)
            elif opt == "-t":
                minDate = arg
            elif opt == "-T":
                maxDate = arg
            elif opt == "-r":
                radius = abs(int(arg))
            elif opt == "-x":
                xsub = [int(i) for i in arg.split(":")]
                xsub.sort()
                # dispVelFig='no'
            elif opt == "-y":
                ysub = [int(i) for i in arg.split(":")]
                ysub.sort()
                # dispVelFig='no'
            elif opt == "-X":
                ref_xsub = [int(i) for i in arg.split(":")]
                ref_xsub.sort()
            elif opt == "-Y":
                ref_ysub = [int(i) for i in arg.split(":")]
                ref_ysub.sort()
                # dispVelFig='no'
            elif opt == "--contour-step":
                contour_step = float(arg)
            elif opt == "--contour-smooth":
                contour_sigma = float(arg)
            elif opt == "--dem-nocontour":
                demContour = "no"
            elif opt == "--dem-noshade":
                demShade = "no"
            elif opt == "--displacement":
                dispDisplacement = "yes"
            elif opt in ["-E", "--exclude"]:
                datesNot2show = arg.split(",")
            # NOTE(review): `opt in "--lalo"` is a substring test on a string,
            # not list membership; it works only because colliding short
            # options are matched earlier in this chain.
            elif opt in "--lalo":
                lalosub = [float(i) for i in arg.split(",")]
            elif opt in "--LALO":
                ref_lalosub = [float(i) for i in arg.split(",")]
            elif opt in ["--rect-color"]:
                rectColor = arg
            elif opt in ["--ref-date"]:
                ref_date = ptime.yyyymmdd(arg)
            # NOTE(review): dead branch — '-u' is consumed by the earlier
            # `opt == "-u"` case, and '--unit' only reaches here.
            elif opt in ["-u", "--unit"]:
                unit = arg.lower()
            elif opt == "--save":
                saveFig = "yes"
            elif opt == "--nodisplay":
                dispFig = "no"
                saveFig = "yes"
            elif opt == "--opposite":
                dispOpposite = "yes"
            elif opt == "--zero-start":
                zero_start = arg.lower()
            elif opt == "--zoom-x":
                win_x = [int(i) for i in arg.split(":")]
                win_x.sort()
            elif opt == "--zoom-y":
                win_y = [int(i) for i in arg.split(":")]
                win_y.sort()
            elif opt == "--zoom-lon":
                win_lon = [float(i) for i in arg.split(":")]
                win_lon.sort()
            elif opt == "--zoom-lat":
                win_lat = [float(i) for i in arg.split(":")]
                win_lat.sort()

    ##############################################################
    ## Read time series file info
    if not os.path.isfile(timeSeriesFile):
        print "\nERROR: Input time series file does not exist: " + timeSeriesFile + "\n"
        sys.exit(1)
    h5timeseries = h5py.File(timeSeriesFile)
    k = h5timeseries.keys()  # read h5 file and its group type
    if not "timeseries" in k:
        print "ERROR: Input file is " + k[0] + ".\n\tOnly timeseries is supported.\n"
        sys.exit(1)

    atr = readfile.read_attributes(timeSeriesFile)
    dateList1 = h5timeseries["timeseries"].keys()
    dateList1 = sorted(dateList1)
    dates1, datevector1 = ptime.date_list2vector(dateList1)
    print "\n************ Time Series Display - Point *************"

    ##### Select Check
    # convert --lalo geo coordinates to radar row/col, if given
    try:
        lalosub
        xsub = subset.coord_geo2radar([lalosub[1]], atr, "longitude")
        ysub = subset.coord_geo2radar([lalosub[0]], atr, "latitude")
        xsub = [xsub]
        ysub = [ysub]
        if radius == 0:
            radius = 3
    except:
        pass

    # same conversion for the reference point (--LALO)
    try:
        ref_lalosub
        ref_xsub = subset.coord_geo2radar([ref_lalosub[1]], atr, "longitude")
        ref_ysub = subset.coord_geo2radar([ref_lalosub[0]], atr, "latitude")
        ref_xsub = [ref_xsub]
        ref_ysub = [ref_ysub]
        if radius == 0:
            radius = 3
    except:
        pass

    ##############################################################
    global dates, dateList, datevector_all, dateListMinMax

    print "*******************"
    print "All dates existed:"
    print dateList1
    print "*******************"

    ## Check exclude date input
    try:
        datesNot2show
        if os.path.isfile(datesNot2show[0]):
            try:
                datesNot2show = ptime.read_date_list(datesNot2show[0])
            except:
                print "Can not read date list file: " + datesNot2show[0]
        print "dates not to show: " + str(datesNot2show)
    except:
        datesNot2show = []

    ## Check Min / Max Date
    dateListMinMax = []
    try:
        minDate
        minDate = ptime.yyyymmdd(minDate)
        dateListMinMax.append(minDate)
        minDateyy = ptime.yyyymmdd2years(minDate)
        print "minimum date: " + minDate
        for date in dateList1:
            yy = ptime.yyyymmdd2years(date)
            if yy < minDateyy:
                datesNot2show.append(date)
    except:
        pass
    try:
        maxDate
        maxDate = ptime.yyyymmdd(maxDate)
        dateListMinMax.append(maxDate)
        maxDateyy = ptime.yyyymmdd2years(maxDate)
        print "maximum date: " + maxDate
        for date in dateList1:
            yy = ptime.yyyymmdd2years(date)
            if yy > maxDateyy:
                datesNot2show.append(date)
    except:
        pass

    dateListMinMax = sorted(dateListMinMax)
    if not dateListMinMax:
        print "no min/max date input."
    else:
        datesMinMax, dateVecMinMax = ptime.date_list2vector(dateListMinMax)

    ## Finalize Date List
    try:
        dateList = []
        for date in dateList1:
            if date not in datesNot2show:
                dateList.append(date)
        print "--------------------------------------------"
        print "dates used to show time series displacements:"
        print dateList
        print "--------------------------------------------"
    except:
        dateList = dateList1
        print "using all dates to show time series displacement"

    ## Read Date Info (x axis for time series display)
    dates, datevector = ptime.date_list2vector(dateList)
    datevector_all = list(datevector)

    ## Check reference date input
    try:
        ref_date
        if not ref_date in dateList:
            print "Reference date - " + ref_date + " - is not included in date list to show."
            sys.exit(1)
        else:
            print "reference date: " + ref_date
    except:
        if zero_start == "yes":
            ref_date = dateList[0]
            print "set the 1st date as reference for displacement display."
        else:
            pass

    ##############################################################
    ##### Plot Fig 1 - Velocity / last epoch of time series / DEM
    fig = plt.figure(1)
    ax = fig.add_subplot(111)

    ##### Check subset range
    width = int(atr["WIDTH"])
    length = int(atr["FILE_LENGTH"])
    print "file size: " + str(length) + ", " + str(width)
    # zoom window: geo input wins, then pixel input, then full extent
    try:
        win_y = subset.coord_geo2radar(win_lat, atr, "latitude")
    except:
        try:
            win_y
        except:
            win_y = [0, length]
    try:
        win_x = subset.coord_geo2radar(win_lon, atr, "longitude")
    except:
        try:
            win_x
        except:
            win_x = [0, width]
    win_y, win_x = subset.check_subset_range(win_y, win_x, atr)

    # background image: velocity file if given, else the last epoch
    try:
        velocityFile
        try:
            vel, vel_atr = readfile.read(velocityFile)
        except:
            vel, vel_atr = readfile.read(timeSeriesFile, velocityFile)
        ax.set_title(velocityFile)
        print "display: " + velocityFile
    except:
        vel, vel_atr = readfile.read(timeSeriesFile, dateList1[-1])
        ax.set_title("epoch: " + dateList1[-1])
        print "display last epoch"

    ##### show displacement instead of phase
    if vel_atr["FILE_TYPE"] in ["interferograms", ".unw"] and dispDisplacement == "yes":
        print "show displacement"
        phase2range = -float(vel_atr["WAVELENGTH"]) / (4 * np.pi)
        vel *= phase2range
    else:
        dispDisplacement = "no"

    ## Reference Point
    if showRef == "yes":
        try:
            ax.plot(int(atr["ref_x"]), int(atr["ref_y"]), "ks", ms=6)
        except:
            pass

    if dispOpposite == "yes":
        print "show opposite value in figure/map 1"
        vel *= -1

    ## Flip
    try:
        flip_lr
    except:
        try:
            flip_ud
        except:
            flip_lr, flip_ud = view.auto_flip_check(atr)

    ## Status bar
    ## Geo coordinate
    try:
        ullon = float(atr["X_FIRST"])
        ullat = float(atr["Y_FIRST"])
        lon_step = float(atr["X_STEP"])
        lat_step = float(atr["Y_STEP"])
        lon_unit = atr["Y_UNIT"]
        lat_unit = atr["X_UNIT"]
        geocoord = "yes"
        print "Input file is Geocoded"
    except:
        geocoord = "no"

    def format_coord(x, y):
        # matplotlib status-bar formatter: pixel value plus lat/lon if geocoded
        col = int(x + 0.5)
        row = int(y + 0.5)
        if col >= 0 and col <= width and row >= 0 and row <= length:
            z = vel[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return "x=%.1f, y=%.1f, value=%.4f, lon=%.4f, lat=%.4f" % (x, y, z, lon, lat)
            except:
                return "x=%.1f, y=%.1f, value=%.4f" % (x, y, z)

    ax.format_coord = format_coord

    ## DEM
    try:
        demFile
        dem, demRsc = readfile.read(demFile)
        ax = view.plot_dem_yx(ax, dem, demShade, demContour, contour_step, contour_sigma)
        vel_alpha = 0.8
    except:
        print "No DEM file"

    try:
        img = ax.imshow(vel, vmin=vmin, vmax=vmax, alpha=vel_alpha)
    except:
        img = ax.imshow(vel, alpha=vel_alpha)
    plt.colorbar(img)

    ## Zoom In (subset)
    if flip_lr == "yes":
        ax.set_xlim(win_x[1], win_x[0])
    else:
        ax.set_xlim(win_x[0], win_x[1])
    if flip_ud == "yes":
        ax.set_ylim(win_y[0], win_y[1])
    else:
        ax.set_ylim(win_y[1], win_y[0])

    ## Flip
    # if flip_lr == 'yes': fig.gca().invert_xaxis()
    # if flip_ud == 'yes': fig.gca().invert_yaxis()

    ##########################################
    ##### Plot Fig 2 - Time series plot
    # fig2 = plt.figure(num=2,figsize=(12,6))
    fig2 = plt.figure(2, figsize=(12, 6))
    ax2 = fig2.add_subplot(111)

    # optional 2nd time series file for comparison plots
    try:
        timeSeriesFile_2
        h5timeseries_2 = h5py.File(timeSeriesFile_2)
        dateList_2 = h5timeseries_2["timeseries"].keys()
        dateList_2 = sorted(dateList_2)
        dates_2, datevector_2 = ptime.date_list2vector(dateList_2)
        datevector_all += list(set(datevector_2) - set(datevector_all))
        datevector_all = sorted(datevector_all)
    except:
        pass

    ################################ Plot Code Package <start> #################################
    def plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries):
        # Redraw figure 2 with the time series of pixel window (xsub, ysub);
        # also plots the reference point and the optional 2nd time series.
        ax2.cla()
        print "\n-------------------------------------------------------------------------------"
        disp_min = 0
        disp_max = 0

        ############################# Plot Time Series ##############################
        global ref_xsub, ref_ysub
        ##### 1.1 Plot Reference time series
        try:
            ref_xsub
            ref_ysub
            ref_xsub, ref_ysub = check_yx(ref_xsub, ref_ysub, radius, ax, rectColor)
            print "----------------------------------------------------"
            print "Reference Point:"
            print "ref_x=" + str(ref_xsub[0]) + ":" + str(ref_xsub[1])
            print "ref_y=" + str(ref_ysub[0]) + ":" + str(ref_ysub[1])
            print "-----------------------------"
            print "Time series with all dates:"
            dis1, dis1_mean, dis1_std, dis1_vel = read_dis(ref_xsub, ref_ysub, dateList1, h5timeseries, unit)
            (_, caps, _) = ax2.errorbar(
                dates1,
                dis1_mean,
                yerr=dis1_std,
                fmt="-ks",
                ms=markerSize2,
                lw=0,
                alpha=1,
                mfc=markerColor_ref,
                mew=edgeWidth,
                elinewidth=edgeWidth,
                ecolor="black",
                capsize=markerSize * 0.5,
            )
            for cap in caps:
                cap.set_markeredgewidth(edgeWidth)
            disp_min, disp_max = update_lim(disp_min, disp_max, dis1_mean, dis1_std)

            if not len(dateList) == len(dateList1):
                print "-----------------------------"
                print "Time series with dates of interest:"
                dis12, dis12_mean, dis12_std, dis12_vel = read_dis(ref_xsub, ref_ysub, dateList, h5timeseries, unit)
                (_, caps, _) = ax2.errorbar(
                    dates,
                    dis12_mean,
                    yerr=dis12_std,
                    fmt="-ks",
                    ms=markerSize2,
                    lw=0,
                    alpha=1,
                    mfc=markerColor_ref2,
                    mew=edgeWidth,
                    elinewidth=edgeWidth,
                    ecolor="black",
                    capsize=markerSize * 0.5,
                )
                for cap in caps:
                    cap.set_markeredgewidth(edgeWidth)
                disp_min, disp_max = update_lim(disp_min, disp_max, dis12_mean, dis12_std)
        except:
            pass

        ##### 1.2.0 Read y/x
        print "\n----------------------------------------------------"
        print "Point of Interest:"
        xsub, ysub = check_yx(xsub, ysub, radius, ax, rectColor)
        print "x=" + str(xsub[0]) + ":" + str(xsub[1])
        print "y=" + str(ysub[0]) + ":" + str(ysub[1])

        ##### 1.2.1 Plot 2nd time series
        try:
            timeSeriesFile_2
            print "-----------------------------"
            print "2nd Time Series:"
            dis2, dis2_mean, dis2_std, dis2_vel = read_dis(xsub, ysub, dateList_2, h5timeseries_2, unit)
            (_, caps, _) = ax2.errorbar(
                dates_2,
                dis2_mean,
                yerr=dis2_std,
                fmt="-ko",
                ms=markerSize2,
                lw=0,
                alpha=1,
                mfc=markerColor2,
                elinewidth=0,
                ecolor="black",
                capsize=0,
            )
            for cap in caps:
                cap.set_markeredgewidth(edgeWidth)
            disp_min, disp_max = update_lim(disp_min, disp_max, dis2_mean, dis2_std)
        except:
            pass

        ##### 1.2.2 Plot 1st time series
        print "-----------------------------"
        print "Time Series:"
        dis, dis_mean, dis_std, dis_vel = read_dis(xsub, ysub, dateList, h5timeseries, unit)
        (_, caps, _) = ax2.errorbar(
            dates,
            dis_mean,
            yerr=dis_std,
            fmt="-ko",
            ms=markerSize,
            lw=lineWidth,
            alpha=1,
            mfc=markerColor,
            elinewidth=edgeWidth,
            ecolor="black",
            capsize=markerSize * 0.5,
        )
        for cap in caps:
            cap.set_markeredgewidth(edgeWidth)
        disp_min, disp_max = update_lim(disp_min, disp_max, dis_mean, dis_std)

        ####################### Figure Format #######################
        ## x axis format
        try:
            ax2 = ptime.adjust_xaxis_date(ax2, dateVecMinMax, fontSize)
        except:
            ax2 = ptime.adjust_xaxis_date(ax2, datevector_all, fontSize)

        ## y axis format
        ax2.set_ylabel("Displacement [" + unit + "]", fontsize=fontSize)
        try:
            lbound
            hbound
            ax2.set_ylim(lbound, hbound)
        except:
            disp_buf = 0.2 * (disp_max - disp_min)
            ax2.set_ylim(disp_min - disp_buf, disp_max + disp_buf)
        for tick in ax2.yaxis.get_major_ticks():
            tick.label.set_fontsize(fontSize)

        ## title
        figTitle = "x=" + str(xsub[0]) + ":" + str(xsub[1]) + ", y=" + str(ysub[0]) + ":" + str(ysub[1])
        try:
            lonc = ullon + (xsub[0] + xsub[1]) / 2.0 * lon_step
            latc = ullat + (ysub[0] + ysub[1]) / 2.0 * lat_step
            figTitle += ", lalo=" + "%.4f,%.4f" % (latc, lonc)
        except:
            pass
        ax2.set_title(figTitle)

        ################## Save and Output #####################
        if saveFig == "yes":
            print "-----------------------------"
            Delay = {}
            Delay["displacement"] = dis
            Delay["unit"] = unit
            Delay["time"] = datevector
            Delay["velocity"] = dis_vel[0]
            Delay["velocity_unit"] = unit + "/yr"
            Delay["velocity_std"] = dis_vel[4]
            figBase = "x" + str(xsub[0]) + "_" + str(xsub[1] - 1) + "y" + str(ysub[0]) + "_" + str(ysub[1] - 1)
            sio.savemat(figBase + "_ts.mat", {"displacement": Delay})
            print "saved " + figBase + "_ts.mat"
            fig2.savefig(figBase + "_ts.pdf", bbox_inches="tight", transparent=True, dpi=fig_dpi)
            print "saved " + figBase + "_ts.pdf"
            if dispFig == "no":
                fig.savefig(figBase + "_vel.png", bbox_inches="tight", transparent=True, dpi=fig_dpi)
                print "saved " + figBase + "_vel.png"

    ################################ Plot Code Package <end> #################################

    ########### 1. Plot Time Series with x/y ##########
    try:
        xsub
        ysub
        plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries)
    except:
        print "No x/y input"
        pass

    ########### 2. Plot Time Series with Click ##########
    ## similar to 1. Plot Time Series with x/y
    def onclick(event):
        # mouse handler: re-plot figure 2 for the clicked pixel
        ax2.cla()
        xsub = [int(event.xdata)]
        ysub = [int(event.ydata)]
        plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries)
        if dispFig == "yes":
            plt.show()

    try:
        cid = fig.canvas.mpl_connect("button_press_event", onclick)
    except:
        pass

    if dispFig == "yes":
        plt.show()
def main(argv): inps = cmdLineParse() #print '\n********** Inversion: Time Series to Velocity ***********' atr = readfile.read_attribute(inps.timeseries_file) k = atr['FILE_TYPE'] print 'input '+k+' file: '+inps.timeseries_file if not k == 'timeseries': sys.exit('ERROR: input file is not timeseries!') h5file = h5py.File(inps.timeseries_file) ##################################### ## Date Info dateListAll = sorted(h5file[k].keys()) dateListAll = ptime.yyyymmdd(dateListAll) yyListAll = ptime.yyyymmdd2years(dateListAll) print '--------------------------------------------' print 'Dates from input file: '+str(len(dateListAll)) print dateListAll # Extrac exclude dates from input arguments inps.datesNot2include = [] # 1. template_file if inps.template_file: inps = update_inps_from_template(inps, inps.template_file) # 2. ex_date if inps.ex_date: for ex_date in inps.ex_date: if os.path.isfile(ex_date): ex_date = ptime.read_date_list(ex_date) else: ex_date = [ptime.yyyymmdd(ex_date)] inps.datesNot2include += list(set(ex_date) - set(inps.datesNot2include)) # delete dates not existed in input file inps.datesNot2include = list(set(inps.datesNot2include).intersection(dateListAll)) print 'date excluded:'+str(inps.datesNot2include) # 3. min_date if inps.min_date: inps.min_date = ptime.yyyymmdd(inps.min_date) print 'minimum date: '+inps.min_date yy_min = ptime.yyyymmdd2years(inps.min_date) for i in range(len(dateListAll)): date = dateListAll[i] if yyListAll[i] < yy_min and date not in inps.datesNot2include: print ' remove date: '+date inps.datesNot2include.append(date) # 4. 
max_date if inps.max_date: inps.max_date = ptime.yyyymmdd(inps.max_date) print 'minimum date: '+inps.max_date yy_max = ptime.yyyymmdd2years(inps.max_date) for i in range(len(dateListAll)): date = dateListAll[i] if yyListAll[i] > yy_max and date not in inps.datesNot2include: print ' remove date: '+date inps.datesNot2include.append(date) # Summary dateList = sorted(list(set(dateListAll) - set(inps.datesNot2include))) print '--------------------------------------------' if len(dateList) == len(dateListAll): print 'using all dates to calculate the velocity' else: print 'Dates used to estimate the velocity: '+str(len(dateList)) print dateList print '--------------------------------------------' # Date Aux Info dates, datevector = ptime.date_list2vector(dateList) ##################################### ## Inversion # Design matrix B=np.ones([len(datevector),2]) B[:,0]=datevector #B1 = np.linalg.pinv(B) B1 = np.dot(np.linalg.inv(np.dot(B.T,B)),B.T) B1 = np.array(B1,np.float32) # Loading timeseries print "Loading time series file: "+inps.timeseries_file+' ...' width = int(atr['WIDTH']) length = int(atr['FILE_LENGTH']) dateNum = len(dateList) timeseries = np.zeros([dateNum,length*width],np.float32) start_time = time.time() for i in range(dateNum): date = dateList[i] ut.print_progress(i+1, dateNum, prefix='loading:', suffix=date, elapsed_time=time.time()-start_time) timeseries[i,:] = h5file[k].get(date)[:].flatten() h5file.close() # Velocity Inversion print 'Calculating velocity ...' x = np.dot(B1,timeseries) velocity = np.reshape(x[0,:],[length,width]) print 'Calculating rmse ...' timeseries_linear = np.dot(B,x) rmse = np.reshape(np.sqrt((np.sum((timeseries_linear-timeseries)**2,0))/dateNum),[length,width]) print 'Calculating the standard deviation of the estimated velocity ...' 
residual = timeseries_linear - timeseries s1 = np.sqrt(np.sum(residual**2,0)/(dateNum-2)) s2 = np.sqrt(np.sum((datevector-np.mean(datevector))**2)) std = np.reshape(s1/s2,[length,width]) # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0) # SSres=np.sum(residual**2,0) # SS_REG=SSt-SSres # Rsquared=np.reshape(SS_REG/SSt,[length,width]) ###################################################### # covariance of the velocities ##################################### # Output file name if not inps.outfile: inps.outfile = 'velocity.h5' if inps.datesNot2include: inps.outfile = os.path.splitext(inps.outfile)[0]+'_ex'+os.path.splitext(inps.outfile)[1] inps.outfile_rmse = 'rmse_'+inps.outfile inps.outfile_std = 'std_'+inps.outfile inps.outfile_r2 = 'R2_'+inps.outfile # Attributes atr['date1'] = datevector[0] atr['date2'] = datevector[dateNum-1] # File Writing print '--------------------------------------' atr['FILE_TYPE'] = 'velocity' print 'writing >>> '+inps.outfile writefile.write(velocity, atr, inps.outfile) atr['FILE_TYPE'] = 'rmse' print 'writing >>> '+inps.outfile_rmse writefile.write(rmse, atr, inps.outfile_rmse) atr['FILE_TYPE'] = 'rmse' print 'writing >>> '+inps.outfile_std writefile.write(std, atr, inps.outfile_std) print 'Done.' return inps.outfile
def main(argv): inps = cmdLineParse() suffix = '_demErr' if not inps.outfile: inps.outfile = os.path.splitext( inps.timeseries_file)[0] + suffix + os.path.splitext( inps.timeseries_file)[1] # 1. template_file if inps.template_file: print 'read option from template file: ' + inps.template_file inps = read_template2inps(inps.template_file, inps) # Read Time Series print "loading time series: " + inps.timeseries_file atr = readfile.read_attribute(inps.timeseries_file) length = int(atr['FILE_LENGTH']) width = int(atr['WIDTH']) h5 = h5py.File(inps.timeseries_file) date_list = sorted(h5['timeseries'].keys()) date_num = len(date_list) print 'number of acquisitions: ' + str(date_num) # Exclude date info #inps.ex_date = ['20070115','20100310'] if inps.ex_date: inps = get_exclude_date(inps, date_list) if inps.ex_date: inps.ex_flag = np.array([i not in inps.ex_date for i in date_list]) timeseries = np.zeros((len(date_list), length * width), np.float32) prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ') for i in range(date_num): date = date_list[i] d = h5['timeseries'].get(date)[:] timeseries[i][:] = d.flatten('F') prog_bar.update(i + 1, suffix=date) del d h5.close() prog_bar.close() # Perpendicular Baseline print 'read perpendicular baseline' try: inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0) if inps.pbase.shape[1] > 1: print '\tconsider P_BASELINE variation in azimuth direction' else: pbase = inps.pbase except: print '\tCannot find P_BASELINE_TIMESERIES from timeseries file.' print '\tTrying to calculate it from interferograms file' if inps.ifgram_file: inps.pbase = np.array( ut.perp_baseline_ifgram2timeseries( inps.ifgram_file)[0]).reshape(date_num, 1) else: message = 'No interferogram file input!\n'+\ 'Can not correct for DEM residula without perpendicular base info!' 
raise Exception(message) # Temporal Baseline print 'read temporal baseline' inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape( date_num, 1) # Incidence angle (look angle in the paper) if inps.incidence_angle: if os.path.isfile(inps.incidence_angle): print 'reading incidence angle from file: ' + inps.incidence_angle inps.incidence_angle = readfile.read(inps.incidence_angle)[0] else: try: inps.incidence_angle = np.array(float(inps.incidence_angle)) print 'use input incidence angle : ' + str( inps.incidence_angle) except: raise ValueError('Can not read input incidence angle: ' + str(inps.incidence_angle)) else: print 'calculate incidence angle using attributes of time series file' if inps.pbase.shape[1] > 1: inps.incidence_angle = ut.incidence_angle(atr, dimension=2) else: inps.incidence_angle = ut.incidence_angle(atr, dimension=1) inps.incidence_angle *= np.pi / 180.0 # Range distance if inps.range_dis: if os.path.isfile(inps.range_dis): print 'reading range distance from file: ' + inps.range_dis inps.range_dis = readfile.read(inps.range_dis)[0] else: try: inps.range_dis = np.array(float(inps.range_dis)) print 'use input range distance : ' + str(inps.range_dis) except: raise ValueError('Can not read input incidence angle: ' + str(inps.range_dis)) else: print 'calculate range distance using attributes from time series file' if inps.pbase.shape[1] > 1: inps.range_dis = ut.range_distance(atr, dimension=2) else: inps.range_dis = ut.range_distance(atr, dimension=1) # Design matrix - temporal deformation model using tbase print '-------------------------------------------------' if inps.phase_velocity: print 'using phase velocity history' A1 = np.ones((date_num - 1, 1)) A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0 A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]** 3) / np.diff(inps.tbase, axis=0) / 6.0 #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\ # inps.tbase[0:date_num-1]**2) / 6.0 
else: print 'using phase history' A1 = np.hstack((np.ones((date_num, 1)), inps.tbase)) A2 = inps.tbase**2 / 2.0 A3 = inps.tbase**3 / 6.0 # Polynomial order of model print "temporal deformation model's polynomial order = " + str( inps.poly_order) if inps.poly_order == 1: A_def = A1 elif inps.poly_order == 2: A_def = np.hstack((A1, A2)) elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3)) # step function if inps.step_date: print "temporal deformation model's step function step at " + inps.step_date step_yy = ptime.yyyymmdd2years(inps.step_date) yy_list = ptime.yyyymmdd2years(date_list) flag_array = np.array(yy_list) >= step_yy A_step = np.zeros((date_num, 1)) A_step[flag_array] = 1.0 A_def = np.hstack((A_def, A_step)) # Heresh's original code for phase history approach #A_def = np.hstack((A2,A1,np.ones((date_num,1)))) print '-------------------------------------------------' ##---------------------------------------- Loop for L2-norm inversion -----------------------------------## delta_z_mat = np.zeros([length, width], dtype=np.float32) resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32) constC = np.zeros([length, width], dtype=np.float32) #delta_a_mat = np.zeros([length, width]) if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2: print 'inversing using L2-norm minimization (unweighted least squares)'\ ' pixel by pixel: %d loops in total' % (length*width) prog_bar = ptime.progress_bar(maxValue=length * width, prefix='calculating: ') for i in range(length * width): row = i % length col = i / length range_dis = inps.range_dis[row, col] inc_angle = inps.incidence_angle[row, col] # Consider P_BASELINE variation within one interferogram if inps.pbase.shape[1] > 1: pbase = inps.pbase[:, row].reshape(date_num, 1) # Design matrix - DEM error using pbase, range distance and incidence angle A_delta_z = pbase / (range_dis * np.sin(inc_angle)) if inps.phase_velocity: pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0) A_delta_z_v 
= pbase_v / (range_dis * np.sin(inc_angle)) A = np.hstack((A_delta_z_v, A_def)) else: A = np.hstack((A_delta_z, A_def)) # L-2 norm inversion if inps.ex_date: A_inv = np.linalg.pinv(A[inps.ex_flag, :]) else: A_inv = np.linalg.pinv(A) # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...] ts_dis = timeseries[:, i] if inps.phase_velocity: ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0) if inps.ex_date: X = np.dot(A_inv, ts_dis[inps.ex_flag]) else: X = np.dot(A_inv, ts_dis) # Residual vector n resid_n[:, i] = ts_dis - np.dot(A, X) # Update DEM error / timeseries matrix delta_z = X[0] delta_z_mat[row, col] = delta_z if inps.update_timeseries: timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten() prog_bar.update(i + 1, every=length * width / 100) prog_bar.close() elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1: print 'inversing using L2-norm minimization (unweighted least squares)'\ ' column by column: %d loops in total' % (width) prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ') for i in range(width): range_dis = inps.range_dis[i] inc_angle = inps.incidence_angle[i] # Design matrix - DEM error using pbase, range distance and incidence angle A_delta_z = pbase / (range_dis * np.sin(inc_angle)) if inps.phase_velocity: pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0) A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle)) A = np.hstack((A_delta_z_v, A_def)) else: A = np.hstack((A_delta_z, A_def)) # L-2 norm inversion if inps.ex_date: A_inv = np.linalg.pinv(A[inps.ex_flag, :]) else: A_inv = np.linalg.pinv(A) # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...] 
ts_dis = timeseries[:, i * length:(i + 1) * length] if inps.phase_velocity: ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0) if inps.ex_date: X = np.dot(A_inv, ts_dis[inps.ex_flag, :]) else: X = np.dot(A_inv, ts_dis) # Residual vector n resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X) constC[:, i] = X[1].reshape((1, length)) # Update DEM error / timeseries matrix delta_z = X[0].reshape((1, length)) delta_z_mat[:, i] = delta_z if inps.update_timeseries: timeseries[:, i * length:(i + 1) * length] -= np.dot( A_delta_z, delta_z) prog_bar.update(i + 1, every=width / 100) prog_bar.close() elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0: print 'inversing using L2-norm minimization (unweighted least squares) for the whole area' # Design matrix - DEM error using pbase, range distance and incidence angle A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle)) if inps.phase_velocity: pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0) A_delta_z_v = pbase_v / (inps.range_dis * np.sin(inps.incidence_angle)) A = np.hstack((A_delta_z_v, A_def)) else: A = np.hstack((A_delta_z, A_def)) # L-2 norm inversion if inps.ex_date: A_inv = np.linalg.pinv(A[inps.ex_flag, :]) else: A_inv = np.linalg.pinv(A) # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...] if inps.phase_velocity: timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase, axis=0) if inps.ex_date: X = np.dot(A_inv, timeseries[inps.ex_flag, :]) else: X = np.dot(A_inv, timeseries) # Residual vector n resid_n = ts_dis - np.dot(A, X) # Update DEM error / timeseries matrix delta_z_mat = X[0].reshape((1, length * width)) if inps.update_timeseries: timeseries -= np.dot(A_delta_z, delta_z_mat) delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F') else: print 'ERROR: Script only support same dimension for both incidence angle and range distance matrix.' 
print 'dimension of incidence angle: ' + str(inps.incidence_angle.ndim) print 'dimension of range distance: ' + str(inps.range_dis.ndim) sys.exit(1) ##------------------------------------------------ Output --------------------------------------------## # DEM error file if 'Y_FIRST' in atr.keys(): dem_error_file = 'demGeo_error.h5' else: dem_error_file = 'demRadar_error.h5' #if inps.phase_velocity: suffix = '_pha_poly'+str(inps.poly_order) #else: suffix = '_vel_poly'+str(inps.poly_order) #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1] print 'writing >>> ' + dem_error_file atr_dem_error = atr.copy() atr_dem_error['FILE_TYPE'] = 'dem' atr_dem_error['UNIT'] = 'm' writefile.write(delta_z_mat, atr_dem_error, dem_error_file) ## Phase Constant C = resid_n[0,:] #atrC = atr.copy() #atrC['FILE_TYPE'] = 'mask' #atrC['UNIT'] = 'm' #writefile.write(constC, atrC, 'constD.h5') ## Corrected DEM file #if inps.dem_file: # inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1] # print '--------------------------------------' # print 'writing >>> '+inps.dem_outfile # dem, atr_dem = readfile.read(inps.dem_file) # writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile) #outfile = 'delta_acc.h5' #print 'writing >>> '+outfile #atr_dem_error = atr.copy() #atr_dem_error['FILE_TYPE'] = 'velocity' #atr_dem_error['UNIT'] = 'm/s' #writefile.write(delta_a_mat, atr_dem_error, outfile) #print '**************************************' # Corrected Time Series if inps.update_timeseries: print 'writing >>> ' + inps.outfile print 'number of dates: ' + str(len(date_list)) h5out = h5py.File(inps.outfile, 'w') group = h5out.create_group('timeseries') prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ') for i in range(date_num): date = date_list[i] d = np.reshape(timeseries[i][:], [length, width], order='F') dset = group.create_dataset(date, data=d, compression='gzip') prog_bar.update(i + 1, 
suffix=date) prog_bar.close() for key, value in atr.iteritems(): group.attrs[key] = value h5out.close() outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5' print 'writing >>> ' + outFile print 'number of dates: ' + str(A_def.shape[0]) h5out = h5py.File(outFile, 'w') group = h5out.create_group('timeseries') prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ') for i in range(A_def.shape[0]): date = date_list[i] d = np.reshape(resid_n[i][:], [length, width], order='F') dset = group.create_dataset(date, data=d, compression='gzip') prog_bar.update(i + 1, suffix=date) prog_bar.close() # Attribute for key, value in atr.iteritems(): group.attrs[key] = value if A_def.shape[0] == date_num: group.attrs['UNIT'] = 'm' else: group.attrs['UNIT'] = 'm/yr' h5out.close() return
def main(argv): inps = cmdLineParse() suffix = '_demErr' if not inps.outfile: inps.outfile = os.path.splitext(inps.timeseries_file)[0]+suffix+os.path.splitext(inps.timeseries_file)[1] if inps.template_file: print 'read option from template file: '+inps.template_file inps = read_template2inps(inps.template_file, inps) ##### Read Data atr = readfile.read_attribute(inps.timeseries_file) coordType = 'radar' if 'Y_FIRST' in atr.keys(): coordType = 'geo' # 1. Incidence angle try: inps.inc_angle_file = ut.get_file_list(inps.inc_angle_file, coord=coordType)[0] except ValueError: print 'No incidence angle file found!\nRun incidence_angle.py to generate it.' print 'read incidence angle from file: '+str(inps.inc_angle_file) inps.inc_angle = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0].flatten() inps.inc_angle *= np.pi/180.0 # 2. Slant Range distance try: inps.range_dist_file = ut.get_file_list(inps.range_dist_file, coord=coordType)[0] except ValueError: print 'No range distance file found!\nRun range_distance.py to generate it.' print 'read slant range distance from file: '+str(inps.range_dist_file) inps.range_dist = readfile.read(inps.range_dist_file, epoch='slantRangeDistance')[0].flatten() # 3. Perp Baseline - 1D in time, 0D/1D in space (azimuth) print 'read perpendicular baseline' try: inps.pbase = ut.perp_baseline_timeseries(atr, dimension=1) if inps.pbase.shape[1] > 1: print 'consider perp baseline variance in azimuth direction' except valueError: print 'No P_BASELINE_TIMESERIES found in timeseries file.\n'+\ 'Can not correct for DEM residula without it!' # 4. 
Time Series - 1D in time, 1D in space (flattened) print "read time series file: " + inps.timeseries_file h5 = h5py.File(inps.timeseries_file) date_list = sorted(h5['timeseries'].keys()) date_num = len(date_list) inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(-1,1) #Mark dates used in the estimation inps.ex_date = check_exclude_date(inps.ex_date, date_list) inps.date_flag = np.array([i not in inps.ex_date for i in date_list], dtype=np.bool_) if inps.poly_order > np.sum(inps.date_flag): raise ValueError("ERROR: input polynomial order=%d is larger than number of acquisition=%d used in estimation!" %\ (inps.poly_order, np.sum(inps.date_flag))) length = int(atr['FILE_LENGTH']) width = int(atr['WIDTH']) pixel_num = length*width timeseries = np.zeros((date_num, pixel_num),np.float32) for i in range(date_num): timeseries[i] = h5['timeseries'].get(date_list[i])[:].flatten() sys.stdout.write('\rreading acquisition %3d/%3d ...' % (i+1, date_num)) sys.stdout.flush() h5.close() print '' ##### Design matrix - temporal deformation model print '-------------------------------------------------' print 'Correct topographic phase residual using Fattahi and Amelung (2013, IEEE-TGRS)' msg = 'minimum-norm constrain on: phase' if inps.phase_velocity: msg += ' velocity' print msg # Heresh's original code for phase history approach #A1 = np.hstack((np.ones((date_num, 1)), inps.tbase)) #A2 = inps.tbase**2 / 2.0 #A_def = np.hstack((A2,A1,np.ones((date_num,1)))) # 1. Polynomial - 2D matrix in size of (date_num, polyOrder+1) print "temporal deformation model: polynomial order = "+str(inps.poly_order) A_def = np.ones((date_num, 1), np.float32) for i in range(inps.poly_order): Ai = inps.tbase**(i+1) / gamma(i+2) Ai = np.array(Ai, np.float32).reshape(-1,1) A_def = np.hstack((A_def, Ai)) # 2. 
Step function - 2D matrix in size of (date_num, stepNum) if inps.step_date: print "temporal deformation model: step functions at "+str(inps.step_date) yySteps = ptime.yyyymmdd2years(inps.step_date) yyList = np.array(ptime.yyyymmdd2years(date_list)).reshape(-1,1) for yyStep in yySteps: Ai = yyList > yyStep Ai = np.array(Ai, np.float32).reshape(-1,1) A_def = np.hstack((A_def, Ai)) inps.step_num = len(inps.step_date) print '-------------------------------------------------' ##---------------------------------------- Loop for L2-norm inversion -----------------------------------## ## Output estimated steps print 'ordinal least squares (OLS) inversion using L2-norm minimization' timeseriesCor = np.zeros((date_num, pixel_num), dtype=np.float32) timeseriesRes = np.zeros((date_num, pixel_num), dtype=np.float32) topoRes = np.zeros(pixel_num, dtype=np.float32) constC = np.zeros(pixel_num, dtype=np.float32) if inps.step_num > 0: stepModel = np.zeros((inps.step_num, pixel_num), dtype=np.float32) print 'skip pixels with zero/nan value in geometry files - incidence angle and range distance' mask = np.multiply(~np.isnan(inps.inc_angle), ~np.isnan(inps.range_dist)) mask[inps.inc_angle == 0.] = 0 mask[inps.range_dist == 0.] 
= 0 pixel_num2inv = np.sum(mask) pixel_idx2inv = np.where(mask)[0] print 'number of pixels in the file: %d' % (pixel_num) print 'number of pixels to inverse: %d' % (pixel_num2inv) if inps.pbase.shape[1] == 1: pbase = inps.pbase prog_bar = ptime.progress_bar(maxValue=pixel_num) for i in range(pixel_num2inv): prog_bar.update(i+1, every=1000, suffix='%s/%s pixels'%(str(i+1), str(pixel_num2inv))) idx = pixel_idx2inv[i] r = inps.range_dist[idx] inc_angle = inps.inc_angle[idx] if inps.pbase.shape[1] > 1: pbase = inps.pbase[:, int(idx/width)].reshape(-1,1) A_deltaZ = pbase / (r * np.sin(inc_angle)) A = np.hstack((A_deltaZ, A_def)) ts = timeseries[:,idx].reshape(date_num,-1) deltaZ, tsCor, tsRes, stepEst = topographic_residual_inversion(ts, A, inps) topoRes[idx:idx+1] = deltaZ timeseriesCor[:,idx:idx+1] = tsCor timeseriesRes[:,idx:idx+1] = tsRes if inps.step_num > 0: stepModel[:,idx:idx+1] = stepEst prog_bar.close() ##------------------------------------------------ Output --------------------------------------------## # 1. DEM error file if 'Y_FIRST' in atr.keys(): deltaZFile = 'demGeo_error.h5' else: deltaZFile = 'demRadar_error.h5' print 'writing >>> '+deltaZFile atrDeltaZ = atr.copy() atrDeltaZ['FILE_TYPE'] = 'dem' atrDeltaZ['UNIT'] = 'm' writefile.write(topoRes.reshape(length, width), atrDeltaZ, deltaZFile) # 2. Topo Residual Corrected Time Series print 'writing >>> '+inps.outfile h5 = h5py.File(inps.outfile,'w') group = h5.create_group('timeseries') for i in range(date_num): sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num)) sys.stdout.flush() dset = group.create_dataset(date_list[i], data=timeseriesCor[i].reshape(length, width), compression='gzip') print '' for key,value in atr.iteritems(): group.attrs[key] = value h5.close() # 3. 
Inversion residual Time Series tsResFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesResidual.h5') print 'writing >>> '+os.path.basename(tsResFile) h5 = h5py.File(tsResFile,'w') group = h5.create_group('timeseries') for i in range(date_num): sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num)) sys.stdout.flush() dset = group.create_dataset(date_list[i], data=timeseriesRes[i].reshape(length, width), compression='gzip') print '' # Attribute for key,value in atr.iteritems(): group.attrs[key] = value h5.close() # 4. Step temporal Model estimation if inps.step_num > 0: stepFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesStepModel.h5') print 'writing >>> '+os.path.basename(stepFile) h5 = h5py.File(stepFile,'w') group = h5.create_group('timeseries') for i in range(inps.step_num): sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, inps.step_num)) sys.stdout.flush() dset = group.create_dataset(inps.step_date[i], data=stepModel[i].reshape(length, width), compression='gzip') print '' # Attribute for key,value in atr.iteritems(): group.attrs[key] = value group.attrs.pop('ref_date') h5.close() print 'Done.' return
def main(argv): ## Default settings contour_step = 200.0 contour_sigma = 3.0 demShade = 'yes' demContour = 'yes' global markerSize, markderSize2, markerColor, markerColor2, rectColor global lineWidth, lineWidth2, edgeWidth, fontSize #global markerColor_ref, markerColor_ref2 markerSize = 16 markerSize2 = 16 markerColor = 'crimson' # g markerColor2 = 'lightgray' markerColor_ref = 'white' markerColor_ref2 = 'lightgray' rectColor = 'black' lineWidth = 0 lineWidth2 = 0 edgeWidth = 1.5 fontSize = 16 global unit, radius, saveFig, dispFig, fig_dpi fig_dpi = 300 radius = 0 saveFig = 'no' dispFig = 'yes' unit = 'cm' dispDisplacement = 'no' dispOpposite = 'no' dispContour = 'only' smoothContour = 'no' contour_step = 200 showRef = 'yes' vel_alpha = 1.0 zero_start = 'yes' global ref_xsub, ref_ysub, ref_date global h5timeseries_2, dates_2, dateList_2 global lbound, hbound ############### Check Inputs ################## if len(sys.argv) < 2: usage() sys.exit(1) elif len(sys.argv) == 2: if argv[0] == '-h': usage() sys.exit(1) elif os.path.isfile(argv[0]): timeSeriesFile = argv[0] h5timeseries = h5py.File(timeSeriesFile) k = h5timeseries.keys() if not 'timeseries' in k: print 'ERROR: Input file is ' + k[ 0] + '.\n\tOnly timeseries is supported.\n' sys.exit(1) else: usage() sys.exit(1) elif len(sys.argv) > 2: try: opts, args = getopt.getopt(argv,"f:F:v:a:b:s:m:c:w:u:l:h:D:V:t:T:d:r:x:y:X:Y:o:E:", ['save','nodisplay','unit=','exclude=','ref-date=','rect-color=',\ 'zero-start=','zoom-x=','zoom-y=','zoom-lon','zoom-lat','lalo=',\ 'opposite','dem-nocontour','dem-noshade','displacement','contour-step=',\ 'contour-smooth=','LALO=']) except getopt.GetoptError: usage() sys.exit(1) for opt, arg in opts: if opt == '-f': timeSeriesFile = arg elif opt == '-F': timeSeriesFile_2 = arg elif opt == '-v': velocityFile = arg elif opt == '-a': vmin = float(arg) elif opt == '-b': vmax = float(arg) elif opt == '-s': fontSize = int(arg) elif opt == '-m': markerSize = int(arg) markerSize2 = int(arg) elif 
opt == '-c': markerColor = arg elif opt == '-w': lineWidth = int(arg) elif opt == '-u': unit = arg elif opt == '-l': lbound = float(arg) elif opt == '-h': hbound = float(arg) elif opt == '-D': demFile = arg elif opt == '-V': contour_step = float(arg) elif opt == '-t': minDate = arg elif opt == '-T': maxDate = arg elif opt == '-r': radius = abs(int(arg)) elif opt == '-x': xsub = sorted([int(i) for i in arg.split(':')]) elif opt == '-y': ysub = sorted([int(i) for i in arg.split(':')]) elif opt == '-X': ref_xsub = sorted([int(i) for i in arg.split(':')]) elif opt == '-Y': ref_ysub = sorted([int(i) for i in arg.split(':')]) elif opt == '--contour-step': contour_step = float(arg) elif opt == '--contour-smooth': contour_sigma = float(arg) elif opt == '--dem-nocontour': demContour = 'no' elif opt == '--dem-noshade': demShade = 'no' elif opt == '--displacement': dispDisplacement = 'yes' elif opt in ['-E', '--exclude']: datesNot2show = arg.split(',') elif opt in '--lalo': lalosub = [float(i) for i in arg.split(',')] elif opt in '--LALO': ref_lalosub = [float(i) for i in arg.split(',')] elif opt in ['--rect-color']: rectColor = arg elif opt in ['--ref-date']: ref_date = ptime.yyyymmdd(arg) elif opt in ['-u', '--unit']: unit = arg.lower() elif opt == '--save': saveFig = 'yes' elif opt == '--nodisplay': dispFig = 'no' saveFig = 'yes' elif opt == '--opposite': dispOpposite = 'yes' elif opt == '--zero-start': zero_start = arg.lower() elif opt == '--zoom-x': win_x = sorted([int(i) for i in arg.split(':')]) elif opt == '--zoom-y': win_y = sorted([int(i) for i in arg.split(':')]) elif opt == '--zoom-lon': win_lon = sorted([float(i) for i in arg.split(':')]) elif opt == '--zoom-lat': win_lat = sorted([float(i) for i in arg.split(':')]) ############################################################## ## Read time series file info if not os.path.isfile(timeSeriesFile): print '\nERROR: Input time series file does not exist: ' + timeSeriesFile + '\n' sys.exit(1) h5timeseries = 
h5py.File(timeSeriesFile, 'r') k = h5timeseries.keys() # read h5 file and its group type if not 'timeseries' in k: print 'ERROR: Input file is ' + k[ 0] + '.\n\tOnly timeseries is supported.\n' sys.exit(1) atr = readfile.read_attribute(timeSeriesFile) dateList1 = sorted(h5timeseries['timeseries'].keys()) dates1, datevector1 = ptime.date_list2vector(dateList1) print '\n************ Time Series Display - Point *************' ##### Select Check try: lalosub xsub = subset.coord_geo2radar([lalosub[1]], atr, 'longitude') ysub = subset.coord_geo2radar([lalosub[0]], atr, 'latitude') xsub = [xsub] ysub = [ysub] if radius == 0: radius = 3 except: pass try: ref_lalosub ref_xsub = subset.coord_geo2radar([ref_lalosub[1]], atr, 'longitude') ref_ysub = subset.coord_geo2radar([ref_lalosub[0]], atr, 'latitude') ref_xsub = [ref_xsub] ref_ysub = [ref_ysub] if radius == 0: radius = 3 except: pass ############################################################## global dates, dateList, datevector_all, dateListMinMax print '*******************' print 'All dates existed:' print dateList1 print '*******************' ## Check exclude date input try: datesNot2show if os.path.isfile(datesNot2show[0]): try: datesNot2show = ptime.read_date_list(datesNot2show[0]) except: print 'Can not read date list file: ' + datesNot2show[0] print 'dates not to show: ' + str(datesNot2show) except: datesNot2show = [] ## Check Min / Max Date dateListMinMax = [] try: minDate minDate = ptime.yyyymmdd(minDate) dateListMinMax.append(minDate) minDateyy = ptime.yyyymmdd2years(minDate) print 'minimum date: ' + minDate for date in dateList1: yy = ptime.yyyymmdd2years(date) if yy < minDateyy: datesNot2show.append(date) except: pass try: maxDate maxDate = ptime.yyyymmdd(maxDate) dateListMinMax.append(maxDate) maxDateyy = ptime.yyyymmdd2years(maxDate) print 'maximum date: ' + maxDate for date in dateList1: yy = ptime.yyyymmdd2years(date) if yy > maxDateyy: datesNot2show.append(date) except: pass dateListMinMax = 
sorted(dateListMinMax) if not dateListMinMax: print 'no min/max date input.' else: datesMinMax, dateVecMinMax = ptime.date_list2vector(dateListMinMax) ## Finalize Date List try: dateList = [] for date in dateList1: if date not in datesNot2show: dateList.append(date) print '--------------------------------------------' print 'dates used to show time series displacements:' print dateList print '--------------------------------------------' except: dateList = dateList1 print 'using all dates to show time series displacement' ## Read Date Info (x axis for time series display) dates, datevector = ptime.date_list2vector(dateList) datevector_all = list(datevector) ## Check reference date input try: ref_date if not ref_date in dateList: print 'Reference date - ' + ref_date + ' - is not included in date list to show.' sys.exit(1) else: print 'reference date: ' + ref_date except: if zero_start == 'yes': ref_date = dateList[0] print 'set the 1st date as reference for displacement display.' else: pass ############################################################## ##### Plot Fig 1 - Velocity / last epoch of time series / DEM fig = plt.figure(1) ax = fig.add_subplot(111) ##### Check subset range width = int(atr['WIDTH']) length = int(atr['FILE_LENGTH']) print 'file size: ' + str(length) + ', ' + str(width) try: win_y = subset.coord_geo2radar(win_lat, atr, 'latitude') except: try: win_y except: win_y = [0, length] try: win_x = subset.coord_geo2radar(win_lon, atr, 'longitude') except: try: win_x except: win_x = [0, width] win_box = (win_x[0], win_y[0], win_x[1], win_y[1]) win_box = subset.check_box_within_data_coverage(win_box, atr) try: velocityFile try: vel, vel_atr = readfile.read(velocityFile) except: vel, vel_atr = readfile.read(timeSeriesFile, velocityFile) ax.set_title(velocityFile) print 'display: ' + velocityFile except: vel, vel_atr = readfile.read(timeSeriesFile, dateList1[-1]) ax.set_title('epoch: ' + dateList1[-1]) print 'display last epoch' ##### show displacement 
instead of phase if vel_atr['FILE_TYPE'] in ['interferograms', '.unw' ] and dispDisplacement == 'yes': print 'show displacement' phase2range = -float(vel_atr['WAVELENGTH']) / (4 * np.pi) vel *= phase2range else: dispDisplacement = 'no' ## Reference Point if showRef == 'yes': try: ax.plot(int(atr['ref_x']), int(atr['ref_y']), 'ks', ms=6) except: pass if dispOpposite == 'yes': print 'show opposite value in figure/map 1' vel *= -1 ## Flip try: flip_lr except: try: flip_ud except: flip_lr, flip_ud = view.auto_flip_direction(atr) ## Status bar ## Geo coordinate try: ullon = float(atr['X_FIRST']) ullat = float(atr['Y_FIRST']) lon_step = float(atr['X_STEP']) lat_step = float(atr['Y_STEP']) lon_unit = atr['Y_UNIT'] lat_unit = atr['X_UNIT'] geocoord = 'yes' print 'Input file is Geocoded' except: geocoord = 'no' def format_coord(x, y): col = int(x + 0.5) row = int(y + 0.5) if col >= 0 and col <= width and row >= 0 and row <= length: z = vel[row, col] try: lon = ullon + x * lon_step lat = ullat + y * lat_step return 'x=%.1f, y=%.1f, value=%.4f, lon=%.4f, lat=%.4f' % ( x, y, z, lon, lat) except: return 'x=%.1f, y=%.1f, value=%.4f' % (x, y, z) ax.format_coord = format_coord ## DEM try: demFile dem, demRsc = readfile.read(demFile) ax = view.plot_dem_yx(ax, dem, demShade, demContour, contour_step, contour_sigma) vel_alpha = 0.8 except: print 'No DEM file' try: img = ax.imshow(vel, vmin=vmin, vmax=vmax, alpha=vel_alpha) except: img = ax.imshow(vel, alpha=vel_alpha) plt.colorbar(img) ## Zoom In (subset) if flip_lr == 'yes': ax.set_xlim(win_box[2], win_box[0]) else: ax.set_xlim(win_box[0], win_box[2]) if flip_ud == 'yes': ax.set_ylim(win_box[1], win_box[3]) else: ax.set_ylim(win_box[3], win_box[1]) ## Flip #if flip_lr == 'yes': fig.gca().invert_xaxis() #if flip_ud == 'yes': fig.gca().invert_yaxis() ########################################## ##### Plot Fig 2 - Time series plot #fig2 = plt.figure(num=2,figsize=(12,6)) fig2 = plt.figure(2, figsize=(12, 6)) ax2 = fig2.add_subplot(111) 
try: timeSeriesFile_2 h5timeseries_2 = h5py.File(timeSeriesFile_2) dateList_2 = sorted(h5timeseries_2['timeseries'].keys()) dates_2, datevector_2 = ptime.date_list2vector(dateList_2) datevector_all += list(set(datevector_2) - set(datevector_all)) datevector_all = sorted(datevector_all) except: pass ################################ Plot Code Package <start> ################################# def plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries): ax2.cla() print '\n-------------------------------------------------------------------------------' disp_min = 0 disp_max = 0 ############################# Plot Time Series ############################## global ref_xsub, ref_ysub ##### 1.1 Plot Reference time series try: ref_xsub ref_ysub ref_xsub, ref_ysub = check_yx(ref_xsub, ref_ysub, radius, ax, rectColor) print '----------------------------------------------------' print 'Reference Point:' print 'ref_x=' + str(ref_xsub[0]) + ':' + str(ref_xsub[1]) print 'ref_y=' + str(ref_ysub[0]) + ':' + str(ref_ysub[1]) print '-----------------------------' print 'Time series with all dates:' dis1, dis1_mean, dis1_std, dis1_vel = read_dis_xy( ref_xsub, ref_ysub, dateList1, h5timeseries, unit) (_, caps, _)=ax2.errorbar(dates1,dis1_mean,yerr=dis1_std,fmt='-ks',\ ms=markerSize2, lw=0, alpha=1,mfc=markerColor_ref,mew=edgeWidth,\ elinewidth=edgeWidth,ecolor='black',capsize=markerSize*0.5) for cap in caps: cap.set_markeredgewidth(edgeWidth) disp_min, disp_max = update_lim(disp_min, disp_max, dis1_mean, dis1_std) if not len(dateList) == len(dateList1): print '-----------------------------' print 'Time series with dates of interest:' dis12, dis12_mean, dis12_std, dis12_vel = read_dis_xy( ref_xsub, ref_ysub, dateList, h5timeseries, unit) (_, caps, _)=ax2.errorbar(dates,dis12_mean,yerr=dis12_std,fmt='-ks',\ ms=markerSize2, lw=0, alpha=1,mfc=markerColor_ref2,mew=edgeWidth,\ elinewidth=edgeWidth,ecolor='black',capsize=markerSize*0.5) for cap in caps: cap.set_markeredgewidth(edgeWidth) disp_min, 
disp_max = update_lim(disp_min, disp_max, dis12_mean, dis12_std) except: pass ##### 1.2.0 Read y/x print '\n----------------------------------------------------' print 'Point of Interest:' xsub, ysub = check_yx(xsub, ysub, radius, ax, rectColor) print 'x=' + str(xsub[0]) + ':' + str(xsub[1]) print 'y=' + str(ysub[0]) + ':' + str(ysub[1]) ##### 1.2.1 Plot 2nd time series try: timeSeriesFile_2 print '-----------------------------' print '2nd Time Series:' dis2, dis2_mean, dis2_std, dis2_vel = read_dis_xy( xsub, ysub, dateList_2, h5timeseries_2, unit) (_, caps, _)=ax2.errorbar(dates_2,dis2_mean,yerr=dis2_std,fmt='-ko',\ ms=markerSize2, lw=0, alpha=1, mfc=markerColor2,\ elinewidth=0,ecolor='black',capsize=0) for cap in caps: cap.set_markeredgewidth(edgeWidth) disp_min, disp_max = update_lim(disp_min, disp_max, dis2_mean, dis2_std) except: pass ##### 1.2.2 Plot 1st time series print '-----------------------------' print 'Time Series:' dis, dis_mean, dis_std, dis_vel = read_dis_xy(xsub, ysub, dateList, h5timeseries, unit) (_, caps, _)=ax2.errorbar(dates,dis_mean,yerr=dis_std,fmt='-ko',\ ms=markerSize, lw=lineWidth, alpha=1, mfc=markerColor,\ elinewidth=edgeWidth,ecolor='black',capsize=markerSize*0.5) for cap in caps: cap.set_markeredgewidth(edgeWidth) disp_min, disp_max = update_lim(disp_min, disp_max, dis_mean, dis_std) ####################### Figure Format ####################### ## x axis format try: ax2 = ptime.auto_adjust_xaxis_date(ax2, dateVecMinMax, fontSize) except: ax2 = ptime.auto_adjust_xaxis_date(ax2, datevector_all, fontSize) ## y axis format ax2.set_ylabel('Displacement [' + unit + ']', fontsize=fontSize) try: lbound hbound ax2.set_ylim(lbound, hbound) except: disp_buf = 0.2 * (disp_max - disp_min) ax2.set_ylim(disp_min - disp_buf, disp_max + disp_buf) for tick in ax2.yaxis.get_major_ticks(): tick.label.set_fontsize(fontSize) ## title figTitle = 'x=' + str(xsub[0]) + ':' + str(xsub[1]) + ', y=' + str( ysub[0]) + ':' + str(ysub[1]) try: lonc = ullon + 
(xsub[0] + xsub[1]) / 2.0 * lon_step latc = ullat + (ysub[0] + ysub[1]) / 2.0 * lat_step figTitle += ', lalo=' + '%.4f,%.4f' % (latc, lonc) except: pass ax2.set_title(figTitle) ################## Save and Output ##################### if saveFig == 'yes': print '-----------------------------' Delay = {} Delay['displacement'] = dis Delay['unit'] = unit Delay['time'] = datevector Delay['velocity'] = dis_vel[0] Delay['velocity_unit'] = unit + '/yr' Delay['velocity_std'] = dis_vel[4] figBase = 'x' + str(xsub[0]) + '_' + str(xsub[1] - 1) + 'y' + str( ysub[0]) + '_' + str(ysub[1] - 1) sio.savemat(figBase + '_ts.mat', {'displacement': Delay}) print 'saved ' + figBase + '_ts.mat' fig2.savefig(figBase + '_ts.pdf', bbox_inches='tight', transparent=True, dpi=fig_dpi) print 'saved ' + figBase + '_ts.pdf' if dispFig == 'no': fig.savefig(figBase + '_vel.png', bbox_inches='tight', transparent=True, dpi=fig_dpi) print 'saved ' + figBase + '_vel.png' ################################ Plot Code Package <end> ################################# ########### 1. Plot Time Series with x/y ########## try: xsub ysub plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries) except: print 'No x/y input' pass ########### 2. Plot Time Series with Click ########## ## similar to 1. Plot Time Series with x/y def onclick(event): ax2.cla() xsub = [int(event.xdata)] ysub = [int(event.ydata)] plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries) if dispFig == 'yes': plt.show() try: cid = fig.canvas.mpl_connect('button_press_event', onclick) except: pass if dispFig == 'yes': plt.show()