def update_date_options():
    global ref_date
    atr = readfile.read_attribute(timeseries_file.get())
    k = atr['FILE_TYPE']
    if not k == 'timeseries':
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(timeseries_file.get(), 'r')
    date_list = sorted(h5[k].keys())

    def format_date(raw_date):
        list_date = list(raw_date)
        list_date.insert(4, '-')
        list_date.insert(7, '-')
        the_date = "".join(list_date)
        return the_date

    for date in date_list:
        ref_date_option_menu.children['menu'].add_command(
            label=format_date(date),
            command=lambda val=date: ref_date.set(val))
    ref_date.set(date_list[0])
def pick_file():
    global attributes, starting_upper_lim
    if h5_file.get() == "":
        filename = filedialog.askopenfilename(initialdir="/", title="Select file",
                                              filetypes=(("HDF5 files", "*.h5"),
                                                         ("all files", "*.*")))
        frame.filename = filename
        h5_file.set(frame.filename)
        h5_file_short.set(filename.split("/")[-1])
        pick_h5_file_button.config(text="Cancel")

        atr = readfile.read_attribute(h5_file.get())
        file_type = atr['FILE_TYPE']
        if file_type not in readfile.multi_group_hdf5_file + readfile.multi_dataset_hdf5_file + ['HDFEOS']:
            data, attributes = readfile.read(h5_file.get())
            # avoid shadowing the built-in max()
            data_max = numpy.amax(data)
            starting_upper_lim = data_max * 2
            update_sliders("m")
            y_lim_upper.set(data_max)
        set_variables_from_attributes()
        return frame.filename
    else:
        h5_file.set("")
        h5_file_short.set("No File Selected")
        pick_h5_file_button.config(text="Select .h5 File")
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'incidenceAngle.h5'

    # Calculate incidence angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in list(atr.keys()):
        print('Input file is geocoded, only center incidence angle is calculated:')
        print(angle)
        return angle
    # Radar coord
    else:
        print('writing >>> ' + outFile)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'degree'
        writefile.write(angle, atr, outFile)
        return outFile
def main(argv):
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    lat = float(argv[0])
    lon = float(argv[1])
    try:
        trans_file = argv[2]
    except:
        trans_file = ut.get_file_list('geomap*.trans')[0]
    try:
        radar_file = argv[3]
    except:
        radar_file = 'unwrapIfgram.h5'

    atr_rdr = readfile.read_attribute(radar_file)

    print('input geo coord: lat=%.4f, lon=%.4f' % (lat, lon))
    y, x = ut.glob2radar(np.array(lat), np.array(lon), trans_file, atr_rdr)[0:2]
    print('corresponding radar coord: y=%d, x=%d' % (y, x))
    return
def get_unavco_filename(timeseriesFile):
    '''Get output file name of UNAVCO InSAR Archive'''
    ##### Prepare Metadata
    pysar_meta_dict = readfile.read_attribute(timeseriesFile)
    k = pysar_meta_dict['FILE_TYPE']
    h5_timeseries = h5py.File(timeseriesFile, 'r')
    dateList = sorted(h5_timeseries[k].keys())
    unavco_meta_dict = metadata_pysar2unavco(pysar_meta_dict, dateList)
    h5_timeseries.close()
    meta_dict = pysar_meta_dict.copy()
    meta_dict.update(unavco_meta_dict)

    ##### Compose Output File Name
    SAT = meta_dict['mission']
    SW = meta_dict['beam_mode']    # should be like FB08 for ALOS, need to find out, Yunjun, 2016-12-26
    RELORB = "%03d" % (int(meta_dict['relative_orbit']))
    FRAME = "%04d" % (int(meta_dict['frame']))
    DATE1 = dt.strptime(meta_dict['first_date'], '%Y-%m-%d').strftime('%Y%m%d')
    DATE2 = dt.strptime(meta_dict['last_date'], '%Y-%m-%d').strftime('%Y%m%d')
    TBASE = "%04d" % (0)
    BPERP = "%05d" % (0)
    outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'-'+DATE2+'_'+TBASE+'_'+BPERP+'.he5'
    return outName
def remove_reference_pixel(File):
    '''Remove reference pixel info from input file'''
    print("remove ref_y/x and/or ref_lat/lon from file: " + File)
    ext = os.path.splitext(File)[1]
    if ext not in ['.h5', '.he5']:
        sys.exit('ERROR: only hdf5 file supported for this function!')

    k = readfile.read_attribute(File)['FILE_TYPE']
    h5 = h5py.File(File, 'r+')
    if k in multi_group_hdf5_file:
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            for key in ['ref_y', 'ref_x', 'ref_lat', 'ref_lon']:
                try:
                    h5[k][ifgram].attrs.pop(key)
                except:
                    pass
    else:
        for key in ['ref_y', 'ref_x', 'ref_lat', 'ref_lon']:
            try:
                h5[k].attrs.pop(key)
            except:
                pass
    h5.close()
    return File
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'rangeDistance.h5'

    # Calculate range distance
    range_dis = ut.range_distance(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in list(atr.keys()):
        print('Input file is geocoded, only center range distance is calculated:')
        print(range_dis)
        return range_dis
    # Radar coord
    else:
        print('writing >>> '+outFile)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'm'
        writefile.write(range_dis, atr, outFile)
        return outFile
def check_file_size(fileList, mode_width=None, mode_length=None):
    '''Update file list and drop those not in the same size with majority.'''
    # If input file list is empty
    if not fileList:
        return fileList, None, None

    # Read Width/Length list
    widthList = []
    lengthList = []
    for file in fileList:
        rsc = readfile.read_attribute(file)
        widthList.append(rsc['WIDTH'])
        lengthList.append(rsc['FILE_LENGTH'])

    # Mode of Width and Length
    if not mode_width:
        mode_width = mode(widthList)
    if not mode_length:
        mode_length = mode(lengthList)

    # Update Input List
    ext = os.path.splitext(fileList[0])[1]
    fileListOut = list(fileList)
    if widthList.count(mode_width) != len(widthList) or lengthList.count(mode_length) != len(lengthList):
        print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
        print('WARNING: Some '+ext+' may have the wrong dimensions!')
        print('All '+ext+' should have the same size.')
        print('The width and length of the majority of '+ext+' are: '+str(mode_width)+', '+str(mode_length))
        print('But the following '+ext+' have different dimensions and thus will not be loaded:')
        for i in range(len(fileList)):
            if widthList[i] != mode_width or lengthList[i] != mode_length:
                print('%s    width: %s  length: %s' % (os.path.basename(fileList[i]), widthList[i], lengthList[i]))
                fileListOut.remove(fileList[i])
        print('\nNumber of '+ext+' left: '+str(len(fileListOut)))
        print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    return fileListOut, mode_width, mode_length
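# Usage sketch for check_file_size() (illustrative only; the glob pattern and
# file names are hypothetical). ut.get_file_list() is used elsewhere in this
# code base to expand patterns into file lists.
def _example_check_file_size():
    unw_list = ut.get_file_list('filt_*.unw')
    unw_list, width, length = check_file_size(unw_list)
    print('%d files kept with common size %s x %s (length x width)'
          % (len(unw_list), length, width))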
def main(argv):
    inps = cmdLineParse()
    print('\n*************** Spatial Average ******************')
    for File in inps.file:
        mean_list, date_list = ut.spatial_average(File, inps.mask_file, saveList=True)
        atr = readfile.read_attribute(File)
        k = atr['FILE_TYPE']
        if inps.disp_fig and k == 'timeseries':
            dates, datevector = ptime.date_list2vector(date_list)
            # plot
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.plot(dates, mean_list, '-ko', lw=2, ms=16, alpha=0.7, mfc='crimson')
            ax.set_title('Spatial Average', fontsize=12)
            ax = ptime.auto_adjust_xaxis_date(ax, datevector)[0]
            ax.set_xlabel('Time [years]', fontsize=12)
            ax.set_ylabel('Mean', fontsize=12)
            plt.show()
def reset_pairs(File):
    '''Reset/restore all pairs within the input file by setting drop_ifgram='no' for all of them'''
    print("set drop_ifgram to 'no' for all interferograms for file: " + File)
    k = readfile.read_attribute(File)['FILE_TYPE']
    h5 = h5py.File(File, 'r+')
    ifgram_list = sorted(h5[k].keys())
    for ifgram in ifgram_list:
        h5[k][ifgram].attrs['drop_ifgram'] = 'no'
    h5.close()
    return File
def main(argv):
    ##### Check Inputs
    if not argv or argv[0] in ['-h', '--help']:
        usage()
        sys.exit(1)
    if len(argv) < 2 or not argv[1]:
        raise Exception('\nAt least 2 inputs are needed.\n')

    ##### Read Original Attributes
    #print '************ Add / Update HDF5 File Attributes *************'
    File = argv[0]
    atr = readfile.read_attribute(File)
    print('Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']+': '+File)

    ##### Read New Attributes
    atr_new = dict()
    for i in range(1, len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()
    print("The following attributes will be added/updated, or removed if new value is 'None':")
    info.print_attributes(atr_new)

    ext = os.path.splitext(File)[1]
    ##### Update h5 File
    if ext in ['.h5', '.he5']:
        File = ut.add_attribute(File, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print('All updated (removed) attributes already exist (do not exist) with the same value, skip update.')
        else:
            for key, value in atr_new.items():
                # delete the item if new value is 'None'
                if value == 'None':
                    try:
                        atr.pop(key)
                    except:
                        pass
                else:
                    atr[key] = value
            if atr['PROCESSOR'] == 'roipac':
                print('writing >>> '+File+'.rsc')
                writefile.write_roipac_rsc(atr, File+'.rsc')
    return File
def ref_date_file(inFile, ref_date, outFile=None):
    '''Change input file reference date to a different one.'''
    if not outFile:
        outFile = os.path.splitext(inFile)[0]+'_refDate.h5'

    # Input file type
    atr = readfile.read_attribute(inFile)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        print('Input file is '+k+', only timeseries is supported.')
        return None

    # Input reference date
    h5 = h5py.File(inFile, 'r')
    date_list = sorted(h5[k].keys())
    h5.close()
    date_num = len(date_list)
    try:
        ref_date_orig = atr['ref_date']
    except:
        ref_date_orig = date_list[0]

    ref_date = ptime.yyyymmdd(ref_date)
    print('input reference date: '+ref_date)
    if ref_date not in date_list:
        print('Input reference date was not found!\nAll dates available: '+str(date_list))
        return None
    if ref_date == ref_date_orig:
        print('Same reference date chosen as existing reference date.')
        print('Copy %s to %s' % (inFile, outFile))
        shutil.copy2(inFile, outFile)
        return outFile

    # Referencing in time
    h5 = h5py.File(inFile, 'r')
    ref_data = h5[k].get(ref_date)[:]

    print('writing >>> '+outFile)
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]
        dset = group.create_dataset(date, data=data-ref_data, compression='gzip')
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5.close()

    ## Update attributes
    atr = ref_date_attribute(atr, ref_date, date_list)
    for key, value in atr.items():
        group.attrs[key] = value
    h5out.close()
    return outFile
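# Usage sketch for ref_date_file() (illustrative; the file name and date are
# hypothetical). The input date may be given in YYYYMMDD or YYMMDD form, since
# it is normalized with ptime.yyyymmdd() before the lookup.
def _example_change_ref_date():
    out_file = ref_date_file('timeseries.h5', '20100101')
    if out_file:
        print('time-series re-referenced in time: '+out_file)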
def igram_perp_baseline_list(File):
    '''Get perpendicular baseline list from input multi_group hdf5 file'''
    print('read perp baseline info from '+File)
    p_baseline_list = []
    k = readfile.read_attribute(File)['FILE_TYPE']
    h5 = h5py.File(File, 'r')
    epochList = sorted(h5[k].keys())
    for epoch in epochList:
        p_baseline = (float(h5[k][epoch].attrs['P_BASELINE_BOTTOM_HDR']) +
                      float(h5[k][epoch].attrs['P_BASELINE_TOP_HDR'])) / 2
        p_baseline_list.append(p_baseline)
    h5.close()
    return p_baseline_list
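# Usage sketch for igram_perp_baseline_list() (illustrative; the stack file
# name is hypothetical). Each returned value is the average of the top/bottom
# perpendicular baselines read above.
def _example_perp_baseline_range():
    pbase_list = igram_perp_baseline_list('unwrapIfgram.h5')
    print('perp baseline range: %.1f - %.1f m' % (min(pbase_list), max(pbase_list)))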
def read_timeseries_info():
    global atr, k, h5, dateList, tims, date_num, inps
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is '+k+': '+inps.timeseries_file)
    if not k == 'timeseries':
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5[k].keys())
    date_num = len(dateList)
    inps.dates, tims = ptime.date_list2vector(dateList)
def read_seed_reference2inps(reference_file, inps=None):
    '''Read seed/reference info from reference file and update input namespace'''
    if not inps:
        inps = cmdLineParse([''])
    atr_ref = readfile.read_attribute(inps.reference_file)
    atr_ref_key_list = list(atr_ref.keys())
    if (not inps.ref_y or not inps.ref_x) and 'ref_x' in atr_ref_key_list:
        inps.ref_y = int(atr_ref['ref_y'])
        inps.ref_x = int(atr_ref['ref_x'])
    if (not inps.ref_lat or not inps.ref_lon) and 'ref_lon' in atr_ref_key_list:
        inps.ref_lat = float(atr_ref['ref_lat'])
        inps.ref_lon = float(atr_ref['ref_lon'])
    return inps
def load_single_dataset_hdf5(file_type, infile, outfile, extra_meta_dict=dict()):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli
    Inputs: file_type : string, group name of hdf5 file, i.e. dem, mask
            infile    : string, input ROI_PAC file name
            outfile   : string, output hdf5 file name
            extra_meta_dict : dict, extra attributes to output file
    Output: outfile   : string, output hdf5 file name
    '''
    atr = readfile.read_attribute(infile)

    if ut.update_file(outfile, infile):
        if (os.path.dirname(infile) == os.path.dirname(outfile) and
                os.path.splitext(infile)[1] == os.path.splitext(outfile)[1]):
            print(infile+' already in working directory with recommended format, no need to re-load.')
            outfile = infile
        else:
            # Read input file
            print('loading file: '+infile)
            data = readfile.read(infile)[0]

            # Write output file - data
            print('writing >>> '+outfile)
            h5 = h5py.File(outfile, 'w')
            group = h5.create_group(file_type)
            dset = group.create_dataset(file_type, data=data, compression='gzip')

            # Write output file - attributes
            for key, value in atr.items():
                group.attrs[key] = value
            try:
                group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
            except:
                pass
            key = 'INSAR_PROCESSOR'
            if key not in list(atr.keys()):
                try:
                    atr[key] = extra_meta_dict['insar_processor']
                except:
                    pass
            h5.close()

            #if (os.path.abspath(infile) != os.path.abspath(outfile) and \
            #    os.path.dirname(infile) == os.path.dirname(outfile)):
            #    print 'remove the duplicated, obsolete '+atr['FILE_TYPE']+' file in the same directory'
            #    rmCmd = 'rm '+infile
            #    print rmCmd
            #    os.system(rmCmd)
    return outfile
def read_timeseries_yx(timeseries_file, y, x):
    '''Read time-series displacement on point (y,x) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        y/x             : int, row/column number of point of interest
    Output:
        dis_ts : list of float, displacement time-series of point of interest
    '''
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    h5 = h5py.File(timeseries_file, 'r')
    date_list = list(h5[k].keys())

    dis_ts = []
    for date in date_list:
        dis_ts.append(h5[k].get(date)[y, x])
    h5.close()
    return dis_ts
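# Usage sketch for read_timeseries_yx() (illustrative; the file name and pixel
# location are hypothetical; the displacement unit is assumed to be meters).
def _example_point_displacement():
    dis_ts = read_timeseries_yx('timeseries.h5', y=100, x=200)
    print('%d epochs, final displacement: %.4f' % (len(dis_ts), dis_ts[-1]))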
def main(argv):
    inps = cmdLineParse()

    # Input file info
    atr = readfile.read_attribute(inps.ifgram_file)
    k = atr['FILE_TYPE']
    if not k == 'interferograms':
        sys.exit('ERROR: only interferograms file supported, input is '+k+' file!')

    # Network Inversion
    if not inps.inverse_method == 'L1':
        print('Inverting time-series using L2-norm minimization')
        ut.timeseries_inversion(inps.ifgram_file, inps.timeseries_file)
    else:
        print('Inverting time-series using L1-norm minimization')
        ut.timeseries_inversion_L1(inps.ifgram_file, inps.timeseries_file)
    return inps.timeseries_file
def check_existed_hdf5_file(roipacFileList, hdf5File):
    '''Check file list against existing hdf5 file'''
    # If input file list is empty
    outFileList = list(roipacFileList)
    if not outFileList:
        return outFileList

    # if previous hdf5 file exists
    if os.path.isfile(hdf5File):
        print(os.path.basename(hdf5File)+' already exists.')
        try:
            atr = readfile.read_attribute(hdf5File)
        except:
            print('File exists but is not readable, delete it.')
            rmCmd = 'rm '+hdf5File
            print(rmCmd)
            os.system(rmCmd)
            return outFileList
        k = atr['FILE_TYPE']
        h5 = h5py.File(hdf5File, 'r')
        epochList = sorted(h5[k].keys())
        h5.close()

        # Remove file/epoch that already exists
        for epoch in epochList:
            for file in roipacFileList:
                if epoch in file:
                    outFileList.remove(file)

        # Check mode length/width with existing hdf5 file
        if outFileList:
            ext = os.path.splitext(outFileList[0])[1]
            outFileList, mode_width, mode_length = check_file_size(outFileList)
            if mode_width != atr['WIDTH'] or mode_length != atr['FILE_LENGTH']:
                print('WARNING: input ROI_PAC files have a different size than the existing hdf5 file:')
                print('ROI_PAC file size: '+mode_length+', '+mode_width)
                print('HDF5    file size: '+atr['FILE_LENGTH']+', '+atr['WIDTH'])
                print('Continue WITHOUT loading '+ext+' file')
                print('To enforce loading, change/remove the existing HDF5 file name and re-run the loading script')
                outFileList = None
    return outFileList
def read_timeseries_lalo(timeseries_file, lat, lon):
    '''Read time-series displacement on point (lat, lon) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        lat/lon         : float, latitude/longitude of point of interest
    Output:
        dis_ts : list of float, displacement time-series of point of interest
    '''
    atr = readfile.read_attribute(timeseries_file)
    if 'X_FIRST' not in list(atr.keys()):
        print('ERROR: input file is not geocoded')
        return None

    # convert lat/lon to row/column index using the geocoded grid info
    lat0 = float(atr['Y_FIRST'])
    lat_step = float(atr['Y_STEP'])
    lon0 = float(atr['X_FIRST'])
    lon_step = float(atr['X_STEP'])
    y = int(np.rint((lat - lat0) / lat_step))
    x = int(np.rint((lon - lon0) / lon_step))

    dis_ts = read_timeseries_yx(timeseries_file, y, x)
    return dis_ts
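# Worked example of the geo-to-index arithmetic above (values are hypothetical):
# with Y_FIRST=34.0 and Y_STEP=-0.001, a query at lat=33.95 maps to row
# y = rint((33.95 - 34.0) / -0.001) = 50; the longitude/column works the same way.
def _example_lalo_displacement():
    dis_ts = read_timeseries_lalo('geo_timeseries.h5', lat=33.95, lon=-118.45)
    if dis_ts is not None:
        print('time-series length: %d' % len(dis_ts))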
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
        epoch = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[2]
    except:
        outFile = 'perpBaseline.h5'

    # Calculate perpendicular baseline time-series
    pbase = ut.perp_baseline_timeseries(atr, dimension=1)
    if pbase.shape[1] == 1:
        print(pbase)
        return pbase

    k = atr['FILE_TYPE']
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    h5 = h5py.File(File, 'r')
    epochList = sorted(h5[k].keys())
    epoch = ptime.yyyymmdd(epoch)
    epoch_idx = epochList.index(epoch)

    pbase_y = pbase[epoch_idx, :].reshape(length, 1)
    pbase_xy = np.tile(pbase_y, (1, width))

    print('writing >>> '+outFile)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'm'
    writefile.write(pbase_xy, atr, outFile)
    return outFile
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print('Input file is '+k)

    # Check: file in geo coord
    if 'X_FIRST' not in list(atr.keys()):
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch and k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print("No date/date12 input.\nIt's required for "+k+" file")
        sys.exit(1)

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch, vars(inps))

    # Data Operation - Display Unit & Rewrapping
    data, inps.disp_unit, inps.wrap = pview.scale_data4disp_unit_and_rewrap(data, atr, inps.disp_unit, inps.wrap)
    if inps.wrap:
        inps.ylim = [-np.pi, np.pi]

    ##### 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data, atr, inps.outfile, inps)

    print('Done.')
    return
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print('Input file is '+k)

    # Check: file in geo coord
    if 'X_FIRST' not in list(atr.keys()):
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch:
        if k in multi_group_hdf5_file:
            print("No date/date12 input.\nIt's required for "+k+" file")
            sys.exit(1)
        elif k in multi_dataset_hdf5_file:
            print('No input date ..., continue to convert the last date of time-series.')
            h5 = h5py.File(inps.file, 'r')
            date_list = sorted(h5[k].keys())
            h5.close()
            inps.epoch = date_list[-1]

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch, vars(inps))
    inps.outfile = os.path.splitext(inps.outfile)[0]+'.grd'

    ##### 2. Write GMT .grd file
    inps.outfile = write_grd_file(data, atr, inps.outfile)
    print('Done.')
    return inps.outfile
def get_date12_list(File):
    '''Read Date12 info from input file: Pairs.list or multi-group hdf5 file
    Output: date12_list - list of string in YYMMDD-YYMMDD format
    Example:
        date12List = get_date12_list('unwrapIfgram.h5')
        date12List = get_date12_list('Pairs.list')
    '''
    #print 'read pairs info from '+File
    date12_list = []
    ext = os.path.splitext(File)[1].lower()
    if ext == '.h5':
        k = readfile.read_attribute(File)['FILE_TYPE']
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        for epoch in epochList:
            date12 = h5[k][epoch].attrs['DATE12']
            date12_list.append(date12)
        h5.close()
    else:
        date12_list = np.loadtxt(File, dtype=str).tolist()

    date12_list = sorted(date12_list)
    return date12_list
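# Usage sketch for get_date12_list() (illustrative; the file names are
# hypothetical). Both sources are returned in the same sorted YYMMDD-YYMMDD
# format, so the two lists can be compared directly.
def _example_missing_pairs():
    date12_stack = get_date12_list('unwrapIfgram.h5')
    date12_input = get_date12_list('Pairs.list')
    missing = sorted(set(date12_input) - set(date12_stack))
    print('pairs not loaded yet: '+str(missing))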
def unwrap_error_correction_phase_closure(ifgram_file, mask_file, ifgram_cor_file=None):
    '''Correct unwrapping errors in network of interferograms using phase closure.
    Inputs:
        ifgram_file     - string, name/path of interferograms file
        mask_file       - string, name/path of mask file to mask the pixels to be corrected
        ifgram_cor_file - string, optional, name/path of corrected interferograms file
    Output:
        ifgram_cor_file
    Example:
        'unwrapIfgram_unwCor.h5' = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5', 'mask.h5')
    '''
    print('read mask from file: '+mask_file)
    # column-major (Fortran) flattening, matching the reshape(...).T when writing
    mask = readfile.read(mask_file)[0].flatten('F')

    atr = readfile.read_attribute(ifgram_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    pixel_num = length*width

    # Check reference pixel
    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)

    ##### Prepare curls
    curls, Triangles, C = ut.get_triangles(h5)
    curl_num = np.shape(curls)[0]
    print('Number of triangles: '+str(curl_num))

    curl_file = 'curls.h5'
    if not os.path.isfile(curl_file):
        print('writing >>> '+curl_file)
        ut.generate_curls(curl_file, h5, Triangles, curls)

    thr = 0.50
    curls = np.array(curls)
    n1 = curls[:, 0]
    n2 = curls[:, 1]
    n3 = curls[:, 2]

    print('reading interferograms...')
    print('Number of interferograms: '+str(ifgram_num))
    data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for ni in range(ifgram_num):
        ifgram = ifgram_list[ni]
        d = h5[k][ifgram].get(ifgram)[:].flatten('F')
        data[ni, :] = d
        prog_bar.update(ni+1)
    prog_bar.close()

    print('reading curls ...')
    print('number of curls: '+str(curl_num))
    h5curl = h5py.File(curl_file, 'r')
    curl_list = sorted(h5curl[k].keys())
    curl_data = np.zeros((curl_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=curl_num)
    for ni in range(curl_num):
        d = h5curl[k][curl_list[ni]].get(curl_list[ni])[:].flatten('F')
        curl_data[ni, :] = d
        prog_bar.update(ni+1)
    prog_bar.close()
    h5curl.close()

    print('estimating unwrapping error pixel by pixel ...')
    EstUnwrap = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for ni in range(pixel_num):
        if mask[ni] == 1:
            dU = data[:, ni]
            unwCurl = np.array(curl_data[:, ni])

            # split curls into those with/without significant closure phase
            ind = np.abs(unwCurl) >= thr
            N1 = n1[ind]
            N2 = n2[ind]
            N3 = n3[ind]
            indC = np.abs(unwCurl) < thr
            Nc1 = n1[indC]
            Nc2 = n2[indC]
            Nc3 = n3[indC]

            N = np.hstack([N1, N2, N3])
            UniN = np.unique(N)
            Nc = np.hstack([Nc1, Nc2, Nc3])
            UniNc = np.unique(Nc)

            inter = list(set(UniNc) & set(UniN))    # intersection
            UniNc = list(UniNc)
            for x in inter:
                UniNc.remove(x)

            D = np.zeros([len(UniNc), ifgram_num])
            for i in range(len(UniNc)):
                D[i, UniNc[i]] = 1

            AAA = np.vstack([-2*np.pi*C, D])
            # with Tikhonov regularization:
            AAAA = np.vstack([AAA, 0.25*np.eye(ifgram_num)])
            LLL = list(np.dot(C, dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ifgram_num))

            ind = np.isnan(AAAA)
            M1 = pinv(AAAA)
            M = np.dot(M1, LLL)
            EstUnwrap[:, ni] = np.round(M[0:ifgram_num])*2.0*np.pi
        prog_bar.update(ni+1, suffix='%s/%d' % (ni, pixel_num))
    prog_bar.close()

    dataCor = data + EstUnwrap

    ##### Output
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor.h5'
    print('writing >>> '+ifgram_cor_file)
    h5unwCor = h5py.File(ifgram_cor_file, 'w')
    gg = h5unwCor.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram, data=np.reshape(dataCor[i, :], [width, length]).T,
                                    compression='gzip')
        for key, value in h5[k][ifgram].attrs.items():
            group.attrs[key] = value
        prog_bar.update(i+1)
    prog_bar.close()
    h5unwCor.close()
    h5.close()
    return ifgram_cor_file
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches
        y/x_list    : list of int, bonding points in y/x
        ifgram_cor_file      : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print('estimate phase ramp during the correction')
    print('ramp type: '+ramp_type)

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i], x_list[i]] == 0:
            print('\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n'
                  % (y_list[i], x_list[i]))
            sys.exit(1)
    print('Number of bridges: '+str(len(x_list)//2))
    print('Bonding points coordinates:\nx: '+str(x_list)+'\ny: '+str(y_list))

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx = ''
        n_bridge = len(x_list)//2
        for i in range(n_bridge):
            pair_yx = str(y_list[2*i])+','+str(x_list[2*i])+','+str(y_list[2*i+1])+','+str(x_list[2*i+1])
            if not i == n_bridge-1:
                point_yx += pair_yx+','
                line_yx += pair_yx+';'
            else:
                point_yx += pair_yx
                line_yx += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print(plot_cmd)
            os.system(plot_cmd)
        except:
            pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor'+ext
    ifgram_cor_deramp_file = os.path.splitext(ifgram_cor_file)[0]+'_'+ramp_type+ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file, 'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file, 'w')
        group = h5out.create_group(k)
        print('writing >>> '+ifgram_cor_file)

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file, 'w')
            group_deramp = h5out_deramp.create_group(k)
            print('writing >>> '+ifgram_cor_deramp_file)

        ##### Loop
        print('Number of interferograms: '+str(ifgram_num))
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_derampCor-ramp, compression='gzip')
            for key, value in h5[k][ifgram].attrs.items():
                gg.attrs[key] = value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram, data=data_derampCor, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg_deramp.attrs[key] = value
            prog_bar.update(i+1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try:
            h5out_deramp.close()
        except:
            pass

    #### .unw file
    elif ext == '.unw':
        print('read '+ifgram_file)
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
        data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)
        data_cor = data_derampCor - ramp

        print('writing >>> '+ifgram_cor_file)
        ifgram_cor_file = writefile.write(data_cor, atr, ifgram_cor_file)
        if save_cor_deramp_file:
            print('writing >>> '+ifgram_cor_deramp_file)
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr, ifgram_cor_deramp_file)
    else:
        sys.exit('Un-supported file type: '+ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
def mask_file(File, maskFile, outFile=None, inps_dict=None):
    '''Mask input File with maskFile
    Inputs:
        File/maskFile - string
        inps_dict - dictionary including the following options:
                    subset_x/y - list of 2 ints, subset in x/y direction
                    thr - float, threshold/minValue to generate mask
    Output:
        outFile - string
    '''
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('masking '+k+' file: '+File+' ...')

    # Read maskFile
    atrm = readfile.read_attribute(maskFile)
    km = atrm['FILE_TYPE']
    if km not in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print('reading mask file: '+maskFile)
        mask = readfile.read(maskFile)[0]
        if inps_dict:
            mask = update_mask(mask, inps_dict)

    if not outFile:
        outFile = os.path.splitext(File)[0]+'_masked'+os.path.splitext(File)[1]

    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())

        h5out = h5py.File(outFile, 'w')
        print('writing >>> '+outFile)

        ##### Multiple Dataset File
        if k == 'timeseries':
            print('number of acquisitions: '+str(len(epochList)))
            group = h5out.create_group(k)
            for d in epochList:
                print(d)
                unw = h5file[k].get(d)[:]
                unw = mask_matrix(unw, mask)
                dset = group.create_dataset(d, data=unw, compression='gzip')
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: '+str(len(epochList)))
            gg = h5out.create_group(k)

            # Mask multi group file with multi group coherence file
            if km == 'coherence':
                h5mask = h5py.File(maskFile, 'r')
                cohList = sorted(h5mask[km].keys())
                if len(cohList) != len(epochList):
                    sys.exit('ERROR: coherence mask file has different number of interferograms than input file!')

            for i in range(len(epochList)):
                igram = epochList[i]
                print(igram)
                unw = h5file[k][igram].get(igram)[:]

                if km == 'coherence':
                    coh = cohList[i]
                    print(coh)
                    mask = h5mask[km][coh].get(coh)[:]
                    if inps_dict:
                        mask = update_mask(mask, inps_dict)

                unw = mask_matrix(unw, mask)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression='gzip')
                for key, value in h5file[k][igram].attrs.items():
                    group.attrs[key] = value

    ##### Single Dataset File
    else:
        unw, atr = readfile.read(File)
        unw = mask_matrix(unw, mask)
        print('writing >>> '+outFile)
        writefile.write(unw, atr, outFile)

    try:
        h5file.close()
    except:
        pass
    try:
        h5out.close()
    except:
        pass
    try:
        h5mask.close()
    except:
        pass
    return outFile
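# Usage sketch for mask_file() (illustrative; the file names and threshold are
# hypothetical). Per the docstring above, inps_dict may carry a 'thr' value
# that update_mask() applies to the mask before it is used.
def _example_mask_velocity():
    out_file = mask_file('velocity.h5', 'temporalCoherence.h5', inps_dict={'thr': 0.7})
    print('masked file written: '+out_file)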
def main(argv):
    try:
        File = argv[0]
    except:
        usage()
        sys.exit(1)

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+File)

    try:
        matFile = argv[1]
    except:
        matFile = os.path.splitext(File)[0]+'.mat'
    print('writing >>> '+matFile)

    #####
    h5file = h5py.File(File, 'r')
    if k in single_dataset_hdf5_file:
        data = h5file[k].get(k)[:]

        V = {}
        V['time_range'] = ''
        try:
            V['x_first'] = float(atr['X_FIRST'])
            V['y_first'] = float(atr['Y_FIRST'])
            V['x_step'] = float(atr['X_STEP'])
            V['y_step'] = float(atr['Y_STEP'])
            V['x_unit'] = atr['X_UNIT']
            V['y_unit'] = atr['Y_UNIT']
        except:
            V['x_first'] = 1
            V['y_first'] = 1
            V['x_step'] = 1
            V['y_step'] = 1
            V['x_unit'] = ''
            V['y_unit'] = ''

        try:
            V['wavelength'] = float(atr['WAVELENGTH'])
        except:
            print('WAVELENGTH was not found')
        try:
            V['sat_height'] = float(atr['HEIGHT'])
        except:
            print('HEIGHT was not found')
        try:
            V['near_range'] = float(atr['STARTING_RANGE'])
        except:
            print('STARTING_RANGE was not found')
        V['far_range'] = ''

        try:
            V['near_LookAng'] = float(atr['LOOK_REF1'])
        except:
            print('LOOK_REF1 was not found')
        try:
            V['far_LookAng'] = float(atr['LOOK_REF2'])
        except:
            print('LOOK_REF2 was not found')

        V['earth_radius'] = ''
        V['Unit'] = 'm/yr'
        V['bperptop'] = ''
        V['bperpbot'] = ''
        V['sat'] = ''
        try:
            V['width'] = int(atr['WIDTH'])
        except:
            print('WIDTH was not found')
        try:
            V['file_length'] = int(atr['FILE_LENGTH'])
        except:
            print('FILE_LENGTH was not found')
        V['t'] = ''
        V['date'] = ''
        V['date_years'] = ''
        try:
            V['sat'] = atr['satellite']
        except:
            V['sat'] = ''

        ########################################################
        V['data'] = data
        sio.savemat(matFile, {k: V})

    elif 'timeseries' in k:
        epochList = sorted(h5file['timeseries'].keys())
        data_dict = {}
        for epoch in epochList:
            print(epoch)
            d = h5file['timeseries'].get(epoch)
            ts = {}
            ts['data'] = d[0:d.shape[0], 0:d.shape[1]]
            try:
                ts['x_first'] = float(atr['X_FIRST'])
                ts['y_first'] = float(atr['Y_FIRST'])
                ts['x_step'] = float(atr['X_STEP'])
                ts['y_step'] = float(atr['Y_STEP'])
                ts['x_unit'] = atr['X_UNIT']
                ts['y_unit'] = atr['Y_UNIT']
            except:
                ts['x_first'] = 1
                ts['y_first'] = 1
                ts['x_step'] = 1
                ts['y_step'] = 1
                ts['x_unit'] = ''
                ts['y_unit'] = ''
            ts['wavelength'] = float(atr['WAVELENGTH'])
            ts['sat_height'] = float(atr['HEIGHT'])
            ts['near_range'] = float(atr['STARTING_RANGE'])
            ts['far_range'] = float(atr['STARTING_RANGE1'])
            ts['near_LookAng'] = float(atr['LOOK_REF1'])
            ts['far_LookAng'] = float(atr['LOOK_REF2'])
            ts['earth_radius'] = float(atr['EARTH_RADIUS'])
            ts['Unit'] = 'm'
            ts['bperptop'] = float(atr['P_BASELINE_TOP_HDR'])
            ts['bperpbot'] = float(atr['P_BASELINE_BOTTOM_HDR'])
            ts['sat'] = atr['PLATFORM']
            ts['width'] = int(atr['WIDTH'])
            ts['file_length'] = int(atr['FILE_LENGTH'])
            ts['t'] = np.round((yyyymmdd2years(epoch) - yyyymmdd2years(epochList[0])) * 365)
            ts['date'] = epoch
            ts['date_years'] = yyyymmdd2years(epoch)
            data_dict['t'+str(epoch)] = ts

        # data_dict['Number_of_epochs'] = len(epochList)
        data_dict['epoch_dates'] = epochList
        sio.savemat(matFile, {k: data_dict})

    h5file.close()
    return
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    if inps.reset:
        print('----------------------------------------------------------------------------')
        for file in inps.file:
            remove_reference_pixel(file)
        return

    ##### Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print('reading reference info from template: '+inps.template_file)
        inps = read_seed_template2inps(inps.template_file, inps)
    if inps.reference_file:
        print('reading reference info from reference: '+inps.reference_file)
        inps = read_seed_reference2inps(inps.reference_file, inps)

    ## Do not use ref_lat/lon input for file in radar-coord
    #if not 'X_FIRST' in atr.keys() and (inps.ref_lat or inps.ref_lon):
    #    print 'Lat/lon reference input is disabled for file in radar coord.'
    #    inps.ref_lat = None
    #    inps.ref_lon = None

    # Convert ref_lat/lon to ref_y/x
    if inps.ref_lat and inps.ref_lon:
        if 'X_FIRST' in list(atr.keys()):
            inps.ref_y = subset.coord_geo2radar(inps.ref_lat, atr, 'lat')
            inps.ref_x = subset.coord_geo2radar(inps.ref_lon, atr, 'lon')
        else:
            # Convert lat/lon to az/rg for radar coord file using geomap*.trans file
            inps.ref_y, inps.ref_x = ut.glob2radar(np.array(inps.ref_lat), np.array(inps.ref_lon),
                                                   inps.trans_file, atr)[0:2]
        print('Input reference point in lat/lon: '+str([inps.ref_lat, inps.ref_lon]))
        print('Input reference point in   y/x  : '+str([inps.ref_y, inps.ref_x]))

    # Do not use ref_y/x outside of data coverage
    if (inps.ref_y and inps.ref_x and
            not (0 <= inps.ref_y <= length and 0 <= inps.ref_x <= width)):
        inps.ref_y = None
        inps.ref_x = None
        print('WARNING: input reference point is OUT of data coverage!')
        print('Continue with other method to select reference point.')

    # Do not use ref_y/x in masked out area
    if inps.ref_y and inps.ref_x and inps.mask_file:
        print('mask: '+inps.mask_file)
        mask = readfile.read(inps.mask_file)[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            print('WARNING: input reference point is in masked OUT area!')
            print('Continue with other method to select reference point.')

    ##### Select method
    if inps.ref_y and inps.ref_x:
        inps.method = 'input-coord'
    elif inps.coherence_file:
        if os.path.isfile(inps.coherence_file):
            inps.method = 'max-coherence'
        else:
            inps.coherence_file = None

    if inps.method == 'manual':
        inps.parallel = False
        print('Parallel processing is disabled for manual seeding method.')

    ##### Seeding file by file
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(len(inps.file))

    if len(inps.file) == 1:
        seed_file_inps(inps.file[0], inps, inps.outfile)
    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(seed_file_inps)(file, inps) for file in inps.file)
    else:
        for File in inps.file:
            seed_file_inps(File, inps)

    print('Done.')
    return
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    '''Seed input File with reference value in refList'''
    print('Reference value: ')
    print(refList)

    ##### IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('file type: '+k)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print('\nERROR: Reference value has different epoch number from input file.')
            print('Reference list epoch number: '+str(len(refList)))
            print('Input file epoch number: '+str(epochNum))
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print('writing >>> '+outName)
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

        ## Loop
        if k == 'timeseries':
            print('number of acquisitions: '+str(epochNum))
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5file[k].get(epoch)[:]
                data -= refList[i]
                dset = group.create_dataset(epoch, data=data, compression='gzip')
                prog_bar.update(i+1, suffix=epoch)
            atr = seed_attributes(atr, ref_x, ref_y)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: '+str(epochNum))
            date12_list = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                #print epoch
                data = h5file[k][epoch].get(epoch)[:]
                atr = h5file[k][epoch].attrs
                data -= refList[i]
                atr = seed_attributes(atr, ref_x, ref_y)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print('writing >>> '+outName)
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass
    return outName