def load_single_dataset_hdf5(file_type, infile, outfile, extra_meta_dict=None):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli

    Inputs:
        file_type       : string, group name of hdf5 file, i.e. dem, mask
        infile          : string, input ROI_PAC file name
        outfile         : string, output hdf5 file name
        extra_meta_dict : dict, extra attributes to output file
    Output:
        outfile : string, output hdf5 file name
    '''
    # None sentinel instead of the shared mutable default "dict()"
    if extra_meta_dict is None:
        extra_meta_dict = {}

    # Skip re-loading when outfile is already up to date w.r.t. infile
    if not ut.update_file(outfile, infile):
        return outfile

    # Read input file
    print('loading file: '+infile)
    data, atr = readfile.read(infile)

    # Write output file - data
    print('writing >>> '+outfile)
    h5 = h5py.File(outfile, 'w')
    try:
        group = h5.create_group(file_type)
        group.create_dataset(file_type, data=data, compression='gzip')

        # Write output file - attributes
        for key, value in atr.items():
            group.attrs[key] = value
        if 'project_name' in extra_meta_dict:
            group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
        # BUG FIX: the original assigned atr['INSAR_PROCESSOR'] AFTER the
        # attribute loop had already run, so the value never reached the
        # file. Write it straight into the group attributes instead.
        key = 'INSAR_PROCESSOR'
        if key not in atr.keys() and 'insar_processor' in extra_meta_dict:
            group.attrs[key] = extra_meta_dict['insar_processor']
    finally:
        # Release the HDF5 handle even if a write above raises
        h5.close()
    return outfile
def load_single_dataset_hdf5(file_type, infile, outfile, extra_meta_dict=None):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli

    Inputs:
        file_type       : string, group name of hdf5 file, i.e. dem, mask
        infile          : string, input ROI_PAC file name
        outfile         : string, output hdf5 file name
        extra_meta_dict : dict, extra attributes to output file
    Output:
        outfile : string, output hdf5 file name
    '''
    # NOTE(review): this is the second definition of load_single_dataset_hdf5
    # in this file and shadows the earlier one — confirm the first is obsolete.
    # None sentinel instead of the shared mutable default "dict()"
    if extra_meta_dict is None:
        extra_meta_dict = {}

    atr = readfile.read_attribute(infile)

    if ut.update_file(outfile, infile):
        if (os.path.dirname(infile) == os.path.dirname(outfile) and
                os.path.splitext(infile)[1] == os.path.splitext(outfile)[1]):
            # Input already lives in the working directory with the
            # recommended format/extension: reuse it as-is.
            print(infile + ' already in working directory with recommended format, no need to re-load.')
            outfile = infile
        else:
            # Read input file
            print('loading file: ' + infile)
            data = readfile.read(infile)[0]

            # Write output file - data
            print('writing >>> ' + outfile)
            h5 = h5py.File(outfile, 'w')
            try:
                group = h5.create_group(file_type)
                group.create_dataset(file_type, data=data, compression='gzip')

                # Write output file - attributes
                for key, value in atr.items():
                    group.attrs[key] = value
                if 'project_name' in extra_meta_dict:
                    group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
                # BUG FIX: the original assigned atr['INSAR_PROCESSOR'] AFTER
                # the attribute loop had already run, so the value never
                # reached the file. Write it to the group attributes instead.
                key = 'INSAR_PROCESSOR'
                if key not in atr.keys() and 'insar_processor' in extra_meta_dict:
                    group.attrs[key] = extra_meta_dict['insar_processor']
            finally:
                # Release the HDF5 handle even if a write above raises
                h5.close()
    return outfile
def copy_file(targetFile, destDir):
    '''Copy file and its .rsc/.par/.xml file to destination directory.

    Inputs:
        targetFile : string, path of the file to copy
        destDir    : string, destination directory
    Output:
        destFile : string, path of the copied file inside destDir
    '''
    destFile = os.path.join(destDir, os.path.basename(targetFile))
    # Only copy when destFile is missing or older than targetFile
    if ut.update_file(destFile, targetFile):
        print('copy '+targetFile+' to '+destDir)
        shutil.copy2(targetFile, destDir)
        # Best-effort copy of the sidecar metadata files: not every file
        # type has all of them, so a missing one is silently skipped.
        for suffix in ['.rsc', '.xml', '.par']:
            try:
                shutil.copy2(targetFile+suffix, destDir)
            except EnvironmentError:  # IOError/OSError: sidecar absent
                pass
    return destFile
def main(argv):
    '''Compute per-date residual RMS of a timeseries, pick reference/exclude
    dates, write them to text files, and plot the RMS bar chart.
    '''
    inps = cmdLineParse()
    if inps.template_file:
        inps = read_template2inps(inps.template_file)

    ##### calculate timeseries of residual Root Mean Square
    #std_list, date_list = ut.get_residual_std(inps.timeseries_file, inps.mask_file, inps.ramp_type)
    rms_list, date_list = ut.get_residual_rms(inps.timeseries_file, inps.mask_file, inps.ramp_type)

    ##### reference_date.txt
    # Reference date = acquisition with the minimum residual RMS
    print '------------------------------------------------------------'
    ref_idx = np.argmin(rms_list)
    ref_date = date_list[ref_idx]
    print 'date with minimum residual RMS: %s - %.4f' % (ref_date, rms_list[ref_idx])

    refTxtFile = 'reference_date.txt'
    if (inps.save_reference_date and \
        ut.update_file(refTxtFile, [inps.timeseries_file, inps.mask_file, inps.template_file],\
                       check_readable=False)):
        f = open(refTxtFile, 'w')
        f.write(ref_date + '\n')
        f.close()
        print 'save date to file: ' + refTxtFile

    ##### exclude_date.txt
    # Exclude dates = acquisitions whose residual RMS exceeds the threshold.
    # NOTE(review): list.index() returns the FIRST matching position, so two
    # dates with identical RMS above the threshold would yield a duplicated
    # index — confirm this is acceptable for the expected data.
    print '------------------------------------------------------------'
    ex_idx_list = [rms_list.index(i) for i in rms_list if i > inps.min_rms]
    print 'date(s) with residual RMS > ' + str(inps.min_rms)

    exTxtFile = 'exclude_date.txt'
    if ex_idx_list:
        if (inps.save_exclude_date and \
            ut.update_file(exTxtFile, [inps.timeseries_file, inps.mask_file, inps.template_file],\
                           check_readable=False)):
            f = open(exTxtFile, 'w')
            for i in ex_idx_list:
                print '%s - %.4f' % (date_list[i], rms_list[i])
                f.write(date_list[i] + '\n')
            f.close()
            print 'save date(s) to file: ' + exTxtFile
    else:
        print 'None.'

    ##### Plot
    # NOTE(review): os.path.splitext(inps.timeseries_file)[0] keeps any
    # leading directory component, so a timeseries_file given with a path
    # would embed that path inside fig_name — confirm intended.
    fig_name = os.path.dirname(os.path.abspath(inps.timeseries_file))+\
               '/rms_'+os.path.splitext(inps.timeseries_file)[0]
    if inps.ramp_type != 'no':
        fig_name += '_' + inps.ramp_type
    fig_name += '.pdf'
    if ut.update_file(fig_name, [exTxtFile, refTxtFile, inps.template_file], check_readable=False):
        if inps.fig_size:
            fig = plt.figure(figsize=inps.fig_size)
        else:
            fig = plt.figure()
        ax = fig.add_subplot(111)
        font_size = 12

        dates, datevector = ptime.date_list2vector(date_list)
        # Bar width: 3/4 of the typical (mode) acquisition interval;
        # fall back to the minimum interval if the mode is unavailable.
        try:
            bar_width = ut.mode(np.diff(dates).tolist()) * 3 / 4
        except:
            bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
        x_list = [i - bar_width / 2 for i in dates]
        # Rescale from meters to millimeters for plotting (in-place rebind)
        rms_list = [i * 1000. for i in rms_list]
        min_rms = inps.min_rms * 1000.

        # Plot all dates
        ax.bar(x_list, rms_list, bar_width.days)
        #ax.bar(x_list, rms_list, bar_width.days)

        # Plot reference date
        #if inps.save_reference_date:
        ax.bar(x_list[ref_idx], rms_list[ref_idx], bar_width.days, label='Reference date')

        # Plot exclude dates
        #if ex_idx_list and inps.save_exclude_date:
        if ex_idx_list:
            ex_x_list = [x_list[i] for i in ex_idx_list]
            ex_rms_list = [rms_list[i] for i in ex_idx_list]
            ax.bar(ex_x_list, ex_rms_list, bar_width.days, color='darkgray', label='Exclude date(s)')

        # Plot min_rms line (horizontal threshold)
        ax, xmin, xmax = ptime.auto_adjust_xaxis_date(
            ax, datevector, font_size, every_year=inps.tick_year_num)
        ax.plot(np.array([xmin, xmax]), np.array([min_rms, min_rms]), '--k')

        # axis format
        ax = pnet.auto_adjust_yaxis(ax, rms_list + [min_rms], font_size, ymin=0.0)
        ax.set_xlabel('Time [years]', fontsize=font_size)
        ax.set_ylabel('Root Mean Square [mm]', fontsize=font_size)
        ax.yaxis.set_ticks_position('both')
        ax.tick_params(labelsize=font_size)
        if inps.save_reference_date or inps.save_exclude_date:
            plt.legend(fontsize=font_size)

        # save figure
        fig.savefig(fig_name, bbox_inches='tight', transparent=True)
        print 'save figure to file: ' + fig_name
    return
def ifgram_inversion(ifgramFile='unwrapIfgram.h5', coherenceFile='coherence.h5', meta=None):
    '''Implementation of the SBAS algorithm.
    modified from sbas.py written by scott baker, 2012

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        meta          - dict, including the following options:
                        weight_function
                        chunk_size - float, max number of data
                                     (ifgram_num*row_num*col_num) to read
                                     per loop; to control the memory
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    Example:
        meta = dict()
        meta['weight_function'] = 'variance'
        meta['chunk_size'] = 0.5e9
        meta['timeseriesFile'] = 'timeseries_var.h5'
        meta['tempCohFile'] = 'temporalCoherence_var.h5'
        ifgram_inversion('unwrapIfgram.h5', 'coherence.h5', meta)
    '''
    # NOTE(review): meta.keys() is read here BEFORE the "if not meta" guard
    # below, so calling with the default meta=None would raise — confirm
    # all callers pass a dict.
    if 'tempCohFile' not in meta.keys():
        meta['tempCohFile'] = 'temporalCoherence.h5'
    meta['timeseriesStdFile'] = 'timeseriesDecorStd.h5'
    total = time.time()

    if not meta:
        meta = vars(cmdLineParse())

    # Skip re-inversion when the output is already newer than the input
    if meta['update_mode'] and not ut.update_file(meta['timeseriesFile'], ifgramFile):
        return meta['timeseriesFile'], meta['tempCohFile']

    ##### Basic Info
    # length/width
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    meta['length'] = length
    meta['width'] = width

    # ifgram_list (dropped interferograms filtered out by check_drop_ifgram)
    h5ifgram = h5py.File(ifgramFile,'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    #if meta['weight_function'] in ['no','uniform']:
    #    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    meta['ifgram_list'] = ifgram_list
    ifgram_num = len(ifgram_list)

    # date12_list/date8_list/tbase_diff
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    meta['date8_list'] = date8_list
    meta['date12_list'] = date12_list

    # temporal baseline differences between consecutive acquisitions
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((-1,1))
    meta['tbase_diff'] = tbase_diff

    print 'number of interferograms: %d' % (ifgram_num)
    print 'number of acquisitions : %d' % (date_num)
    print 'number of columns: %d' % (width)
    print 'number of lines : %d' % (length)

    ##### ref_y/x/value
    # Read the phase value of every interferogram at the spatial reference
    # pixel; without it the inversion is not referenced in space.
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
        ref_value = np.zeros((ifgram_num,1), np.float32)
        for j in range(ifgram_num):
            ifgram = ifgram_list[j]
            dset = h5ifgram['interferograms'][ifgram].get(ifgram)
            ref_value[j] = dset[ref_y,ref_x]
        meta['ref_y'] = ref_y
        meta['ref_x'] = ref_x
        meta['ref_value'] = ref_value
    except:
        if meta['skip_ref']:
            meta['ref_value'] = 0.0
            print 'skip checking reference pixel info - This is for SIMULATION ONLY.'
        else:
            print 'ERROR: No ref_x/y found! Can not invert interferograms without reference in space.'
            print 'run seed_data.py '+ifgramFile+' --mark-attribute for a quick referencing.'
            sys.exit(1)
    h5ifgram.close()

    ##### Rank of Design matrix for weighted inversion
    # A rank-deficient design matrix means the network is not fully
    # connected: acceptable (SVD) for the unweighted case, fatal for WLS.
    A, B = ut.design_matrix(ifgramFile, date12_list)
    print '-------------------------------------------------------------------------------'
    if meta['weight_function'] in ['no','uniform']:
        print 'generic least square inversion with min-norm phase velocity'
        print ' based on Berardino et al. (2002, IEEE-TGRS)'
        print ' OLS for pixels with fully connected network'
        print ' SVD for pixels with partially connected network'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'WARNING: singular design matrix! Inversion result can be biased!'
            print 'continue using its SVD solution on all pixels'
    else:
        print 'weighted least square (WLS) inversion with min-norm phase, pixelwise'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'ERROR: singular design matrix!'
            print ' Input network of interferograms is not fully connected!'
            print ' Can not invert the weighted least square solution.'
            print 'You could try:'
            print ' 1) Add more interferograms to make the network fully connected:'
            print ' a.k.a., no multiple subsets nor network islands'
            print " 2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)
    print '-------------------------------------------------------------------------------'

    ##### Invert time-series phase
    ##Check parallel environment
    # Parallel processing is only used for the (pixelwise) weighted inversion
    if meta['weight_function'] in ['no','uniform']:
        meta['parallel'] = False
    if meta['parallel']:
        num_cores, meta['parallel'], Parallel, delayed = ut.check_parallel(1000, print_msg=False)

    ##Split into chunks to reduce memory usage
    r_step = meta['chunk_size']/ifgram_num/width        #split in lines
    if meta['weight_function'] not in ['no','uniform']: #more memory usage (coherence) for WLS
        r_step /= 2.0
        if meta['parallel']:
            r_step /= num_cores
    r_step = int(ceil_to_1(r_step))
    meta['row_step'] = r_step
    chunk_num = int((length-1)/r_step)+1

    if chunk_num > 1:
        print 'maximum chunk size: %.1E' % (meta['chunk_size'])
        print 'split %d lines into %d patches for processing' % (length, chunk_num)
        print ' with each patch up to %d lines' % (r_step)
        if meta['parallel']:
            print 'parallel processing using %d cores ...' % (min([num_cores,chunk_num]))

    ##Computing the inversion
    # Build the list of row-chunk boxes: (x0, y0, x1, y1) covering full width
    box_list = []
    for i in range(chunk_num):
        r0 = i*r_step
        r1 = min([length, r0+r_step])
        box = (0,r0,width,r1)
        box_list.append(box)
    box_num = len(box_list)

    if not meta['parallel']:
        # Serial path: invert each patch in turn and paste into the output arrays
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        for i in range(box_num):
            if box_num > 1:
                print '\n------- Processing Patch %d out of %d --------------' % (i+1, box_num)
            box = box_list[i]
            ts, tcoh, tsStd = ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box)
            tempCoh[box[1]:box[3],box[0]:box[2]] = tcoh
            timeseries[:,box[1]:box[3],box[0]:box[2]] = ts
            timeseriesStd[:,box[1]:box[3],box[0]:box[2]] = tsStd
    else:
        # Parallel path: each worker writes its patch to a temp HDF5 file,
        # then the temp files are concatenated and removed.
        ##Temp file list
        meta['ftemp_base'] = 'timeseries_temp_'
        temp_file_list = [meta['ftemp_base']+str(i)+'.h5' for i in range(chunk_num)]

        ##Computation
        Parallel(n_jobs=num_cores)(delayed(ifgram_inversion_patch)\
                                   (ifgramFile, coherenceFile, meta, box) for box in box_list)

        ##Concatenate temp files
        # Each temp file stacks [timeseries slices..., temporal coherence]
        # along axis 0: dset[0:-1] is the timeseries, dset[-1] the coherence.
        print 'concatenating temporary timeseries files ...'
        timeseries = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        rmCmd = 'rm'
        for i in range(chunk_num):
            fname = temp_file_list[i]
            box = box_list[i]
            print 'reading '+fname
            h5temp = h5py.File(fname, 'r')
            dset = h5temp['timeseries'].get('timeseries')
            timeseries[:,box[1]:box[3],box[0]:box[2]] = dset[0:-1,:,:]
            tempCoh[box[1]:box[3],box[0]:box[2]] = dset[-1,:,:]
            h5temp.close()
            rmCmd += ' '+fname
        print rmCmd
        os.system(rmCmd)

    # Convert inverted phase (radian) to range change (meter)
    print 'converting phase to range'
    phase2range = -1*float(atr['WAVELENGTH'])/(4.*np.pi)
    timeseries *= phase2range
    # NOTE(review): timeseriesStd is only assigned in the serial branch above;
    # on the parallel path this line would raise NameError — confirm.
    timeseriesStd *= abs(phase2range)

    ##### Calculate time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(ifgramFile, ifgram_list)
    # convert np.array into string separated by white space
    # NOTE(review): str.translate(None, '[],') is Python-2-only API; Python 3
    # requires str.maketrans — confirm this file is py2-only.
    pbase = str(pbase.tolist()).translate(None,'[],')
    pbase_top = str(pbase_top.tolist()).translate(None,'[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None,'[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    ##### Output
    ## 1. Write time-series file
    meta['timeseriesFile'] = write_timeseries_hdf5_file(timeseries, date8_list, atr,\
                                                        timeseriesFile=meta['timeseriesFile'])
    # Only write the std file when at least one value is non-zero
    if not np.all(timeseriesStd == 0.):
        meta['timeseriesStdFile'] = write_timeseries_hdf5_file(timeseriesStd, date8_list, atr,\
                                                               timeseriesFile=meta['timeseriesStdFile'])

    ## 2. Write Temporal Coherence File
    print 'writing >>> '+meta['tempCohFile']
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    meta['tempCohFile'] = writefile.write(tempCoh, atr, meta['tempCohFile'])

    print 'Time series inversion took ' + str(time.time()-total) +' secs\nDone.'
    return meta['timeseriesFile'], meta['tempCohFile']