def main(iargs=None):
    """Modify the network of interferograms in an ifgramStack file."""
    inps = cmd_line_parse(iargs)

    # reset mode: restore all interferograms and exit early
    if inps.reset:
        print('--------------------------------------------------')
        reset_network(inps.file)
        return inps.file

    inps.date12_to_drop = get_date12_to_drop(inps)

    if inps.date12_to_drop is not None:
        # mark the selected pairs as dropped in the stack file
        ifgramStack(inps.file).update_drop_ifgram(date12List_to_drop=inps.date12_to_drop)

        # Touch spatial average txt file of coherence if it's existed
        ut.touch(os.path.splitext(os.path.basename(inps.file))[0]+'_coherence_spatialAvg.txt')

        # Plot result
        if inps.plot:
            print('\nplot modified network and save to file.')
            plotCmd = 'plot_network.py {} --nodisplay'.format(inps.file)
            if inps.template_file:
                plotCmd += ' --template {}'.format(inps.template_file)
            print(plotCmd)
            os.system(plotCmd)
        print('Done.')
    return
def get_date12_list(fname, dropIfgram=False):
    """Read date12 info from a pair-list text file or an ifgramStack HDF5 file.

    Parameters: fname      - str, path of a multi-group HDF5 file or text file
                             (e.g. Pairs.list)
                dropIfgram - bool, for HDF5 input only: honor the dropIfgram flag
    Returns:    date12_list - sorted list of str, or None for unsupported HDF5 types
    Example:    date12List = get_date12_list('ifgramStack.h5')
                date12List = get_date12_list('ifgramStack.h5', dropIfgram=True)
                date12List = get_date12_list('Pairs.list')
    """
    ext = os.path.splitext(fname)[1].lower()
    if ext == '.h5':
        # HDF5 input: only the ifgramStack file type is supported
        k = readfile.read_attribute(fname)['FILE_TYPE']
        if k != 'ifgramStack':
            return None
        date12_list = ifgramStack(fname).get_date12_list(dropIfgram=dropIfgram)
    else:
        # text input: one date12 per row, first column only
        txtContent = np.loadtxt(fname, dtype=bytes).astype(str)
        if len(txtContent.shape) == 1:
            txtContent = txtContent.reshape(-1, 1)
        date12_list = list(txtContent[:, 0])
    return sorted(date12_list)
def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
    """Reconstruct the unwrapped interferometric phase from a time-series file.

    Parameters: ts_file     - str, path of the time-series HDF5 file
                ifgram_file - str, path of the ifgramStack file defining the network
                out_file    - str, path of the output reconstructed stack
    Returns:    ifgram_file - str, path of the input ifgramStack file
    """
    # read time-series displacement and convert it to phase
    atr = readfile.read_attribute(ts_file)
    range2phase = -4.*np.pi / float(atr['WAVELENGTH'])
    print('reading timeseries data from file {} ...'.format(ts_file))
    ts_data = readfile.read(ts_file)[0] * range2phase
    num_date, length, width = ts_data.shape
    ts_data = ts_data.reshape(num_date, -1)

    # reconstruct unwrapPhase: prepend a -1 column to the design matrix so that
    # the reference acquisition is accounted for in the dot product
    print('reconstructing the interferograms from timeseries')
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A1 = stack_obj.get_design_matrix4timeseries(stack_obj.get_date12_list(dropIfgram=False))[0]
    num_ifgram = A1.shape[0]
    A = np.hstack((-1.*np.ones((num_ifgram, 1)), A1))
    ifgram_est = np.dot(A, ts_data).reshape(num_ifgram, length, width)
    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
    del ts_data

    # write the reconstructed phase to file
    writefile.write({'unwrapPhase': ifgram_est}, out_file=out_file, ref_file=ifgram_file)
    return ifgram_file
def get_giant_ifg_list(fnames):
    """Collect reference/secondary dates and perpendicular baselines of a network.

    Parameters: fnames - list of str, path(s): either one ifgramStack HDF5 file
                         or individual interferogram files carrying attributes
    Returns:    m_date_list - list of str, reference (master) dates
                s_date_list - list of str, secondary dates
                pbase_list  - list of float, perpendicular baselines
    """
    m_date_list = []
    s_date_list = []
    pbase_list = []

    ext = os.path.splitext(fnames[0])[1]
    if ext == '.h5':
        # HDF5 stack: read the kept (non-dropped) interferograms only
        obj = ifgramStack(fnames[0])
        obj.open()
        m_date_list = obj.mDates[obj.dropIfgram].tolist()
        s_date_list = obj.sDates[obj.dropIfgram].tolist()
        pbase_list = obj.pbaseIfgram[obj.dropIfgram].tolist()
    else:
        # individual files: read DATE12 and baseline from each file's attributes
        ifgramNum = len(fnames)
        print('Number of interferograms: %d' % (ifgramNum))
        for fname in fnames:
            atr = readfile.read_attribute(fname)
            m_date, s_date = ptime.yymmdd(atr['DATE12'].split('-'))
            # average of top/bottom perpendicular baseline
            pbase = (float(atr['P_BASELINE_TOP_HDR']) +
                     float(atr['P_BASELINE_BOTTOM_HDR'])) / 2.
            m_date_list.append(m_date)
            s_date_list.append(s_date)
            pbase_list.append(pbase)
    return m_date_list, s_date_list, pbase_list
def get_number_of_nonzero_closure_phase(ifgram_file, dsName='unwrapPhase', step=100):
    """Count, per pixel, the triplets with non-zero integer closure-phase ambiguity.

    Parameters: ifgram_file - str, path of the ifgramStack file
                dsName      - str, dataset used as unwrapped phase
                step        - int, number of rows processed per chunk
    Returns:    closure_int - 2D np.ndarray of np.int16 in size of (length, width)
    """
    # read ifgramStack file
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = stack_obj.get_design_matrix4triplet(date12_list)
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsName,
                                              dropIfgram=True).reshape(num_ifgram, -1)

    # calculate number of nonzero closure phase, chunk by chunk in rows
    closure_int = np.zeros((length, width), np.int16)
    num_loop = int(np.ceil(length / step))
    prog_bar = ptime.progressBar(maxValue=num_loop)
    for i in range(num_loop):
        r0 = i * step
        r1 = min((r0+step), stack_obj.length)
        box = (0, r0, stack_obj.width, r1)
        unw = ifginv.read_unwrap_phase(stack_obj, box=box, ref_phase=ref_phase,
                                       unwDatasetName=dsName, dropIfgram=True,
                                       print_msg=False).reshape(num_ifgram, -1)
        closure_pha = np.dot(C, unw)
        # integer ambiguity = (closure - wrapped closure) / 2*pi
        cint = np.round((closure_pha - ut.wrap(closure_pha)) / (2.*np.pi))
        closure_int[r0:r1, :] = np.sum(cint != 0, axis=0).reshape(-1, width)
        prog_bar.update(i+1, every=1)
    prog_bar.close()
    return closure_int
def calculate_temporal_coherence(ifgram_file, timeseries_file, ifg_num_file=None, chunk_size=100e6):
    """Calculate temporal coherence based on input timeseries file and interferograms file
    Parameters: ifgram_file     : str, path of interferograms file
                timeseries_file : str, path of time series file
                ifg_num_file    : str, path of file for number of interferograms
                                  used in inversion.
                chunk_size      : float, max number of data elements per patch
    Returns:    temp_coh        : 2D np.array, temporal coherence in float32
    """
    # get box list and size info
    box_list = ifginv.split_into_boxes(ifgram_file, chunk_size=chunk_size)
    num_box = len(box_list)

    stack_shape = ifgramStack(ifgram_file).get_size()
    temp_coh = np.zeros(stack_shape[1:3], np.float32)

    # process the stack patch by patch to limit memory usage
    for i in range(num_box):
        if num_box > 1:
            print('\n------- Processing Patch %d out of %d --------------' % (i+1, num_box))
        box = box_list[i]
        temp_cohi = calculate_temporal_coherence_patch(ifgram_file, timeseries_file,
                                                       box=box, ifg_num_file=ifg_num_file)
        temp_coh[box[1]:box[3], box[0]:box[2]] = temp_cohi
    return temp_coh
def update_object(outFile, inObj, box, updateMode=True):
    """Check whether the output HDF5 file needs to be (re)written.

    Skip writing if: 1) the file exists and is readable, AND
                     2) it contains all date12 from ifgramStackDict,
                        or all datasets from geometryDict.
    Returns: write_flag - bool, True if the file should be written
    """
    write_flag = True
    if updateMode and ut.run_or_skip(outFile, check_readable=True) == 'skip':
        if inObj.name == 'ifgramStack':
            in_size = inObj.get_size(box=box)[1:]
            in_date12_list = inObj.get_date12_list()
            outObj = ifgramStack(outFile)
            out_size = outObj.get_size()[1:]
            out_date12_list = outObj.get_date12_list(dropIfgram=False)
            if out_size == in_size and set(in_date12_list).issubset(set(out_date12_list)):
                print(('All date12 exists in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False
        elif inObj.name == 'geometry':
            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and all(i in outObj.datasetNames for i in inObj.get_dataset_list())):
                print(('All datasets exists in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False
    return write_flag
def read_network_info(inps):
    """Read the interferogram network info from the input ifgramStack into inps.

    Populates: inps.date12_list, inps.ex_date12_list, inps.date_list, inps.yx.
    Raises:    ValueError if the input file is not an ifgramStack.
    """
    k = readfile.read_attribute(inps.ifgram_file)['FILE_TYPE']
    if k != 'ifgramStack':
        raise ValueError('input file {} is not ifgramStack: {}'.format(inps.ifgram_file, k))

    obj = ifgramStack(inps.ifgram_file)
    obj.open(print_msg=inps.print_msg)
    inps.date12_list = obj.get_date12_list(dropIfgram=False)
    date12_kept = obj.get_date12_list(dropIfgram=True)
    inps.ex_date12_list = sorted(list(set(inps.date12_list) - set(date12_kept)))
    inps.date_list = obj.get_date_list(dropIfgram=False)
    vprint('number of all interferograms: {}'.format(len(inps.date12_list)))
    vprint('number of dropped interferograms: {}'.format(len(inps.ex_date12_list)))
    vprint('number of kept interferograms: {}'.format(len(inps.date12_list) - len(inps.ex_date12_list)))
    vprint('number of acquisitions: {}'.format(len(inps.date_list)))

    # convert lat/lon of interest into radar coordinates
    if inps.lalo:
        if not inps.lookup_file:
            lookup_file = os.path.join(os.path.dirname(inps.ifgram_file), 'geometry*.h5')
            inps.lookup_file = ut.get_lookup_file(filePattern=lookup_file)
        coord = ut.coordinate(obj.metadata, lookup_file=inps.lookup_file)
        inps.yx = coord.geo2radar(inps.lalo[0], inps.lalo[1])[0:2]

    # default to the stack's reference pixel
    if not inps.yx:
        inps.yx = (obj.refY, obj.refX)
        vprint('plot initial coherence matrix at reference pixel: {}'.format(inps.yx))
    return inps
def reset_network(stackFile):
    """Reset/restore all pairs within the input file (set the dropIfgram flag
    of every interferogram back to True).

    Parameters: stackFile - str, path of the ifgramStack HDF5 file
    Returns:    stackFile - str, same as input
    """
    print("reset dataset 'dropIfgram' to True for all interferograms for file: "+stackFile)
    obj = ifgramStack(stackFile)
    obj.open(print_msg=False)
    if np.all(obj.dropIfgram):
        print('All dropIfgram are already True, no need to reset.')
    else:
        # flip the flag in-place inside the HDF5 file
        with h5py.File(stackFile, 'r+') as f:
            f['dropIfgram'][:] = True
        # invalidate the cached spatial average of coherence
        ut.touch(os.path.splitext(os.path.basename(stackFile))[0]+'_coherence_spatialAvg.txt')
    return stackFile
def nonzero_mask(File, out_file='maskConnComp.h5', datasetName=None):
    """Generate mask file for non-zero values of the input multi-group HDF5 file.

    Parameters: File        - str, path of the input file (ifgramStack only)
                out_file    - str, path of the output mask file
                datasetName - str, dataset to check for non-zero values
    Returns:    out_file, or None for unsupported file types
    """
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    if k == 'ifgramStack':
        mask = ifgramStack(File).nonzero_mask(datasetName=datasetName)
    else:
        print('Only ifgramStack file is supported for now, input is '+k)
        return None

    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask, out_file=out_file, metadata=atr)
    return out_file
def reset_network(stackFile):
    """Restore every interferogram pair in the stack by setting its
    dropIfgram flag back to True.

    Parameters: stackFile - str, path of the ifgramStack HDF5 file
    Returns:    stackFile - str, same as input
    """
    print("reset dataset 'dropIfgram' to True for all interferograms for file: "
          + stackFile)
    obj = ifgramStack(stackFile)
    obj.open(print_msg=False)
    if np.all(obj.dropIfgram):
        print('All dropIfgram are already True, no need to reset.')
    else:
        with h5py.File(stackFile, 'r+') as f:
            f['dropIfgram'][:] = True
        # refresh the cached coherence spatial average file
        ut.touch('coherenceSpatialAvg.txt')
    return stackFile
def manual_select_pairs_to_remove(stackFile):
    """Manually select interferograms to remove by clicking on the network plot.

    Parameters: stackFile - str, path of the ifgramStack HDF5 file
    Returns:    date12_click - list of str (YYYYMMDD_YYYYMMDD) to drop,
                               or None if the user aborts at the final prompt
    """
    print('\n-------------------------------------------------------------')
    print('Manually select interferograms to remove')
    print('1) click two dates/points to select one pair of interferogram')
    print('2) repeat until you select all pairs you would like to remove')
    print('3) close the figure to continue the program ...')
    print('-------------------------------------------------------------\n')
    obj = ifgramStack(stackFile)
    obj.open()
    date12ListAll = obj.date12List
    pbase = obj.get_perp_baseline_timeseries(dropIfgram=False)
    dateList = obj.dateList
    datesNum = mdates.date2num(np.array(ptime.date_list2vector(dateList)[0]))

    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))

    # display the network
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pp.plot_network(ax, date12ListAll, dateList, list(pbase),
                         date12List_drop=date12ListDropped)
    print('display the network of interferogram of file: '+stackFile)

    date_click = []
    date12_click = []

    def onclick(event):
        # map the click to the nearest acquisition; every 2 clicks form a pair
        idx = nearest_neighbor(event.xdata, event.ydata, datesNum, pbase)
        print('click at '+dateList[idx])
        date_click.append(dateList[idx])
        if len(date_click) % 2 == 0 and date_click[-2] != date_click[-1]:
            [mDate, sDate] = sorted(date_click[-2:])
            mIdx = dateList.index(mDate)
            sIdx = dateList.index(sDate)
            date12 = mDate+'_'+sDate
            if date12 in date12ListAll:
                print('select date12: '+date12)
                date12_click.append(date12)
                # highlight the selected pair in red
                ax.plot([datesNum[mIdx], datesNum[sIdx]],
                        [pbase[mIdx], pbase[sIdx]], 'r', lw=4)
            else:
                print(date12+' is not existed in input file')
        plt.draw()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    plt.show()

    if not ut.yes_or_no('Proceed to drop the ifgrams/date12?'):
        date12_click = None
    return date12_click
def run_or_skip(inps, dsNameDict, out_file):
    """Decide whether the loading step needs to run or can be skipped.

    Parameters: inps       - namespace with updateMode, length, width, unwFile, num_pair
                dsNameDict - dict of output dataset names
                out_file   - str, path of the output HDF5 file
    Returns:    flag       - str, 'run' or 'skip'
    """
    flag = 'run'

    # check 1 - update mode status
    if not inps.updateMode:
        return flag

    # check 2 - output file existence
    if ut.run_or_skip(out_file, check_readable=True) == 'run':
        return flag

    # check 3 - output dataset info
    in_size = (inps.length, inps.width)
    if 'unwrapPhase' in dsNameDict.keys():
        # compare date12 and size
        ds = gdal.Open(inps.unwFile, gdal.GA_ReadOnly)
        in_date12_list = [ds.GetRasterBand(i + 1).GetMetadata("unwrappedPhase")['Dates']
                          for i in range(inps.num_pair)]
        # flip YYYYMMDD_YYYYMMDD order to match the stack convention
        in_date12_list = ['_'.join(d.split('_')[::-1]) for d in in_date12_list]

        out_obj = ifgramStack(out_file)
        out_obj.open(print_msg=False)
        out_size = (out_obj.length, out_obj.width)
        out_date12_list = out_obj.get_date12_list(dropIfgram=False)

        if out_size == in_size and set(in_date12_list).issubset(set(out_date12_list)):
            print(('All date12 exists in file {} with same size as required,'
                   ' no need to re-load.'.format(os.path.basename(out_file))))
            flag = 'skip'
    elif 'height' in dsNameDict.keys():
        # compare dataset names and size
        in_dsNames = list(dsNameDict.keys())
        out_obj = geometry(out_file)
        out_obj.open(print_msg=False)
        out_size = (out_obj.length, out_obj.width)
        out_dsNames = out_obj.datasetNames

        if out_size == in_size and set(in_dsNames).issubset(set(out_dsNames)):
            print(('All datasets exists in file {} with same size as required,'
                   ' no need to re-load.'.format(os.path.basename(out_file))))
            flag = 'skip'
    return flag
def get_nonzero_phase_closure(ifgram_file, out_file=None, thres=0.1, unwDatasetName='unwrapPhase'):
    """Calculate/Read number of non-zero phase closure
    Parameters: ifgram_file    : string, path of ifgram stack file
                out_file       : string, path of num non-zero phase closure file
                thres          : float, closure phase magnitude (radian) at or
                                 above which a triplet counts as non-zero
                unwDatasetName : string, dataset used as unwrapped phase
    Returns:    num_nonzero_closure : 2D np.array in size of (length, width)
    """
    if not out_file:
        out_file = 'numNonzeroPhaseClosure_{}.h5'.format(unwDatasetName)

    if os.path.isfile(out_file) and readfile.read_attribute(out_file):
        # cached result exists and is readable - read it back instead of recomputing
        print('1. read number of nonzero phase closure from file: {}'.format(out_file))
        num_nonzero_closure = readfile.read(out_file)[0]
    else:
        obj = ifgramStack(ifgram_file)
        obj.open(print_msg=False)
        length, width = obj.length, obj.width

        ref_phase = obj.get_reference_phase(unwDatasetName=unwDatasetName, dropIfgram=False)
        C = obj.get_design_matrix4triplet(obj.get_date12_list(dropIfgram=False))

        # calculate phase closure line by line to save memory usage
        num_nonzero_closure = np.zeros((length, width), np.float32)
        print('1. calculating phase closure of all pixels from dataset - {} ...'.format(unwDatasetName))
        line_step = 10
        num_loop = int(np.ceil(length / line_step))
        prog_bar = ptime.progressBar(maxValue=num_loop)
        for i in range(num_loop):
            # read phase
            i0, i1 = i*line_step, min(length, (i+1)*line_step)
            box = (0, i0, width, i1)
            pha_data = ifginv.read_unwrap_phase(obj, box, ref_phase,
                                                unwDatasetName=unwDatasetName,
                                                dropIfgram=False,
                                                print_msg=False)
            # calculate phase closure
            pha_closure = np.dot(C, pha_data)
            pha_closure = np.abs(pha_closure - ut.wrap(pha_closure))
            # get number of non-zero phase closure
            num_nonzero = np.sum(pha_closure >= thres, axis=0)
            num_nonzero_closure[i0:i1, :] = num_nonzero.reshape(i1-i0, width)
            prog_bar.update(i+1, every=1, suffix='{}/{} lines'.format((i+1)*line_step, length))
        prog_bar.close()

        atr = dict(obj.metadata)
        atr['FILE_TYPE'] = 'mask'
        # FIX: UNIT metadata is a string in every other writer in this codebase
        # (e.g. write2hdf5_file uses '1'); the original assigned the int 1 here.
        atr['UNIT'] = '1'
        writefile.write(num_nonzero_closure, out_file=out_file, metadata=atr)
    return num_nonzero_closure
def run_or_skip(inps):
    """Check whether the time-series inversion needs to re-run in update mode.

    Compares: 1) existence of output files, 2) modification times of input
    dataset vs outputs, 3) key configuration and reference-pixel metadata.
    Returns: flag - str, 'run' or 'skip'
    """
    print('-' * 50)
    print('update mode: ON')
    flag = 'skip'

    # check output files vs input dataset
    if not all(os.path.isfile(i) for i in inps.outfile):
        flag = 'run'
        print('1) NOT ALL output files found: {}.'.format(inps.outfile))
    else:
        print('1) output files already exist: {}.'.format(inps.outfile))
        # input mtime: the dataset's own MODIFICATION_TIME attr if present,
        # otherwise the file's mtime; compare against the oldest output
        with h5py.File(inps.ifgramStackFile, 'r') as f:
            ti = float(f[inps.obsDatasetName].attrs.get(
                'MODIFICATION_TIME', os.path.getmtime(inps.ifgramStackFile)))
        to = min(os.path.getmtime(i) for i in inps.outfile)
        if ti > to:
            flag = 'run'
            print('2) output files are NOT newer than input dataset: {}.'.format(inps.obsDatasetName))
        else:
            print('2) output dataset is newer than input dataset: {}.'.format(inps.obsDatasetName))

    # check configuration
    if flag == 'skip':
        meta_keys = ['REF_Y', 'REF_X']
        atr_ifg = readfile.read_attribute(inps.ifgramStackFile)
        atr_ts = readfile.read_attribute(inps.tsFile)
        inps.numIfgram = len(ifgramStack(inps.ifgramStackFile).get_date12_list(dropIfgram=True))

        if any(str(vars(inps)[key]) != atr_ts.get(key_prefix + key, 'None')
               for key in configKeys):
            flag = 'run'
            print('3) NOT all key configration parameters are the same: {}'.format(configKeys))
        elif any(atr_ts[key] != atr_ifg[key] for key in meta_keys):
            flag = 'run'
            print('3) NOT all the metadata are the same: {}'.format(meta_keys))
        else:
            print('3) all key configuration parameters are the same: {}.'.format(configKeys))

    # result
    print('run or skip: {}.'.format(flag))
    return flag
def calculate_temporal_coherence_patch(ifgram_file, timeseries_file, box=None, ifg_num_file=None):
    """Calculate temporal coherence for one patch of the stack.

    Parameters: ifgram_file     - str, path of the interferograms file
                timeseries_file - str, path of the time-series file
                box             - tuple of 4 int, (x0, y0, x1, y1) of the patch;
                                  defaults to the whole file
                ifg_num_file    - str, optional file with the per-pixel number of
                                  interferograms used in the inversion
    Returns:    temp_coh        - 2D np.ndarray of float32 in patch size
    """
    atr = readfile.read_attribute(timeseries_file)
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # read time-series data (skip the reference date) and convert to phase
    ts_obj = timeseries(timeseries_file)
    ts_obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = ts_obj.read(box=box, print_msg=False).reshape(ts_obj.numDate, -1)
    ts_data = ts_data[1:, :]
    ts_data *= -4*np.pi/float(atr['WAVELENGTH'])

    # read interferogram phase, referenced to the stack's reference pixel
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A = stack_obj.get_design_matrix4timeseries(stack_obj.get_date12_list(dropIfgram=True))[0]
    print('reading unwrapPhase data from file: {}'.format(ifgram_file))
    ifgram_data = stack_obj.read(datasetName='unwrapPhase', box=box).reshape(A.shape[0], -1)
    ref_value = stack_obj.get_reference_phase(dropIfgram=True).reshape((-1, 1))
    ifgram_data -= np.tile(ref_value, (1, ifgram_data.shape[1]))

    # misfit between observed and reconstructed interferometric phase
    ifgram_diff = ifgram_data - np.dot(A, ts_data)
    del ts_data

    pixel_num = ifgram_data.shape[1]
    temp_coh = np.zeros((pixel_num), np.float32)

    # (fast) nasty solution, which uses all phase values incl. invalid zero phase
    if not ifg_num_file:
        temp_coh = np.abs(np.sum(np.exp(1j*ifgram_diff), axis=0)) / ifgram_diff.shape[0]
    # (slow) same solution as ifgram_inversion.py, considering:
    # 1) invalid zero phase in ifgram
    # 2) design matrix rank deficiency.
    else:
        print('considering different number of interferograms used in network inversion for each pixel')
        ifg_num_map = readfile.read(ifg_num_file, box=box)[0].flatten()
        prog_bar = ptime.progressBar(maxValue=pixel_num)
        for i in range(pixel_num):
            if ifg_num_map[i] > 0:
                # use only the interferograms with valid (non-zero) phase
                idx = ifgram_data[:, i] != 0.
                temp_diff = ifgram_diff[idx, i]
                temp_coh[i] = np.abs(np.sum(np.exp(1j*temp_diff), axis=0)) / temp_diff.shape[0]
            prog_bar.update(i+1, every=1000, suffix='{}/{}'.format(i+1, pixel_num))
        prog_bar.close()

    temp_coh = np.reshape(temp_coh, (box[3]-box[1], box[2]-box[0]))
    return temp_coh
def write_hdf5_file_patch(ifgram_file, data, box=None, dsName='unwrapPhase_phaseClosure'):
    """Write a patch of 3D dataset into an existing h5 file.
    Parameters: ifgram_file : string, name/path of output hdf5 file
                data        : 3D np.array to be written
                box         : tuple of 4 int, (x0, y0, x1, y1) of data in file
                dsName      : output dataset name
    Returns:    ifgram_file
    """
    num_ifgram, length, width = ifgramStack(ifgram_file).get_size(dropIfgram=False)
    if not box:
        box = (0, 0, width, length)
    num_row = box[3] - box[1]
    num_col = box[2] - box[0]

    # write to existing HDF5 file
    print('open {} with r+ mode'.format(ifgram_file))
    f = h5py.File(ifgram_file, 'r+')

    # get h5py.Dataset
    msg = 'dataset /{d} of {t:<10} in size of {s}'.format(
        d=dsName, t=str(data.dtype), s=(num_ifgram, box[3], box[2]))
    if dsName in f.keys():
        print('update ' + msg)
        ds = f[dsName]
    else:
        print('create ' + msg)
        ds = f.create_dataset(dsName, (num_ifgram, num_row, num_col),
                              maxshape=(None, None, None),
                              chunks=True, compression=None)

    # resize h5py.Dataset if current size is not enough
    if ds.shape != (num_ifgram, length, width):
        ds.resize((num_ifgram, box[3], box[2]))

    # write data to file
    ds[:, box[1]:box[3], box[0]:box[2]] = data
    ds.attrs['MODIFICATION_TIME'] = str(time.time())
    f.close()
    print('close {}'.format(ifgram_file))
    return ifgram_file
def write2hdf5_file(ifgram_file, metadata, ts, temp_coh, num_inv_ifg=None, suffix='', inps=None):
    """Write inversion results to HDF5 files: time-series, temporal coherence,
    and the number of inverted interferograms.
    """
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    date_list = stack_obj.get_date_list(dropIfgram=True)

    # File 1 - timeseries.h5
    ts_file = '{}{}.h5'.format(suffix, os.path.splitext(inps.outfile[0])[0])
    metadata['REF_DATE'] = date_list[0]
    metadata['FILE_TYPE'] = 'timeseries'
    metadata['UNIT'] = 'm'

    print('-' * 50)
    print('calculating perpendicular baseline timeseries')
    pbase = stack_obj.get_perp_baseline_timeseries(dropIfgram=True)

    ts_obj = timeseries(ts_file)
    ts_obj.write2hdf5(data=ts, dates=date_list, bperp=pbase, metadata=metadata)

    # File 2 - temporalCoherence.h5
    out_file = '{}{}.h5'.format(suffix, os.path.splitext(inps.outfile[1])[0])
    metadata['FILE_TYPE'] = 'temporalCoherence'
    metadata['UNIT'] = '1'
    print('-' * 50)
    writefile.write(temp_coh, out_file=out_file, metadata=metadata)

    # File 3 - numInvIfgram.h5
    out_file = 'numInvIfgram{}.h5'.format(suffix)
    metadata['FILE_TYPE'] = 'mask'
    metadata['UNIT'] = '1'
    print('-' * 50)
    writefile.write(num_inv_ifg, out_file=out_file, metadata=metadata)
    return
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file

    Parameters: fname       - str, path of the input file
                disp_ifgram - str, for ifgramStack input: 'all' / 'kept' /
                              anything else for the dropped pairs only
                disp_num    - bool, also print the index of each entry
                print_msg   - bool, print the list to stdout
    Returns:    dateList    - list of str, or None for unsupported file types
    """
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()
    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()
    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()
    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()
    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)
        # show dropped ifgram or not
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))
    else:
        print('--date option can not be applied to {} file, ignore it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                # for ifgramStack, indices refer to the full (un-dropped) list
                if k in ['ifgramStack']:
                    num = dateListAll.index(d)
                else:
                    num = dateList.index(d)
                msg = '{}\t{}'.format(d, num)
            else:
                msg = d
            print(msg)
    return dateList
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file

    Parameters: fname       - str, path of the input file
                disp_ifgram - str, 'all' / 'kept' / other (dropped only),
                              for ifgramStack input only
                disp_num    - bool, print the index along with each entry
                print_msg   - bool, print the resulting list
    Returns:    dateList    - list of str, or None for unsupported file types
    """
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    dateListAll = None

    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()
    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()
    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()
    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()
    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)
        # select which subset of pairs to show
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))
    else:
        print('--date option can not be applied to {} file, ignore it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                # ifgramStack indices are relative to the full pair list
                src = dateListAll if k in ['ifgramStack'] else dateList
                msg = '{}\t{}'.format(d, src.index(d))
            else:
                msg = d
            print(msg)
    return dateList
def read_input_index_list(idxList, stackFile=None):
    """Expand index expressions such as ['2', '3:5', '10'] into [2, 3, 4, 5, 10].

    Parameters: idxList   - list of str, plain index or colon-separated range
                stackFile - str, optional ifgramStack file; if given, indices
                            beyond the number of interferograms are discarded
    Returns:    sorted list of unique int indices
    """
    out = []
    for item in idxList:
        bounds = sorted(int(v) for v in item.split(':'))
        if len(bounds) == 2:
            out += list(range(bounds[0], bounds[1]+1))
        elif len(bounds) == 1:
            out.append(bounds[0])
        else:
            print('Unrecoganized input: '+item)
    out = sorted(set(out))

    # keep only indices that exist in the stack, if one is provided
    if stackFile:
        obj = ifgramStack(stackFile)
        obj.open(print_msg=False)
        out = [i for i in out if i < obj.numIfgram]
        obj.close(print_msg=False)
    return out
def main(iargs=None):
    """Correct unwrapping errors with bridging (+ optional phase closure)."""
    inps = cmd_line_parse(iargs)
    if not inps.datasetNameOut:
        inps.datasetNameOut = '{}_bridging'.format(inps.datasetNameIn)

    # update mode checking
    atr = readfile.read_attribute(inps.ifgram_file)
    if inps.update and atr['FILE_TYPE'] == 'ifgramStack':
        obj = ifgramStack(inps.ifgram_file)
        obj.open(print_msg=False)
        if inps.datasetNameOut in obj.datasetNames:
            print(("update mode is enabled AND {} already exists"
                   " skip this step.").format(inps.datasetNameOut))
            return inps.ifgram_file

    start_time = time.time()

    # get mask_cc from phase closure
    mask_cc_file = detect_unwrap_error(ifgram_file=inps.ifgram_file,
                                       mask_file=inps.maskFile,
                                       mask_cc_file='maskConnComp.h5',
                                       unwDatasetName=inps.datasetNameIn,
                                       cutoff=inps.cutoff)

    # run bridging
    bridges = search_bridge(mask_cc_file, radius=inps.bridgePtsRadius)
    run_unwrap_error_bridge(inps.ifgram_file, mask_cc_file, bridges,
                            dsNameIn=inps.datasetNameIn,
                            dsNameOut=inps.datasetNameOut,
                            ramp_type=inps.ramp)

    # optional extra round of phase-closure correction on the bridged result
    if inps.run_closure:
        print('')
        inps.datasetNameIn = inps.datasetNameOut
        inps.datasetNameOut = '{}_phaseClosure'.format(inps.datasetNameIn)
        run_unwrap_error_closure(inps,
                                 dsNameIn=inps.datasetNameIn,
                                 dsNameOut=inps.datasetNameOut,
                                 fast_mode=True)

    m, s = divmod(time.time()-start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.ifgram_file
def main(iargs=None):
    """Unwrapping-error correction: bridging first, then optionally phase closure."""
    inps = cmd_line_parse(iargs)
    if not inps.datasetNameOut:
        inps.datasetNameOut = '{}_bridging'.format(inps.datasetNameIn)

    # update mode: skip if the output dataset already exists in the stack
    atr = readfile.read_attribute(inps.ifgram_file)
    if inps.update and atr['FILE_TYPE'] == 'ifgramStack':
        obj = ifgramStack(inps.ifgram_file)
        obj.open(print_msg=False)
        if inps.datasetNameOut in obj.datasetNames:
            print(("update mode is enabled AND {} already exists"
                   " skip this step.").format(inps.datasetNameOut))
            return inps.ifgram_file

    start_time = time.time()

    # connected-component mask from phase closure
    mask_cc_file = detect_unwrap_error(ifgram_file=inps.ifgram_file,
                                       mask_file=inps.maskFile,
                                       mask_cc_file='maskConnComp.h5',
                                       unwDatasetName=inps.datasetNameIn,
                                       cutoff=inps.cutoff)

    # bridging correction
    bridges = search_bridge(mask_cc_file, radius=inps.bridgePtsRadius)
    run_unwrap_error_bridge(inps.ifgram_file, mask_cc_file, bridges,
                            dsNameIn=inps.datasetNameIn,
                            dsNameOut=inps.datasetNameOut,
                            ramp_type=inps.ramp)

    # follow-up phase-closure correction, chained onto the bridging output
    if inps.run_closure:
        print('')
        inps.datasetNameIn = inps.datasetNameOut
        inps.datasetNameOut = '{}_phaseClosure'.format(inps.datasetNameIn)
        run_unwrap_error_closure(inps,
                                 dsNameIn=inps.datasetNameIn,
                                 dsNameOut=inps.datasetNameOut,
                                 fast_mode=True)

    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.ifgram_file
def read_input_index_list(idxList, stackFile=None):
    """Expand range expressions like ['2', '3:5', '10'] into [2, 3, 4, 5, 10].

    Parameters: idxList   - list of str, single index or colon-separated range
                stackFile - str, optional ifgramStack file used to clip indices
                            to the number of interferograms in the stack
    Returns:    sorted list of unique int indices
    """
    expanded = []
    for token in idxList:
        nums = sorted(int(p) for p in token.split(':'))
        if len(nums) == 1:
            expanded.append(nums[0])
        elif len(nums) == 2:
            lo, hi = nums
            expanded += list(range(lo, hi + 1))
        else:
            print('Unrecoganized input: ' + token)
    expanded = sorted(set(expanded))

    # drop out-of-range indices when a stack file is given
    if stackFile:
        stack = ifgramStack(stackFile)
        stack.open(print_msg=False)
        expanded = [i for i in expanded if i < stack.numIfgram]
        stack.close(print_msg=False)
    return expanded
def main(argv):
    """Write every Step-th acquisition date (or the complement with
    --exclude-date) of the input file to a text file, one date per line.

    The date list is read from an ifgramStack or timeseries HDF5 file,
    selected by the input file name.
    """
    inps = cmdLineParse()
    input_file = inps.input_file
    Step = inps.step

    # resolve the output file name; remove a stale result first
    if inps.out_file:
        OUT = inps.out_file
    else:
        OUT = 'datelist_step' + str(inps.step) + '.txt'
    if os.path.isfile(OUT):
        print('delete exist output file ...')
        os.remove(OUT)

    # read the date list from the input file
    if 'ifgramStack' in input_file:
        # FIX: build the stack object from input_file; the original referenced
        # the undefined name `ifgram_file`, raising NameError on this branch.
        stack_obj = ifgramStack(input_file)
        stack_obj.open(print_msg=False)
        date_list = stack_obj.get_date_list(dropIfgram=False)
    elif 'timeseries' in input_file:
        tsobj = timeseries(input_file)
        date_list = tsobj.get_date_list()

    # indices selected every Step acquisitions (k0), or their complement (k1)
    N = len(date_list)
    kk = np.arange(0, N, 1)
    k0 = np.arange(0, N, Step)
    k1 = k0
    if inps.exclude_date:
        k1 = [i for i in kk if i not in k0]

    # append the selected dates to the output file, one per line
    for i in range(len(k1)):
        date0 = date_list[k1[i]]
        call_str = 'echo ' + date0 + ' >> ' + OUT
        os.system(call_str)

    print('Generate text file done.')
    # NOTE(review): exiting with status 1 signals failure to shell callers even
    # on success -- kept for backward compatibility, but 0 would be correct.
    sys.exit(1)
def write_hdf5_file_patch(ifgram_file, data, box=None, dsName='unwrapPhase_phaseClosure'):
    """Write a patch of 3D dataset into an existing h5 file.
    Parameters: ifgram_file : string, name/path of output hdf5 file
                data        : 3D np.array to be written
                box         : tuple of 4 int, (x0, y0, x1, y1) of data in file
                dsName      : output dataset name
    Returns:    ifgram_file
    """
    num_ifgram, length, width = ifgramStack(ifgram_file).get_size(dropIfgram=False)
    if not box:
        box = (0, 0, width, length)
    num_row = box[3] - box[1]
    num_col = box[2] - box[0]

    # write to existing HDF5 file
    print('open {} with r+ mode'.format(ifgram_file))
    with h5py.File(ifgram_file, 'r+') as f:
        # get h5py.Dataset: reuse the dataset if it exists, otherwise create it
        msg = 'dataset /{d} of {t:<10} in size of {s}'.format(
            d=dsName, t=str(data.dtype), s=(num_ifgram, box[3], box[2]))
        if dsName in f.keys():
            print('update '+msg)
            ds = f[dsName]
        else:
            print('create '+msg)
            ds = f.create_dataset(dsName, (num_ifgram, num_row, num_col),
                                  maxshape=(None, None, None),
                                  chunks=True, compression=None)

        # resize h5py.Dataset if current size is not enough
        if ds.shape != (num_ifgram, length, width):
            ds.resize((num_ifgram, box[3], box[2]))

        # write data to file and stamp the modification time
        ds[:, box[1]:box[3], box[0]:box[2]] = data
        ds.attrs['MODIFICATION_TIME'] = str(time.time())
    print('close {}'.format(ifgram_file))
    return ifgram_file
def select_network_candidate(inps):
    """Select the initial network of interferogram pairs.

    The initial pair list comes either from an existing reference stack file
    or from one of the pair-selection methods based on the baseline info.
    Populates: inps.date12_list / date_list / tbase_list / pbase_list / dop_list.
    Raises:    Exception for an unknown selection method.
    """
    date_list, tbase_list, pbase_list, dop_list = read_baseline_info(
        baseline_file=inps.baseline_file,
        reference_file=inps.referenceFile)

    # pair selection from reference
    if inps.referenceFile:
        log('select initial network from reference file: {}'.format(inps.referenceFile))
        stack_obj = ifgramStack(inps.referenceFile)
        date12_list = stack_obj.get_date12_list(dropIfgram=True)
        date12_list = ptime.yymmdd_date12(date12_list)

    # pair selection from method
    elif inps.baseline_file:
        log('select initial network with method: {}'.format(inps.method))
        if inps.method == 'all':
            date12_list = pnet.select_pairs_all(date_list)
        elif inps.method == 'delaunay':
            date12_list = pnet.select_pairs_delaunay(date_list, pbase_list, inps.norm)
        elif inps.method == 'star':
            date12_list = pnet.select_pairs_star(date_list)
        elif inps.method == 'sequential':
            date12_list = pnet.select_pairs_sequential(date_list, inps.connNum)
        elif inps.method == 'hierarchical':
            date12_list = pnet.select_pairs_hierarchical(date_list, pbase_list,
                                                         inps.tempPerpList)
        elif inps.method == 'mst':
            date12_list = pnet.select_pairs_mst(date_list, pbase_list)
        else:
            raise Exception('Unrecoganized select method: ' + inps.method)

    log('initial number of interferograms: {}'.format(len(date12_list)))
    inps.date12_list = date12_list
    inps.date_list = date_list
    inps.tbase_list = tbase_list
    inps.pbase_list = pbase_list
    inps.dop_list = dop_list
    return inps
def update_object(outFile, inObj, box, updateMode=True, xstep=1, ystep=1):
    """Check whether the output HDF5 file needs to be (re)written.

    Skip writing if: 1) the file exists and is readable, AND
                     2) it contains all date12 from ifgramStackDict,
                        or all datasets from geometryDict.
    Parameters: xstep/ystep - int, multilook steps applied to the input size
    Returns:    write_flag  - bool, True if the file should be written
    """
    write_flag = True
    if updateMode and ut.run_or_skip(outFile, check_readable=True) == 'skip':
        if inObj.name == 'ifgramStack':
            in_size = inObj.get_size(box=box, xstep=xstep, ystep=ystep)[1:]
            in_date12_list = inObj.get_date12_list()

            outObj = ifgramStack(outFile)
            out_size = outObj.get_size()[1:]
            out_date12_list = outObj.get_date12_list(dropIfgram=False)

            if out_size == in_size and set(in_date12_list).issubset(set(out_date12_list)):
                print(('All date12 exists in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False
        elif inObj.name == 'geometry':
            in_size = inObj.get_size(box=box, xstep=xstep, ystep=ystep)
            in_dset_list = inObj.get_dataset_list()

            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            out_size = outObj.get_size()
            out_dset_list = outObj.datasetNames

            if out_size == in_size and set(in_dset_list).issubset(set(out_dset_list)):
                print(('All datasets exists in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False
    return write_flag
def split2boxes(ifgram_file, memory_size=4, print_msg=True):
    """Split the stack extent into row-chunks to cap peak memory usage.

    Parameters: ifgram_file - str, path of the ifgramStack HDF5 file
                memory_size - float, max memory to use in GB
                print_msg   - bool, print the splitting summary
    Returns:    box_list - list of tuple of 4 int, (x0, y0, x1, y1) per chunk
                num_box  - int, number of boxes
    """
    ifg_obj = ifgramStack(ifgram_file)
    ifg_obj.open(print_msg=False)

    # 1st dimension size: defo obs (phase / offset) + weight + time-series
    num_epoch = ifg_obj.numIfgram * 2 + ifg_obj.numDate + 5
    length, width = ifg_obj.length, ifg_obj.width

    # split in lines based on the input memory limit
    # 4 bytes per float32 element; 1e3**3 converts GB to bytes
    y_step = (memory_size * (1e3**3)) / (num_epoch * width * 4)
    # calibrate based on experience
    y_step = int(ut.round_to_1(y_step * 0.6))

    num_box = int((length - 1) / y_step) + 1
    if print_msg and num_box > 1:
        print('maximum memory size: %.1E GB' % memory_size)
        print('split %d lines into %d patches for processing' % (length, num_box))
        print(' with each patch up to %d lines' % y_step)

    # y_step / num_box --> box_list; last box is clipped to the stack length
    box_list = []
    for i in range(num_box):
        y0 = i * y_step
        y1 = min([length, y0 + y_step])
        box = (0, y0, width, y1)
        box_list.append(box)

    return box_list, num_box
def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
    """Reconstruct unwrapped interferometric phase from a displacement time-series.

    Converts the time-series from range change (m) to phase (rad) using the
    radar wavelength, then applies the stack's design matrix to re-generate
    one unwrapped phase layer per interferogram pair.

    Parameters: ts_file     - str, path of the timeseries HDF5 file
                ifgram_file - str, path of the ifgramStack HDF5 file whose
                              pair network defines the design matrix
                out_file    - str, output HDF5 file for the reconstructed phase
    Returns:    ifgram_file - str, path of the INPUT stack file
                              NOTE(review): returning ifgram_file rather than
                              out_file looks unintentional — confirm with callers.
    """
    # read time-series
    atr = readfile.read_attribute(ts_file)
    # range (m) -> phase (rad): -4*pi / wavelength (round-trip path)
    range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
    print('reading timeseries data from file {} ...'.format(ts_file))
    ts_data = readfile.read(ts_file)[0] * range2phase
    num_date, length, width = ts_data.shape
    ts_data = ts_data.reshape(num_date, -1)

    # reconstruct unwrapPhase
    print('reconstructing the interferograms from timeseries')
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    # refDate='no' keeps the full design matrix (no reference-date column dropped)
    A = stack_obj.get_design_matrix4timeseries(date12_list, refDate='no')[0]
    ifgram_est = np.dot(A, ts_data).reshape(A.shape[0], length, width)
    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
    del ts_data

    # write to ifgram file
    dsDict = {}
    dsDict['unwrapPhase'] = ifgram_est
    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
    return ifgram_file
def run_unwrap_error_phase_closure(ifgram_file, common_regions, water_mask_file=None, ccName='connectComponent', dsNameIn='unwrapPhase', dsNameOut='unwrapPhase_phaseClosure'):
    """Correct unwrapping errors pair-by-pair using pre-computed integer ambiguities.

    For each kept interferogram, labels its connected components, matches each
    local region against the common regions (whose integer ambiguities were
    solved beforehand, e.g. by get_common_region_int_ambiguity), and adds the
    corresponding 2*pi*U correction. Writes the result into a new/existing
    dataset of the SAME stack file, opened in r+ mode.

    Parameters: ifgram_file     - str, path of interferogram stack HDF5 file (modified in place)
                common_regions  - list of region objects carrying date12_list,
                                  sample_coords and int_ambiguity attributes
                water_mask_file - str or None, zero out connect components over water
                ccName          - str, dataset name of the connected components
                dsNameIn        - str, input unwrapped phase dataset name
                dsNameOut       - str, output (corrected) dataset name
    Returns:    ifgram_file     - str, same path as input
    """
    print('-'*50)
    print('correct unwrapping error in {} with phase closure ...'.format(ifgram_file))
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    ref_y, ref_x = stack_obj.refY, stack_obj.refX
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    num_ifgram = len(date12_list)
    shape_out = (num_ifgram, length, width)

    # read water mask
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # prepare output data writing: reuse the dataset if it already exists
    print('open {} with r+ mode'.format(ifgram_file))
    f = h5py.File(ifgram_file, 'r+')
    print('input dataset:', dsNameIn)
    print('output dataset:', dsNameOut)
    if dsNameOut in f.keys():
        ds = f[dsNameOut]
        print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))
    else:
        ds = f.create_dataset(dsNameOut, shape_out, maxshape=(None, None, None), chunks=True, compression=None)
        print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))

    # correct unwrap error ifgram by ifgram
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        date12 = date12_list[i]

        # read unwrap phase to be updated, referenced to the stack reference pixel
        unw_cor = np.squeeze(f[dsNameIn][i, :, :]).astype(np.float32)
        unw_cor -= unw_cor[ref_y, ref_x]

        # update kept interferograms only; dropped ones are copied through referenced
        if stack_obj.dropIfgram[i]:
            # get local region info from connectComponent
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0
            cc_obj = connectComponent(conncomp=cc, metadata=stack_obj.metadata)
            cc_obj.label()
            local_regions = measure.regionprops(cc_obj.labelImg)

            # matching regions and correct unwrap error:
            # a local region matches a common region when ALL of the common
            # region's sample pixels fall inside the local region's mask
            idx_common = common_regions[0].date12_list.index(date12)
            for local_reg in local_regions:
                local_mask = cc_obj.labelImg == local_reg.label
                U = 0
                for common_reg in common_regions:
                    y = common_reg.sample_coords[:,0]
                    x = common_reg.sample_coords[:,1]
                    if all(local_mask[y, x]):
                        U = common_reg.int_ambiguity[idx_common]
                        break
                unw_cor[local_mask] += 2. * np.pi * U

        # write to hdf5 file
        ds[i, :, :] = unw_cor
        prog_bar.update(i+1, suffix=date12)
    prog_bar.close()

    # stamp modification time so downstream update-mode checks can detect the change
    ds.attrs['MODIFICATION_TIME'] = str(time.time())
    f.close()
    print('close {} file.'.format(ifgram_file))
    return ifgram_file
def check_loaded_dataset(work_dir='./', print_msg=True, relpath=False):
    """Check the result of loading data for the following two rules:
        1. file existence
        2. file attribute readability
    Parameters: work_dir  - str, MintPy working directory
                print_msg - bool, print out message
                relpath   - bool, return paths relative to the current directory
    Returns:    load_complete - bool, True if all required files/datasets exist
                stack_file    - str, path of ifgramStack.h5
                geom_file     - str, path of geometryGeo/Radar.h5
                lookup_file   - str or None, path of the lookup table file
    Raises:     ValueError / FileNotFoundError / AttributeError / Exception
                when a required file or dataset is missing or unreadable
    Example:    work_dir = os.path.expandvars('./FernandinaSenDT128/mintpy')
                ut.check_loaded_dataset(work_dir)
    """
    load_complete = True

    if not work_dir:
        work_dir = os.getcwd()
    work_dir = os.path.abspath(work_dir)

    # 1. interferograms stack file: needs at least one of unwrapPhase / rangeOffset / azimuthOffset
    ds_list = ['unwrapPhase', 'rangeOffset', 'azimuthOffset']
    flist = [os.path.join(work_dir, 'inputs/ifgramStack.h5')]
    stack_file = is_file_exist(flist, abspath=True)
    if stack_file is not None:
        obj = ifgramStack(stack_file)
        obj.open(print_msg=False)
        if all(x not in obj.datasetNames for x in ds_list):
            msg = 'required dataset is missing in file {}:'.format(stack_file)
            msg += '\n' + ' OR '.join(ds_list)
            raise ValueError(msg)
        # check coherence for phase stack (warn only, not fatal)
        if 'unwrapPhase' in obj.datasetNames and 'coherence' not in obj.datasetNames:
            print('WARNING: "coherence" is missing in file {}'.format(stack_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), './inputs/ifgramStack.h5')

    atr = readfile.read_attribute(stack_file)

    # 2. geom_file: height; X_FIRST in the attributes implies geocoded data
    if 'X_FIRST' in atr.keys():
        flist = [os.path.join(work_dir, 'inputs/geometryGeo.h5')]
    else:
        flist = [os.path.join(work_dir, 'inputs/geometryRadar.h5')]
    geom_file = is_file_exist(flist, abspath=True)
    if geom_file is not None:
        obj = geometry(geom_file)
        obj.open(print_msg=False)
        dname = geometryDatasetNames[0]
        if dname not in obj.datasetNames:
            raise ValueError('required dataset "{}" is missing in file {}'.format(dname, geom_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), './inputs/geometry*.h5')

    # 3. lookup_file: latitude,longitude or rangeCoord,azimuthCoord
    # could be different than geometry file in case of roipac and gamma
    flist = [os.path.join(work_dir, 'inputs/geometry*.h5')]
    lookup_file = get_lookup_file(flist, abspath=True, print_msg=print_msg)
    if 'X_FIRST' not in atr.keys():
        if lookup_file is not None:
            obj = geometry(lookup_file)
            obj.open(print_msg=False)
            # required lookup datasets depend on the InSAR processor
            if atr['PROCESSOR'] in ['isce', 'doris']:
                dnames = geometryDatasetNames[1:3]
            elif atr['PROCESSOR'] in ['gamma', 'roipac']:
                dnames = geometryDatasetNames[3:5]
            else:
                raise AttributeError('InSAR processor: {}'.format(atr['PROCESSOR']))
            for dname in dnames:
                if dname not in obj.datasetNames:
                    load_complete = False
                    raise Exception('required dataset "{}" is missing in file {}'.format(dname, lookup_file))
        else:
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), './inputs/geometry*.h5')
    else:
        print("Input data seems to be geocoded. Lookup file not needed.")

    if relpath:
        stack_file = os.path.relpath(stack_file) if stack_file else stack_file
        geom_file = os.path.relpath(geom_file) if geom_file else geom_file
        lookup_file = os.path.relpath(lookup_file) if lookup_file else lookup_file

    # print message
    if print_msg:
        print(('Loaded dataset are processed by ' 'InSAR software: {}'.format(atr['PROCESSOR'])))
        if 'X_FIRST' in atr.keys():
            print('Loaded dataset is in GEO coordinates')
        else:
            print('Loaded dataset is in RADAR coordinates')
        print('Interferograms Stack: {}'.format(stack_file))
        print('Geometry File : {}'.format(geom_file))
        print('Lookup Table File : {}'.format(lookup_file))
        if load_complete:
            print('-' * 50)
            print('All data needed found/loaded/copied. Processed 2-pass InSAR data can be removed.')
        print('-' * 50)

    return load_complete, stack_file, geom_file, lookup_file
def get_common_region_int_ambiguity(ifgram_file, cc_mask_file, water_mask_file=None, num_sample=100, dsNameIn='unwrapPhase'):
    """Solve the phase unwrapping integer ambiguity for the common regions among all interferograms
    Parameters: ifgram_file     : str, path of interferogram stack file
                cc_mask_file    : str, path of common connected components file
                water_mask_file : str, path of water mask file
                num_sample      : int, number of pixels sampled for each region
                dsNameIn        : str, dataset name of the unwrap phase to be corrected
    Returns:    common_regions  : list of skimage.measure._regionprops._RegionProperties object
                    modified by adding two more variables:
                    sample_coords : 2D np.ndarray in size of (num_sample, 2) in int64 format
                    int_ambiguity : 1D np.ndarray in size of (num_ifgram,) in int format
    """
    print('-' * 50)
    print('calculating the integer ambiguity for the common regions defined in', cc_mask_file)
    # stack info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    # C maps per-pair phases to triplet closure phases; cvxopt matrix for l1regls
    C = matrix(ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsNameIn, dropIfgram=True).reshape(num_ifgram, -1)

    # prepare common label
    print('read common mask from', cc_mask_file)
    cc_mask = readfile.read(cc_mask_file)[0]
    if water_mask_file is not None and os.path.isfile(water_mask_file):
        water_mask = readfile.read(water_mask_file)[0]
        print('refine common mask based on water mask file', water_mask_file)
        cc_mask[water_mask == 0] = 0

    label_img, num_label = connectComponent.get_large_label(cc_mask, min_area=2.5e3, print_msg=True)
    common_regions = measure.regionprops(label_img)
    print('number of common regions:', num_label)

    # add sample_coords / int_ambiguity
    print('number of samples per region:', num_sample)
    print('solving the phase-unwrapping integer ambiguity for {}'.format(dsNameIn))
    print('\tbased on the closure phase of interferograms triplets (Yunjun et al., 2019)')
    print('\tusing the L1-norm regularzed least squares approximation (LASSO) ...')
    for i in range(num_label):
        common_reg = common_regions[i]
        # sample_coords: random pixel sample inside the region
        # NOTE(review): np.random is not seeded here, so results vary run to run — confirm intended
        idx = sorted(np.random.choice(common_reg.area, num_sample, replace=False))
        common_reg.sample_coords = common_reg.coords[idx, :].astype(int)

        # solve for int_ambiguity; the region containing the stack reference
        # pixel is by definition ambiguity-free and is skipped (U stays 0)
        U = np.zeros((num_ifgram, num_sample))
        if common_reg.label == label_img[stack_obj.refY, stack_obj.refX]:
            print('{}/{} skip calculation for the reference region'.format(i + 1, num_label))
        else:
            prog_bar = ptime.progressBar(maxValue=num_sample, prefix='{}/{}'.format(i + 1, num_label))
            for j in range(num_sample):
                # read unwrap phase for one sampled pixel
                y, x = common_reg.sample_coords[j, :]
                unw = ifginv.read_unwrap_phase(stack_obj, box=(x, y, x + 1, y + 1), ref_phase=ref_phase, unwDatasetName=dsNameIn, dropIfgram=True, print_msg=False).reshape(num_ifgram, -1)

                # calculate closure_int: integer part of the triplet closure phase
                closure_pha = np.dot(C, unw)
                closure_int = matrix(np.round((closure_pha - ut.wrap(closure_pha)) / (2. * np.pi)))

                # solve for U via L1-regularized least squares (LASSO)
                U[:, j] = np.round(l1regls(-C, closure_int, alpha=1e-2, show_progress=0)).flatten()
                prog_bar.update(j + 1, every=5)
            prog_bar.close()
        # add int_ambiguity: per-pair median over all samples
        common_reg.int_ambiguity = np.median(U, axis=1)
        common_reg.date12_list = date12_list

    #sort regions by size to facilitate the region matching later
    common_regions.sort(key=lambda x: x.area, reverse=True)

    # plot sample result
    fig_size = pp.auto_figure_size(label_img.shape, disp_cbar=False)
    fig, ax = plt.subplots(figsize=fig_size)
    ax.imshow(label_img, cmap='jet')
    for common_reg in common_regions:
        ax.plot(common_reg.sample_coords[:, 1], common_reg.sample_coords[:, 0], 'k.', ms=2)
    pp.auto_flip_direction(stack_obj.metadata, ax, print_msg=False)
    out_img = 'common_region_sample.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('saved common regions and sample pixels to file', out_img)

    return common_regions
def prepare4multi_subplots(inps, metadata):
    """Prepare for multiple subplots:
    1) check multilook to save memory
    2) read existed reference pixel info for unwrapPhase
    3) read dropIfgram info
    4) read and prepare DEM for background

    Parameters: inps     - namespace of plotting options, updated in place
                metadata - dict, attributes of the input file
    Returns:    inps     - same namespace, updated in place
    """
    inps.dsetFamilyList = sorted(list(set(i.split('-')[0] for i in inps.dset)))

    # Update multilook parameters with new num and col number
    if inps.multilook and inps.multilook_num == 1:
        # Do not auto multilook mask and lookup table file
        auto_multilook = True
        for dsFamily in inps.dsetFamilyList:
            if any(i in dsFamily.lower() for i in ['mask', 'coord']):
                auto_multilook = False
        if auto_multilook:
            inps.multilook, inps.multilook_num = check_multilook_input(inps.pix_box, inps.fig_row_num, inps.fig_col_num)
        if inps.msk is not None:
            inps.msk = multilook_data(inps.msk, inps.multilook_num, inps.multilook_num)

    # Reference pixel for timeseries and ifgramStack
    # metadata = read_attribute(inps.file)
    inps.file_ref_yx = None
    if inps.key in ['ifgramStack'] and 'REF_Y' in metadata.keys():
        ref_y, ref_x = int(metadata['REF_Y']), int(metadata['REF_X'])
        length, width = int(metadata['LENGTH']), int(metadata['WIDTH'])
        if 0 <= ref_y < length and 0 <= ref_x < width:
            inps.file_ref_yx = [ref_y, ref_x]
            vprint('consider reference pixel in y/x: {}'.format(inps.file_ref_yx))

    # shrink the reference-pixel marker for crowded figures.
    # BUGFIX: check the larger threshold first — the original tested
    # `> 10` before `> 100`, which made the `> 100` branch unreachable.
    if inps.dsetNum > 100:
        inps.ref_marker_size /= 20.
    elif inps.dsetNum > 10:
        inps.ref_marker_size /= 10.
        # inps.disp_ref_pixel = False
        # vprint('turn off reference pixel plot for more than 10 datasets to display')

    # Check dropped interferograms
    inps.dropDatasetList = []
    if inps.key == 'ifgramStack' and inps.disp_title:
        obj = ifgramStack(inps.file)
        obj.open(print_msg=False)
        dropDate12List = obj.get_drop_date12_list()
        for i in inps.dsetFamilyList:
            inps.dropDatasetList += ['{}-{}'.format(i, j) for j in dropDate12List]
        vprint("mark interferograms with 'dropIfgram=False' in red colored title")

    # Read DEM (only if it matches the data file's size)
    if inps.dem_file:
        dem_metadata = read_attribute(inps.dem_file)
        if all(dem_metadata[i] == metadata[i] for i in ['LENGTH', 'WIDTH']):
            vprint('reading DEM: {} ... '.format(os.path.basename(inps.dem_file)))
            dem = read(inps.dem_file, datasetName='height', box=inps.pix_box, print_msg=False)[0]
            if inps.multilook:
                dem = multilook_data(dem, inps.multilook_num, inps.multilook_num)
            (inps.dem_shade,
             inps.dem_contour,
             inps.dem_contour_seq) = pp.prepare_dem_background(dem=dem, inps=inps, print_msg=inps.print_msg)
        else:
            inps.dem_file = None
            inps.transparency = 1.0
            vprint('Input DEM file has different size than data file, ignore it.')

    return inps
def main(iargs=None):
    """Generate interferogram and coherence images in GeoTiff format.

    Workflow: (optional job submission) -> modify network down to one
    connection -> geocode the ifgramStack -> export every pair's coherence
    and unwrapped phase as GeoTiff files.

    Parameters: iargs - list of str or None, command line arguments
                        (None falls back to sys.argv)
    """
    inps = putils.cmd_line_parse(iargs)

    # idiom fix: `iargs is not None` instead of `not iargs is None`
    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ifgramStack_to_ifgram_and_coherence'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    out_dir = inps.work_dir + '/' + pathObj.tiffdir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    # narrowed from a bare `except:` — glob()[0] only fails with IndexError
    try:
        file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except IndexError:
        raise Exception('ERROR in ' + os.path.basename(__file__) + ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(os.path.dirname(file)) + '/geo/geo_' + os.path.basename(file)
    lookup_file = os.path.dirname(os.path.dirname(file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(os.path.dirname(file)) + '/smallbaselineApp_template.txt'
    arg_string = file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    for i in range(len(date12_list)):
        date_str = date12_list[i]
        print('Working on ... ' + date_str)
        # NOTE(review): datasets are read from the radar-coordinate `file`,
        # while `obj` wraps the geocoded stack — confirm this mix is intended.
        data_coh = readfile.read(file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(file, datasetName='unwrapPhase-' + date_str)[0]
        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'
        create_geotiff(obj, data=data_coh, outfile=fname_coh, type='coherence', work_dir=inps.work_dir)
        create_geotiff(obj, data=data_unw, outfile=fname_unw, type='interferogram', work_dir=inps.work_dir)
    return
def read_network_info(inps):
    """Read the interferogram network info (dates, baselines, coherence) into inps.

    Parameters: inps - namespace with file / bl_list_file / save_list / maskFile attributes
    Returns:    inps - same namespace, updated in place with dateList, pbaseList,
                       date12List, date(12)List_keep/_drop and cohList
    Raises:     ValueError - if an HDF5 input is neither ifgramStack nor timeseries
    """
    ext = os.path.splitext(inps.file)[1]

    # 1. Read dateList and pbaseList
    if ext in ['.h5', '.he5']:
        k = readfile.read_attribute(inps.file)['FILE_TYPE']
        print('reading temporal/spatial baselines from {} file: {}'.format(k, inps.file))
        if k == 'ifgramStack':
            inps.dateList = ifgramStack(inps.file).get_date_list(dropIfgram=False)
            inps.pbaseList = ifgramStack(inps.file).get_perp_baseline_timeseries(dropIfgram=False)
        elif k == 'timeseries':
            obj = timeseries(inps.file)
            obj.open(print_msg=False)
            inps.dateList = obj.dateList
            inps.pbaseList = obj.pbase
        else:
            raise ValueError('input file is not ifgramStack/timeseries, can not read temporal/spatial baseline info.')
    else:
        print('reading temporal/spatial baselines from list file: '+inps.bl_list_file)
        inps.dateList, inps.pbaseList = pnet.read_baseline_file(inps.bl_list_file)[0:2]
    print('number of acquisitions: {}'.format(len(inps.dateList)))

    # 2. Read All Date12/Ifgrams/Pairs
    inps.date12List = pnet.get_date12_list(inps.file)
    print('reading interferograms info from file: {}'.format(inps.file))
    print('number of interferograms: {}'.format(len(inps.date12List)))

    if inps.save_list:
        txtFile = os.path.splitext(os.path.basename(inps.file))[0]+'_date12List.txt'
        np.savetxt(txtFile, inps.date12List, fmt='%s')
        print('save pairs/date12 info to file: '+txtFile)

    # Optional: Read dropped date12 / date
    # `k` is only bound when ext is .h5/.he5; the short-circuit `and` keeps this safe
    inps.dateList_drop = []
    inps.date12List_drop = []
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.date12List_keep = ifgramStack(inps.file).get_date12_list(dropIfgram=True)
        inps.date12List_drop = sorted(list(set(inps.date12List) - set(inps.date12List_keep)))
        print('-'*50)
        print('number of interferograms marked as drop: {}'.format(len(inps.date12List_drop)))
        print('number of interferograms marked as keep: {}'.format(len(inps.date12List_keep)))

        # dates of kept pairs (reference_sensed date pairs joined by '_')
        mDates = [i.split('_')[0] for i in inps.date12List_keep]
        sDates = [i.split('_')[1] for i in inps.date12List_keep]
        inps.dateList_keep = sorted(list(set(mDates + sDates)))
        inps.dateList_drop = sorted(list(set(inps.dateList) - set(inps.dateList_keep)))
        print('number of acquisitions marked as drop: {}'.format(len(inps.dateList_drop)))
        if len(inps.dateList_drop) > 0:
            print(inps.dateList_drop)

    # Optional: Read Coherence List
    inps.cohList = None
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.cohList, cohDate12List = ut.spatial_average(inps.file, datasetName='coherence', maskFile=inps.maskFile, saveList=True, checkAoi=False)
        # NOTE(review): if all values are NaN, cohList becomes None but may
        # still be indexed in the superset branch below — confirm upstream
        # guarantees avoid that combination.
        if all(np.isnan(inps.cohList)):
            inps.cohList = None
            print('WARNING: all coherence value are nan! Do not use this and continue.')
        if set(cohDate12List) > set(inps.date12List):
            print('extract coherence value for all pair/date12 in input file')
            inps.cohList = [inps.cohList[cohDate12List.index(i)] for i in inps.date12List]
        elif set(cohDate12List) < set(inps.date12List):
            inps.cohList = None
            print('WARNING: not every pair/date12 from input file is in coherence file')
            print('turn off the color plotting of interferograms based on coherence')
    return inps
def read_network_info(inps):
    """Read network info (dates, perpendicular baselines, coherence) into inps.

    Supports two input formats:
      .h5  - ifgramStack file, read directly via the stack object
      .txt - whitespace table with columns [date12, coherence, ?, pbase12];
             per-date baselines are estimated by least squares from the
             pair-wise baselines

    Parameters: inps - namespace with file / maskFile attributes
    Returns:    inps - same namespace, updated in place
    Raises:     ValueError - for an unrecognized file extension
    """
    ext = os.path.splitext(inps.file)[1]
    print('read temporal/spatial baseline info from file:', inps.file)

    ## 1. Read date, pbase, date12 and coherence
    if ext == '.h5':
        inps.date12List = ifgramStack(inps.file).get_date12_list(dropIfgram=False)
        inps.dateList = ifgramStack(inps.file).get_date_list(dropIfgram=False)
        inps.pbaseList = ifgramStack(inps.file).get_perp_baseline_timeseries(dropIfgram=False)
        inps.cohList = ut.spatial_average(inps.file, datasetName='coherence', maskFile=inps.maskFile, saveList=True, checkAoi=False)[0]
    elif ext == '.txt':
        inps.date12List = np.loadtxt(inps.file, dtype=bytes).astype(str)[:, 0].tolist()

        # date12List --> dateList
        mDates = [i.split('_')[0] for i in inps.date12List]
        sDates = [i.split('_')[1] for i in inps.date12List]
        inps.dateList = sorted(list(set(mDates + sDates)))

        # pbase12List + date12List --> pbaseList
        # solve A * pbase = pbase12 with the first date pinned at 0
        pbase12List = np.loadtxt(inps.file, dtype=bytes).astype(float)[:, 3]
        A = ifgramStack.get_design_matrix4timeseries(inps.date12List)[0]
        inps.pbaseList = np.zeros(len(inps.dateList), dtype=np.float32)
        inps.pbaseList[1:] = np.linalg.lstsq(A, np.array(pbase12List), rcond=None)[0]

        # cohList
        inps.cohList = np.loadtxt(inps.file, dtype=bytes).astype(float)[:, 1]
    else:
        raise ValueError('un-recognized input file extention:', ext)

    print('number of acquisitions: {}'.format(len(inps.dateList)))
    print('number of interferograms: {}'.format(len(inps.date12List)))

    # Optional: Read dropped date12 / date
    inps.dateList_drop = []
    inps.date12List_drop = []
    if ext == '.h5':
        inps.date12List_keep = ifgramStack(inps.file).get_date12_list(dropIfgram=True)
        inps.date12List_drop = sorted(list(set(inps.date12List) - set(inps.date12List_keep)))
        print('-' * 50)
        print('number of interferograms marked as drop: {}'.format(len(inps.date12List_drop)))
        print('number of interferograms marked as keep: {}'.format(len(inps.date12List_keep)))

        mDates = [i.split('_')[0] for i in inps.date12List_keep]
        sDates = [i.split('_')[1] for i in inps.date12List_keep]
        inps.dateList_keep = sorted(list(set(mDates + sDates)))
        inps.dateList_drop = sorted(list(set(inps.dateList) - set(inps.dateList_keep)))
        print('number of acquisitions marked as drop: {}'.format(len(inps.dateList_drop)))
        if len(inps.dateList_drop) > 0:
            print(inps.dateList_drop)

    return inps
def calc_num_nonzero_integer_closure_phase(ifgram_file, mask_file=None, dsName='unwrapPhase', out_file=None, step=50, update_mode=True):
    """Calculate the number of non-zero integer ambiguity of closure phase.

    T_int as shown in equation (8-9) and inline in Yunjun et al. (2019, CAGEO).

    Parameters: ifgram_file - str, path of interferogram stack file
                mask_file   - str, path of mask file
                dsName      - str, unwrapped phase dataset name used to calculate the closure phase
                out_file    - str, custom output filename
                step        - int, number of rows in each block to calculate T_int
                update_mode - bool, skip calculation when the output file already exists
    Returns:    out_file    - str, output filename
    Example:    calc_num_nonzero_integer_closure_phase('inputs/ifgramStack.h5', mask_file='waterMask.h5')
    """
    # default output file path
    if out_file is None:
        out_dir = os.path.dirname(os.path.dirname(ifgram_file))
        if dsName == 'unwrapPhase':
            # skip the default dsName in output filename
            out_file = 'numNonzeroIntClosure.h5'
        else:
            out_file = 'numNonzeroIntClosure4{}.h5'.format(dsName)
        out_file = os.path.join(out_dir, out_file)

    if update_mode and os.path.isfile(out_file):
        print('output file "{}" already exists, skip re-calculating.'.format(out_file))
        return out_file

    # read ifgramStack file
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    # C maps per-pair phases to triplet closure phases
    C = stack_obj.get_design_matrix4triplet(date12_list)
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsName, dropIfgram=True).reshape(num_ifgram, -1)
    print('get design matrix for the interferogram triplets in size of {}'.format(C.shape))

    # calculate number of nonzero closure phase, block by block to cap memory
    num_loop = int(np.ceil(length / step))
    num_nonzero_closure = np.zeros((length, width), dtype=np.float32)
    # fixed typo in the progress message ('calcualting' -> 'calculating')
    msg = 'calculating the number of triplets with non-zero integer ambiguity of closure phase ...'
    msg += '\n block by block with size up to {}, {} blocks in total'.format((step, width), num_loop)
    print(msg)

    prog_bar = ptime.progressBar(maxValue=num_loop)
    for i in range(num_loop):
        # box of the current row block
        r0 = i * step
        r1 = min((r0 + step), stack_obj.length)
        box = (0, r0, stack_obj.width, r1)

        # read data
        unw = ifginv.read_unwrap_phase(stack_obj, box=box, ref_phase=ref_phase, obsDatasetName=dsName, dropIfgram=True, print_msg=False).reshape(num_ifgram, -1)

        # calculate based on equation (8-9) and T_int equation inline.
        closure_pha = np.dot(C, unw)
        closure_int = np.round((closure_pha - ut.wrap(closure_pha)) / (2. * np.pi))
        num_nonzero_closure[r0:r1, :] = np.sum(closure_int != 0, axis=0).reshape(-1, width)
        prog_bar.update(i + 1, every=1)
    prog_bar.close()

    # mask
    if mask_file is not None:
        print('masking with file', mask_file)
        mask = readfile.read(mask_file)[0]
        num_nonzero_closure[mask == 0] = np.nan

    # write to disk
    print('write to file', out_file)
    meta = dict(stack_obj.metadata)
    meta['FILE_TYPE'] = 'mask'
    meta['UNIT'] = '1'
    writefile.write(num_nonzero_closure, out_file, meta)

    # plot
    plot_num_nonzero_integer_closure_phase(out_file)

    return out_file
def get_date12_to_drop(inps):
    """Get the list of date12 pairs to drop from the stack.

    Applies, in order: reference-file matching, coherence threshold (with
    optional MST backbone), temporal/perpendicular baseline thresholds,
    connection-number limit, explicit index/date exclusions, start/end date
    cuts, and manual selection.

    Parameters: inps - namespace of modify_network options
    Returns:    list of str - date12 pairs to drop; [] keeps ALL pairs;
                None         - nothing to change, caller should exit without updating
    Raises:     Exception - if the selection would drop every interferogram
    """
    obj = ifgramStack(inps.file)
    obj.open()
    date12ListAll = obj.date12List
    dateList = obj.dateList
    print('number of interferograms: {}'.format(len(date12ListAll)))

    # Get date12_to_drop
    date12_to_drop = []

    # reference file: drop pairs absent from the reference stack's kept network
    if inps.referenceFile:
        date12_to_keep = ifgramStack(inps.referenceFile).get_date12_list(dropIfgram=True)
        print('--------------------------------------------------')
        print('use reference pairs info from file: {}'.format(inps.referenceFile))
        print('number of interferograms in reference: {}'.format(len(date12_to_keep)))
        tempList = sorted(list(set(date12ListAll) - set(date12_to_keep)))
        date12_to_drop += tempList
        print('date12 not in reference file: ({})\n{}'.format(len(tempList), tempList))

    # coherence file
    if inps.coherenceBased:
        print('--------------------------------------------------')
        print('use coherence-based network modification')
        coord = ut.coordinate(obj.metadata, lookup_file=inps.lookupFile)
        if inps.aoi_geo_box and inps.lookupFile:
            print('input AOI in (lon0, lat1, lon1, lat0): {}'.format(inps.aoi_geo_box))
            inps.aoi_pix_box = coord.bbox_geo2radar(inps.aoi_geo_box)
        if inps.aoi_pix_box:
            inps.aoi_pix_box = coord.check_box_within_data_coverage(inps.aoi_pix_box)
            print('input AOI in (x0,y0,x1,y1): {}'.format(inps.aoi_pix_box))

        # Calculate spatial average coherence
        cohList = ut.spatial_average(inps.file, datasetName='coherence', maskFile=inps.maskFile, box=inps.aoi_pix_box, saveList=True)[0]
        coh_date12_list = list(np.array(date12ListAll)[np.array(cohList) >= inps.minCoherence])

        # MST network: optionally protect a minimum-spanning-tree backbone
        if inps.keepMinSpanTree:
            print('Get minimum spanning tree (MST) of interferograms with inverse of coherence.')
            msg = ('Drop ifgrams with '
                   '1) average coherence < {} AND '
                   '2) not in MST network: '.format(inps.minCoherence))
            mst_date12_list = pnet.threshold_coherence_based_mst(date12ListAll, cohList)
            mst_date12_list = ptime.yyyymmdd_date12(mst_date12_list)
        else:
            msg = 'Drop ifgrams with average coherence < {}: '.format(inps.minCoherence)
            mst_date12_list = []

        tempList = sorted(list(set(date12ListAll) - set(coh_date12_list + mst_date12_list)))
        date12_to_drop += tempList
        msg += '({})'.format(len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # temp baseline threshold
    if inps.tempBaseMax:
        tempIndex = np.abs(obj.tbaseIfgram) > inps.tempBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with temporal baseline > {} days: ({})\n{}'.format(inps.tempBaseMax, len(tempList), tempList))

    # perp baseline threshold
    if inps.perpBaseMax:
        tempIndex = np.abs(obj.pbaseIfgram) > inps.perpBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with perp baseline > {} meters: ({})\n{}'.format(inps.perpBaseMax, len(tempList), tempList))

    # connection number threshold
    if inps.connNumMax:
        seq_date12_list = pnet.select_pairs_sequential(dateList, inps.connNumMax)
        seq_date12_list = ptime.yyyymmdd_date12(seq_date12_list)
        tempList = [i for i in date12ListAll if i not in seq_date12_list]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        msg = 'Drop ifgrams with temporal baseline beyond {} neighbors: ({})'.format(inps.connNumMax, len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # excludeIfgIndex
    if inps.excludeIfgIndex:
        tempList = [date12ListAll[i] for i in inps.excludeIfgIndex]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with the following index number: {}'.format(len(tempList)))
        for i in range(len(tempList)):
            print('{} : {}'.format(i, tempList[i]))
        #len(tempList), zip(inps.excludeIfgIndex, tempList)))

    # excludeDate: drop any pair touching an excluded acquisition date
    if inps.excludeDate:
        tempList = [i for i in date12ListAll if any(j in inps.excludeDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('-' * 50 + '\nDrop ifgrams including the following dates: ({})\n{}'.format(len(tempList), inps.excludeDate))
        print('-' * 30 + '\n{}'.format(tempList))

    # startDate
    if inps.startDate:
        minDate = int(inps.startDate)
        tempList = [i for i in date12ListAll if any(int(j) < minDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date earlier than: {} ({})\n{}'.format(inps.startDate, len(tempList), tempList))

    # endDate
    if inps.endDate:
        maxDate = int(inps.endDate)
        tempList = [i for i in date12ListAll if any(int(j) > maxDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date later than: {} ({})\n{}'.format(inps.endDate, len(tempList), tempList))

    # Manually drop pairs
    if inps.manual:
        tempList = manual_select_pairs_to_remove(inps.file)
        if tempList is None:
            return None
        tempList = [i for i in tempList if i in date12ListAll]
        print('date12 selected to remove: ({})\n{}'.format(len(tempList), tempList))
        date12_to_drop += tempList

    # drop duplicate date12 and sort in order
    date12_to_drop = sorted(list(set(date12_to_drop)))
    date12_to_keep = sorted(list(set(date12ListAll) - set(date12_to_drop)))
    print('--------------------------------------------------')
    print('number of interferograms to remove: {}'.format(len(date12_to_drop)))
    print('number of interferograms to keep : {}'.format(len(date12_to_keep)))

    # report acquisitions that lose all of their pairs
    date_to_keep = [d for date12 in date12_to_keep for d in date12.split('_')]
    date_to_keep = sorted(list(set(date_to_keep)))
    date_to_drop = sorted(list(set(dateList) - set(date_to_keep)))
    if len(date_to_drop) > 0:
        print('number of acquisitions to remove: {}\n{}'.format(len(date_to_drop), date_to_drop))

    # compare against the drop flags already marked in the file
    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))
    if date12_to_drop == date12ListDropped:
        # fixed typo in the message ('exsiting' -> 'existing')
        print('Calculated date12 to drop is the same as existing marked input file, skip updating file.')
        date12_to_drop = None
    elif date12_to_drop == date12ListAll:
        raise Exception('Zero interferogram left! Please adjust your setting and try again.')
    return date12_to_drop
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display).

    Parameters: fname      - str, path of input file (HDF5 or binary)
    Returns:    slice_list - list of str, names of the 2D slices available in the file.
                             NOTE: also assigned to the module-level global `slice_list`.
    """
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    # result is shared through a module-level global so that the nested
    # h5py visit callback below can append to it
    global slice_list

    # HDF5 Files
    if fext in ['.h5', '.he5']:
        # datasets located directly at the file root (used to disambiguate
        # old/new file layouts for timeseries/geometry files)
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]

        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])

            def get_hdf5_2d_dataset(name, obj):
                # h5py.visititems callback: keep 2D datasets matching the
                # file's LENGTH x WIDTH; warn on higher-dimensional ones
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length, width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn('file has un-defined {}D dataset: {}'.format(obj.ndim, name))

            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files: infer band names from extension / basename / metadata
    else:
        if fext.lower() in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
def calc_num_triplet_with_nonzero_integer_ambiguity(ifgram_file, mask_file=None, dsName='unwrapPhase',
                                                    out_file=None, max_memory=4, update_mode=True):
    """Calculate the number of triplets with non-zero integer ambiguity of closure phase.

    T_int as shown in equation (8-9) and inline in Yunjun et al. (2019, CAGEO).

    Parameters: ifgram_file - str, path of interferogram stack file
                mask_file   - str, path of mask file
                dsName      - str, unwrapped phase dataset name used to calculate the closure phase
                out_file    - str, custom output filename
                max_memory  - int/float, max memory in GB used to size the row blocks
                update_mode - bool, skip the calculation if the existing output is up-to-date
    Returns:    out_file    - str, output filename (or None if no triplet exists)
    Example:    calc_num_triplet_with_nonzero_integer_ambiguity('inputs/ifgramStack.h5', mask_file='waterMask.h5')
    """
    # default output file path (two levels up from the stack file)
    out_dir = os.path.dirname(os.path.dirname(ifgram_file))
    if out_file is None:
        if dsName == 'unwrapPhase':
            # skip the default dsName in output filename
            out_file = 'numTriNonzeroIntAmbiguity.h5'
        else:
            out_file = 'numTriNonzeroIntAmbiguity4{}.h5'.format(dsName)
        out_file = os.path.join(out_dir, out_file)

    # update mode: re-run only if input is newer OR the reference pixel changed
    if update_mode and os.path.isfile(out_file):
        print('update mode: ON')
        print('1) output file "{}" already exists'.format(out_file))
        flag = 'skip'

        # check modification time
        # fall back to the file mtime if the dataset has no MODIFICATION_TIME attribute
        with h5py.File(ifgram_file, 'r') as f:
            ti = float(f[dsName].attrs.get('MODIFICATION_TIME', os.path.getmtime(ifgram_file)))
        to = os.path.getmtime(out_file)
        if ti > to:
            print('2) output file is NOT newer than input dataset')
            flag = 'run'
        else:
            print('2) output file is newer than input dataset')

        # check REF_Y/X
        key_list = ['REF_Y', 'REF_X']
        atri = readfile.read_attribute(ifgram_file)
        atro = readfile.read_attribute(out_file)
        if not all(atri[i] == atro[i] for i in key_list):
            print('3) NOT all key configurations are the same: {}'.format(key_list))
            flag = 'run'
        else:
            print('3) all key configurations are the same: {}'.format(key_list))

        print('run or skip: {}.'.format(flag))
        if flag == 'skip':
            return out_file

    # read ifgramStack file
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    # C: triplet design matrix; None when the network contains no triangle
    C = stack_obj.get_design_matrix4triplet(date12_list)
    if C is None:
        msg = 'No triangles found from ifgramStack file: {}!'.format(ifgram_file)
        msg += '\n Skip calculating the number of triplets with non-zero integer ambiguity.'
        print(msg)
        return None
    else:
        print('get design matrix for the interferogram triplets in size of {}'.format(C.shape))

    # calculate number of nonzero closure phase, block by block to bound memory
    ds_size = (C.shape[0] * 2 + C.shape[1]) * length * width * 4
    num_loop = int(np.ceil(ds_size * 2 / (max_memory * 1024**3)))
    # round block height to a multiple of 10 rows, then recompute the loop count
    step = int(np.rint(length / num_loop / 10) * 10)
    num_loop = int(np.ceil(length / step))
    num_nonzero_closure = np.zeros((length, width), dtype=np.float32)
    msg = 'calculating the number of triplets with non-zero integer ambiguity of closure phase ...'
    msg += '\n block by block with size up to {}, {} blocks in total'.format((step, width), num_loop)
    print(msg)

    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsName,
                                              dropIfgram=True).reshape(num_ifgram, -1)

    prog_bar = ptime.progressBar(maxValue=num_loop)
    for i in range(num_loop):
        # box of the current row block: (x0, y0, x1, y1)
        r0 = i * step
        r1 = min((r0 + step), stack_obj.length)
        box = (0, r0, stack_obj.width, r1)

        # read data (referenced unwrapped phase, flattened to num_ifgram x num_pixel)
        unw = ifginv.read_unwrap_phase(stack_obj,
                                       box=box,
                                       ref_phase=ref_phase,
                                       obs_ds_name=dsName,
                                       dropIfgram=True,
                                       print_msg=False).reshape(num_ifgram, -1)

        # calculate based on equation (8-9) and T_int equation inline.
        closure_pha = np.dot(C, unw)
        closure_int = np.round((closure_pha - ut.wrap(closure_pha)) / (2. * np.pi))
        num_nonzero_closure[r0:r1, :] = np.sum(closure_int != 0, axis=0).reshape(-1, width)

        prog_bar.update(i + 1, every=1, suffix='line {} / {}'.format(r0, length))
    prog_bar.close()

    # mask: NaN-out masked-out pixels and pixels with zero average coherence
    if mask_file is not None:
        mask = readfile.read(mask_file)[0]
        num_nonzero_closure[mask == 0] = np.nan
        print('mask out pixels with zero in file:', mask_file)

    coh_file = os.path.join(out_dir, 'avgSpatialCoh.h5')
    if os.path.isfile(coh_file):
        coh = readfile.read(coh_file)[0]
        num_nonzero_closure[coh == 0] = np.nan
        print('mask out pixels with zero in file:', coh_file)

    # write to disk as a mask-type file
    print('write to file', out_file)
    meta = dict(stack_obj.metadata)
    meta['FILE_TYPE'] = 'mask'
    meta['UNIT'] = '1'
    writefile.write(num_nonzero_closure, out_file, meta)

    # plot
    plot_num_triplet_with_nonzero_integer_ambiguity(out_file)

    return out_file
def get_common_region_int_ambiguity(ifgram_file, cc_mask_file, water_mask_file=None, num_sample=100,
                                    dsNameIn='unwrapPhase'):
    """Solve the phase unwrapping integer ambiguity for the common regions among all interferograms.

    Parameters: ifgram_file     : str, path of interferogram stack file
                cc_mask_file    : str, path of common connected components file
                water_mask_file : str, path of water mask file
                num_sample      : int, number of pixel sampled for each region
                dsNameIn        : str, dataset name of the unwrap phase to be corrected
    Returns:    common_regions  : list of skimage.measure._regionprops._RegionProperties object
                    modified by adding two more variables:
                    sample_coords : 2D np.ndarray in size of (num_sample, 2) in int64 format
                    int_ambiguity : 1D np.ndarray in size of (num_ifgram,) in int format
    """
    print('-'*50)
    print('calculating the integer ambiguity for the common regions defined in', cc_mask_file)

    # stack info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    # cvxopt matrix of the triplet design matrix, used by the L1 solver below
    C = matrix(ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsNameIn, dropIfgram=True).reshape(num_ifgram, -1)

    # prepare common label (optionally refined by the water mask)
    print('read common mask from', cc_mask_file)
    cc_mask = readfile.read(cc_mask_file)[0]
    if water_mask_file is not None and os.path.isfile(water_mask_file):
        water_mask = readfile.read(water_mask_file)[0]
        print('refine common mask based on water mask file', water_mask_file)
        cc_mask[water_mask == 0] = 0

    label_img, num_label = connectComponent.get_large_label(cc_mask, min_area=2.5e3, print_msg=True)
    common_regions = measure.regionprops(label_img)
    print('number of common regions:', num_label)

    # add sample_coords / int_ambiguity to each region
    print('number of samples per region:', num_sample)
    print('solving the phase-unwrapping integer ambiguity for {}'.format(dsNameIn))
    print('\tbased on the closure phase of interferograms triplets (Yunjun et al., 2019)')
    print('\tusing the L1-norm regularzed least squares approximation (LASSO) ...')
    for i in range(num_label):
        common_reg = common_regions[i]
        # sample_coords: random (row, col) pixels drawn from the region
        # NOTE(review): assumes common_reg.area >= num_sample (replace=False);
        # the min_area=2.5e3 filter above appears to guarantee this for num_sample=100
        idx = sorted(np.random.choice(common_reg.area, num_sample, replace=False))
        common_reg.sample_coords = common_reg.coords[idx, :].astype(int)

        # solve for int_ambiguity; the reference region is left at zero
        U = np.zeros((num_ifgram, num_sample))
        if common_reg.label == label_img[stack_obj.refY, stack_obj.refX]:
            print('{}/{} skip calculation for the reference region'.format(i+1, num_label))

        else:
            prog_bar = ptime.progressBar(maxValue=num_sample, prefix='{}/{}'.format(i+1, num_label))
            for j in range(num_sample):
                # read unwrap phase of one sampled pixel across all kept ifgrams
                y, x = common_reg.sample_coords[j, :]
                unw = ifginv.read_unwrap_phase(stack_obj,
                                               box=(x, y, x+1, y+1),
                                               ref_phase=ref_phase,
                                               unwDatasetName=dsNameIn,
                                               dropIfgram=True,
                                               print_msg=False).reshape(num_ifgram, -1)

                # calculate closure_int: integer part of the triplet closure phase
                closure_pha = np.dot(C, unw)
                closure_int = matrix(np.round((closure_pha - ut.wrap(closure_pha)) / (2.*np.pi)))

                # solve for U via L1-regularized least squares
                U[:,j] = np.round(l1regls(-C, closure_int, alpha=1e-2, show_progress=0)).flatten()
                prog_bar.update(j+1, every=5)
            prog_bar.close()
        # add int_ambiguity: per-ifgram median over the sampled pixels
        common_reg.int_ambiguity = np.median(U, axis=1)
        common_reg.date12_list = date12_list

    #sort regions by size to facilitate the region matching later
    common_regions.sort(key=lambda x: x.area, reverse=True)

    # plot sample result for visual inspection
    fig_size = pp.auto_figure_size(label_img.shape, disp_cbar=False)
    fig, ax = plt.subplots(figsize=fig_size)
    ax.imshow(label_img, cmap='jet')
    for common_reg in common_regions:
        ax.plot(common_reg.sample_coords[:,1],
                common_reg.sample_coords[:,0], 'k.', ms=2)
    pp.auto_flip_direction(stack_obj.metadata, ax, print_msg=False)
    out_img = 'common_region_sample.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('saved common regions and sample pixels to file', out_img)

    return common_regions
def ifgram_inversion_patch(ifgram_file, box=None, ref_phase=None, obs_ds_name='unwrapPhase',
                           weight_func='var', water_mask_file=None, min_norm_velocity=True,
                           mask_ds_name=None, mask_threshold=0.4, min_redundancy=1.0):
    """Invert one patch of an ifgram stack into timeseries.

    Parameters: box               - tuple of 4 int, indicating (x0, y0, x1, y1) of the area of interest
                                    or None for the whole image
                ifgram_file       - str, interferograms stack HDF5 file, e.g. ./inputs/ifgramStack.h5
                ref_phase         - 1D array in size of (num_ifgram), or None
                obs_ds_name       - str, dataset to feed the inversion.
                weight_func       - str, weight function, choose in ['no', 'fim', 'var', 'coh']
                water_mask_file   - str, water mask filename if available, to skip inversion on water
                min_norm_velocity - bool, minimize the residual phase or phase velocity
                mask_ds_name      - str, dataset name in ifgram_file used to mask unwrapPhase pixelwisely
                mask_threshold    - float, min coherence of pixels if mask_dataset_name='coherence'
                min_redundancy    - float, the min number of ifgrams for every acquisition.
    Returns:    ts                - 3D array in size of (num_date, num_row, num_col)
                temp_coh          - 2D array in size of (num_row, num_col)
                num_inv_ifg       - 2D array in size of (num_row, num_col)
                box               - tuple of 4 int
    Example:    ifgram_inversion_patch('ifgramStack.h5', box=(0,200,1316,400))
    """
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)

    # debug
    #y, x = 258, 454
    #box = (x, y, x+1, y+1)

    ## 1. input info

    # size of the patch
    if box:
        num_row = box[3] - box[1]
        num_col = box[2] - box[0]
    else:
        num_row = stack_obj.length
        num_col = stack_obj.width
    num_pixel = num_row * num_col

    # get tbase_diff: temporal baseline differences in years
    date_list = stack_obj.get_date_list(dropIfgram=True)
    num_date = len(date_list)
    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32) / 365.25
    tbase_diff = np.diff(tbase).reshape(-1, 1)

    # design matrix
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    A, B = stack_obj.get_design_matrix4timeseries(date12_list=date12_list)[0:2]

    # prep for decor std time-series
    #if os.path.isfile('reference_date.txt'):
    #    ref_date = str(np.loadtxt('reference_date.txt', dtype=bytes).astype(str))
    #else:
    #    ref_date = date_list[0]
    #Astd = stack_obj.get_design_matrix4timeseries(date12_list=date12_list, refDate=ref_date)[0]
    #ref_idx = date_list.index(ref_date)
    #time_idx = [i for i in range(num_date)]
    #time_idx.remove(ref_idx)

    # skip zero value in the network inversion for phase (not for offsets)
    if 'phase' in obs_ds_name.lower():
        skip_zero_value = True
    else:
        skip_zero_value = False

    # 1.1 read / calcualte weight
    if weight_func in ['no', 'sbas']:
        weight = None
    else:
        weight = calc_weight(stack_obj,
                             box,
                             weight_func=weight_func,
                             dropIfgram=True,
                             chunk_size=100000)

    # 1.2 read / mask unwrapPhase / offset
    pha_data = read_unwrap_phase(stack_obj,
                                 box,
                                 ref_phase,
                                 obs_ds_name=obs_ds_name,
                                 dropIfgram=True)

    pha_data = mask_unwrap_phase(pha_data,
                                 stack_obj,
                                 box,
                                 dropIfgram=True,
                                 mask_ds_name=mask_ds_name,
                                 mask_threshold=mask_threshold)

    # 1.3 mask of pixels to invert
    mask = np.ones(num_pixel, np.bool_)

    # 1.3.1 - Water Mask
    if water_mask_file:
        print('skip pixels on water with mask from file: {}'.format(os.path.basename(water_mask_file)))
        atr_msk = readfile.read_attribute(water_mask_file)
        len_msk, wid_msk = int(atr_msk['LENGTH']), int(atr_msk['WIDTH'])
        if (len_msk, wid_msk) != (stack_obj.length, stack_obj.width):
            raise ValueError('Input water mask file has different size from ifgramStack file.')

        dsName = [i for i in readfile.get_dataset_list(water_mask_file)
                  if i in ['waterMask', 'mask']][0]
        waterMask = readfile.read(water_mask_file,
                                  datasetName=dsName,
                                  box=box)[0].flatten()
        mask *= np.array(waterMask, dtype=np.bool_)
        del waterMask

    # 1.3.2 - Mask for Zero Phase in ALL ifgrams
    if 'phase' in obs_ds_name.lower():
        print('skip pixels with zero/nan value in all interferograms')
        with warnings.catch_warnings():
            # ignore warning message for all-NaN slices
            warnings.simplefilter("ignore", category=RuntimeWarning)
            phase_stack = np.nanmean(pha_data, axis=0)
        mask *= np.multiply(~np.isnan(phase_stack), phase_stack != 0.)
        del phase_stack

    # 1.3.3 invert pixels on mask 1+2
    num_pixel2inv = int(np.sum(mask))
    idx_pixel2inv = np.where(mask)[0]
    print('number of pixels to invert: {} out of {} ({:.1f}%)'.format(
        num_pixel2inv, num_pixel, num_pixel2inv / num_pixel * 100))

    ## 2. inversion

    # 2.1 initiale the output matrices
    ts = np.zeros((num_date, num_pixel), np.float32)
    #ts_std = np.zeros((num_date, num_pixel), np.float32)
    temp_coh = np.zeros(num_pixel, np.float32)
    num_inv_ifg = np.zeros(num_pixel, np.int16)

    # return directly if there is nothing to invert
    if num_pixel2inv < 1:
        ts = ts.reshape(num_date, num_row, num_col)
        #ts_std = ts_std.reshape(num_date, num_row, num_col)
        temp_coh = temp_coh.reshape(num_row, num_col)
        num_inv_ifg = num_inv_ifg.reshape(num_row, num_col)
        # BUGFIX: include `box` so the early return matches the documented
        # 4-element signature; the caller unpacks with [:-1] and would fail
        # on a 3-element tuple otherwise.
        return ts, temp_coh, num_inv_ifg, box

    # 2.2 un-weighted inversion (classic SBAS)
    if weight_func in ['no', 'sbas']:

        # a. mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
        if 'phase' in obs_ds_name.lower():
            mask_all_net = np.all(pha_data, axis=0)
            mask_all_net *= mask
        else:
            mask_all_net = np.array(mask)
        mask_part_net = mask ^ mask_all_net
        del mask

        # b. invert once for all pixels with obs in all ifgrams
        if np.sum(mask_all_net) > 0:
            print(('inverting pixels with valid phase in all ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_all_net)))
            tsi, tcohi, num_ifgi = estimate_timeseries(A, B, tbase_diff,
                                                       ifgram=pha_data[:, mask_all_net],
                                                       weight_sqrt=None,
                                                       min_norm_velocity=min_norm_velocity,
                                                       min_redundancy=min_redundancy,
                                                       skip_zero_value=skip_zero_value)
            ts[:, mask_all_net] = tsi
            temp_coh[mask_all_net] = tcohi
            num_inv_ifg[mask_all_net] = num_ifgi

        # c. pixel-by-pixel for pixels with obs not in all ifgrams
        if np.sum(mask_part_net) > 0:
            print(('inverting pixels with valid phase in some ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_part_net)))
            num_pixel2inv = int(np.sum(mask_part_net))
            idx_pixel2inv = np.where(mask_part_net)[0]
            prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
            for i in range(num_pixel2inv):
                idx = idx_pixel2inv[i]
                tsi, tcohi, num_ifgi = estimate_timeseries(A, B, tbase_diff,
                                                           ifgram=pha_data[:, idx],
                                                           weight_sqrt=None,
                                                           min_norm_velocity=min_norm_velocity,
                                                           min_redundancy=min_redundancy,
                                                           skip_zero_value=skip_zero_value)
                ts[:, idx] = tsi.flatten()
                temp_coh[idx] = tcohi
                num_inv_ifg[idx] = num_ifgi
                prog_bar.update(i + 1, every=2000,
                                suffix='{}/{} pixels'.format(i + 1, num_pixel2inv))
            prog_bar.close()

    # 2.3 weighted inversion - pixel-by-pixel
    else:
        print('inverting network of interferograms into time-series ...')
        prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
        for i in range(num_pixel2inv):
            idx = idx_pixel2inv[i]
            tsi, tcohi, num_ifgi = estimate_timeseries(A, B, tbase_diff,
                                                       ifgram=pha_data[:, idx],
                                                       weight_sqrt=weight[:, idx],
                                                       min_norm_velocity=min_norm_velocity,
                                                       min_redundancy=min_redundancy,
                                                       skip_zero_value=skip_zero_value)
            ts[:, idx] = tsi.flatten()
            temp_coh[idx] = tcohi
            num_inv_ifg[idx] = num_ifgi
            prog_bar.update(i + 1, every=2000,
                            suffix='{}/{} pixels'.format(i + 1, num_pixel2inv))
        prog_bar.close()
        del weight
    del pha_data

    ## 3. prepare output

    # 3.1 reshape
    ts = ts.reshape(num_date, num_row, num_col)
    #ts_std = ts_std.reshape(num_date, num_row, num_col)
    temp_coh = temp_coh.reshape(num_row, num_col)
    num_inv_ifg = num_inv_ifg.reshape(num_row, num_col)

    # 3.2 convert displacement unit to meter
    if obs_ds_name.startswith('unwrapPhase'):
        phase2range = -1 * float(stack_obj.metadata['WAVELENGTH']) / (4. * np.pi)
        ts *= phase2range
        print('converting LOS phase unit from radian to meter')

    elif obs_ds_name == 'azimuthOffset':
        az_pixel_size = ut.azimuth_ground_resolution(stack_obj.metadata)
        ts *= az_pixel_size
        print('converting azimuth offset unit from pixel ({:.2f} m) to meter'.format(az_pixel_size))

    elif obs_ds_name == 'rangeOffset':
        rg_pixel_size = float(stack_obj.metadata['RANGE_PIXEL_SIZE'])
        ts *= rg_pixel_size
        print('converting range offset unit from pixel ({:.2f} m) to meter'.format(rg_pixel_size))

    return ts, temp_coh, num_inv_ifg, box
def spatial_average(File, datasetName='coherence', maskFile=None, box=None, saveList=False, checkAoi=True):
    """Read/Calculate Spatial Average of input file.

    If input file is text file, read it directly;
    If input file is data matrix file:
        If corresponding text file exists with the same mask file/AOI info, read it directly;
        Otherwise, calculate it from data file.
        Only non-nan pixel is considered.
    Parameters: File     : string, path of input file
                maskFile : string, path of mask file, e.g. maskTempCoh.h5
                box      : 4-tuple defining the left, upper, right, and lower pixel coordinate
                saveList : bool, save (list of) mean value into text file
    Returns:    meanList : list for float, average value in space for each epoch of input file
                dateList : list of string for date info
                    date12_list, e.g. 101120-110220, for interferograms/coherence
                    date8_list, e.g. 20101120, for timeseries
                    file name, e.g. velocity.h5, for all the other file types
    Example:    meanList = spatial_average('inputs/ifgramStack.h5')[0]
                meanList, date12_list = spatial_average('inputs/ifgramStack.h5',
                                                       maskFile='maskTempCoh.h5',
                                                       saveList=True)
    """
    def read_text_file(fname):
        # text file layout: col 0 = date(12), col 1 = mean value
        txtContent = np.loadtxt(fname, dtype=bytes).astype(str)
        meanList = [float(i) for i in txtContent[:, 1]]
        dateList = [i for i in txtContent[:, 0]]
        return meanList, dateList

    # Baic File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # If input is text file, read it directly
    suffix = ''
    if k == 'ifgramStack':
        suffix += '_'+datasetName
    suffix += '_spatialAvg.txt'
    if File.endswith(suffix):
        print('Input file is spatial average txt already, read it directly')
        meanList, dateList = read_text_file(File)
        return meanList, dateList

    # Read existing txt file only if 1) data file is older AND 2) same AOI
    txtFile = os.path.splitext(os.path.basename(File))[0]+suffix
    file_line = '# Data file: {}\n'.format(os.path.basename(File))
    mask_line = '# Mask file: {}\n'.format(maskFile)
    aoi_line = '# AOI box: {}\n'.format(box)
    try:
        # Read AOI line from existing txt file
        with open(txtFile, 'r') as fl:
            lines = fl.readlines()
        if checkAoi:
            try:
                aoi_line_orig = [i for i in lines if '# AOI box:' in i][0]
            except IndexError:
                aoi_line_orig = ''
        else:
            aoi_line_orig = aoi_line
        try:
            mask_line_orig = [i for i in lines if '# Mask file:' in i][0]
        except IndexError:
            mask_line_orig = ''
        if (aoi_line_orig == aoi_line
                and mask_line_orig == mask_line
                and run_or_skip(out_file=txtFile,
                                in_file=[File, maskFile],
                                check_readable=False) == 'skip'):
            print(txtFile+' already exists, read it directly')
            meanList, dateList = read_text_file(txtFile)
            return meanList, dateList
    except Exception:
        # best-effort cache read: any failure falls through to re-calculation
        pass

    # Calculate mean coherence list
    if k == 'ifgramStack':
        obj = ifgramStack(File)
        obj.open(print_msg=False)
        meanList, dateList = obj.spatial_average(datasetName=datasetName,
                                                 maskFile=maskFile,
                                                 box=box)
        pbase = obj.pbaseIfgram
        tbase = obj.tbaseIfgram
        obj.close()
    elif k == 'timeseries':
        meanList, dateList = timeseries(File).spatial_average(maskFile=maskFile,
                                                              box=box)
    else:
        data = readfile.read(File, box=box)[0]
        if maskFile and os.path.isfile(maskFile):
            print('mask from file: '+maskFile)
            mask = readfile.read(maskFile, datasetName='mask', box=box)[0]
            data[mask == 0.] = np.nan
        # BUGFIX: wrap the scalar mean in a list so the len()/indexing
        # operations below work for single-dataset files as well.
        meanList = [np.nanmean(data)]
        dateList = [os.path.basename(File)]

    # Write mean coherence list into text file
    if saveList:
        print('write average value in space into text file: '+txtFile)
        with open(txtFile, 'w') as fl:
            # Write comments
            fl.write(file_line+mask_line+aoi_line)
            # Write data list
            numLine = len(dateList)
            if k == 'ifgramStack':
                fl.write('#\tDATE12\t\tMean\tBtemp/days\tBperp/m\t\tNum\n')
                for i in range(numLine):
                    fl.write('%s\t%.4f\t%8.0f\t%8.1f\t%d\n' %
                             (dateList[i], meanList[i], tbase[i], pbase[i], i))
            else:
                fl.write('#\tDATE12\t\tMean\n')
                for i in range(numLine):
                    fl.write('%s\t%.4f\n' % (dateList[i], meanList[i]))

    if len(meanList) == 1:
        meanList = meanList[0]
        dateList = dateList[0]
    return meanList, dateList
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display).

    Parameters: fname      - str, path of input file (HDF5 or binary)
    Returns:    slice_list - list of str, names of the 2D slices in the file.
                             Also stored in the module-level global `slice_list`.
    """
    base, ext = os.path.splitext(os.path.basename(fname))
    ext = ext.lower()
    meta = read_attribute(fname)
    ftype = meta['FILE_TYPE']

    # shared through a module-level global so the nested h5py callback can append
    global slice_list

    # HDF5 files
    if ext in ('.h5', '.he5'):
        # datasets sitting directly at the file root (disambiguates file layouts)
        with h5py.File(fname, 'r') as f:
            root_dsets = [n for n in f.keys() if isinstance(f[n], h5py.Dataset)]

        # pick the reader class matching the file type, if any
        obj_cls = None
        if ftype == 'timeseries' and ftype in root_dsets:
            obj_cls = timeseries
        elif ftype in ['geometry'] and ftype not in root_dsets:
            obj_cls = geometry
        elif ftype in ['ifgramStack']:
            obj_cls = ifgramStack
        elif ftype in ['HDFEOS']:
            obj_cls = HDFEOS
        elif ftype in ['giantTimeseries']:
            obj_cls = giantTimeseries
        elif ftype in ['giantIfgramStack']:
            obj_cls = giantIfgramStack

        if obj_cls is not None:
            obj = obj_cls(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList
        else:
            ## unknown layout: walk the whole HDF5 tree and keep 2D datasets
            length, width = int(meta['LENGTH']), int(meta['WIDTH'])

            def collect_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length, width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn('file has un-defined {}D dataset: {}'.format(obj.ndim, name))

            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(collect_2d_dataset)

    # binary files: infer band names from extension / basename / metadata
    else:
        if ext.lower() in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif base.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif meta.get('number_bands', '1') == '2' and 'unw' not in ftype:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
def diff_file(file1, file2, outFile=None, force=False):
    """Subtraction/difference of two input files.

    Parameters: file1   - str, path of the minuend file
                file2   - list of str, path(s) of the subtrahend file(s)
                          (indexed with file2[0] and iterated below, so a list is expected)
                outFile - str, output filename; auto-generated from file1/file2 if not given
                force   - bool, for time-series input: enforce differencing on shared dates
                          only instead of raising when dates differ
    Returns:    outFile - str, path of the written difference file
    """
    if not outFile:
        fbase, fext = os.path.splitext(file1)
        if len(file2) > 1:
            raise ValueError('Output file name is needed for more than 2 files input.')
        outFile = '{}_diff_{}{}'.format(fbase, os.path.splitext(os.path.basename(file2[0]))[0], fext)
    print('{} - {} --> {}'.format(file1, file2, outFile))

    # Read basic info
    atr1 = readfile.read_attribute(file1)
    k1 = atr1['FILE_TYPE']
    atr2 = readfile.read_attribute(file2[0])
    k2 = atr2['FILE_TYPE']
    print('input files are: {} and {}'.format(k1, k2))

    # time-series - time-series (or giant time-series, in mm -> m)
    if k1 == 'timeseries':
        if k2 not in ['timeseries', 'giantTimeseries']:
            raise Exception('Input multiple dataset files are not the same file type!')
        if len(file2) > 1:
            raise Exception(('Only 2 files substraction is supported for time-series file,'
                             ' {} input.'.format(len(file2) + 1)))

        obj1 = timeseries(file1)
        obj1.open()
        if k2 == 'timeseries':
            obj2 = timeseries(file2[0])
            unit_fac = 1.
        elif k2 == 'giantTimeseries':
            obj2 = giantTimeseries(file2[0])
            # giant time-series values are scaled by 0.001 (mm -> m)
            unit_fac = 0.001
        obj2.open()
        ref_date, ref_y, ref_x = _check_reference(obj1.metadata, obj2.metadata)

        # check dates shared by two timeseries files
        dateListShared = [i for i in obj1.dateList if i in obj2.dateList]
        dateShared = np.ones((obj1.numDate), dtype=np.bool_)
        if dateListShared != obj1.dateList:
            print('WARNING: {} does not contain all dates in {}'.format(file2, file1))
            if force:
                dateExcluded = list(set(obj1.dateList) - set(dateListShared))
                print('Continue and enforce the differencing for their shared dates only.')
                print('\twith following dates are ignored for differencing:\n{}'.format(dateExcluded))
                dateShared[np.array([obj1.dateList.index(i) for i in dateExcluded])] = 0
            else:
                raise Exception('To enforce the differencing anyway, use --force option.')

        # consider different reference_date/pixel:
        # re-reference file2's data in time and/or space to match file1
        data2 = readfile.read(file2[0], datasetName=dateListShared)[0] * unit_fac
        if ref_date:
            data2 -= np.tile(data2[dateListShared.index(ref_date), :, :],
                             (data2.shape[0], 1, 1))
        if ref_y and ref_x:
            data2 -= np.tile(data2[:, ref_y, ref_x].reshape(-1, 1, 1),
                             (1, data2.shape[1], data2.shape[2]))

        data = obj1.read()
        mask = data == 0.
        data[dateShared] -= data2
        data[mask] = 0.  # Do not change zero phase value
        del data2
        writefile.write(data, out_file=outFile, ref_file=file1)

    # ifgramStack - ifgramStack, on their first common dataset
    elif all(i == 'ifgramStack' for i in [k1, k2]):
        obj1 = ifgramStack(file1)
        obj1.open()
        obj2 = ifgramStack(file2[0])
        obj2.open()
        dsNames = list(set(obj1.datasetNames) & set(obj2.datasetNames))
        if len(dsNames) == 0:
            raise ValueError('no common dataset between two files!')
        # prefer datasets in the canonical ifgramDatasetNames order
        dsName = [i for i in ifgramDatasetNames if i in dsNames][0]

        # read data
        print('reading {} from file {} ...'.format(dsName, file1))
        data1 = readfile.read(file1, datasetName=dsName)[0]
        print('reading {} from file {} ...'.format(dsName, file2[0]))
        data2 = readfile.read(file2[0], datasetName=dsName)[0]

        # consider reference pixel: subtract each file's own reference value
        # from its non-zero pixels, ifgram by ifgram
        if 'unwrapphase' in dsName.lower():
            print('referencing to pixel ({},{}) ...'.format(obj1.refY, obj1.refX))
            ref1 = data1[:, obj1.refY, obj1.refX]
            ref2 = data2[:, obj2.refY, obj2.refX]
            for i in range(data1.shape[0]):
                data1[i, :][data1[i, :] != 0.] -= ref1[i]
                data2[i, :][data2[i, :] != 0.] -= ref2[i]

        # operation and ignore zero values (zero is treated as no-data)
        data1[data1 == 0] = np.nan
        data2[data2 == 0] = np.nan
        data = data1 - data2
        del data1, data2
        data[np.isnan(data)] = 0.

        # write to file
        dsDict = {}
        dsDict[dsName] = data
        writefile.write(dsDict, out_file=outFile, ref_file=file1)

    # Sing dataset file: subtract every file in file2 from file1 in sequence
    else:
        data1 = readfile.read(file1)[0]
        data = np.array(data1, data1.dtype)
        for fname in file2:
            data2 = readfile.read(fname)[0]
            # compute in float32, then cast back to the original dtype
            data = np.array(data, dtype=np.float32) - np.array(data2, dtype=np.float32)
            data = np.array(data, data1.dtype)
        print('writing >>> ' + outFile)
        writefile.write(data, out_file=outFile, metadata=atr1)

    return outFile
def ifgram_inversion(inps=None):
    """Phase triangulation of small baseline interferograms.

    Inverts the ifgram stack into a displacement time-series (plus temporal
    coherence and number-of-inverted-ifgrams files), block by block, optionally
    in parallel via Dask.

    Parameters: inps - namespace
    Example:    inps = cmd_line_parse()
                ifgram_inversion(inps)
    """
    if not inps:
        inps = cmd_line_parse()
    start_time = time.time()

    ## 1. input info

    stack_obj = ifgramStack(inps.ifgramStackFile)
    stack_obj.open(print_msg=False)
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    date_list = stack_obj.get_date_list(dropIfgram=True)
    length, width = stack_obj.length, stack_obj.width

    # 1.1 read values on the reference pixel
    inps.refPhase = stack_obj.get_reference_phase(unwDatasetName=inps.obsDatasetName,
                                                  skip_reference=inps.skip_ref,
                                                  dropIfgram=True)

    # 1.2 design matrix
    A = stack_obj.get_design_matrix4timeseries(date12_list)[0]
    # num_date = num of time-series epochs = num of date intervals + 1
    num_ifgram, num_date = A.shape[0], A.shape[1] + 1
    inps.numIfgram = num_ifgram

    # 1.3 print key setup info
    msg = '-------------------------------------------------------------------------------\n'
    if inps.minNormVelocity:
        suffix = 'deformation velocity'
    else:
        suffix = 'deformation phase'
    msg += 'least-squares solution with L2 min-norm on: {}\n'.format(suffix)
    msg += 'minimum redundancy: {}\n'.format(inps.minRedundancy)
    msg += 'weight function: {}\n'.format(inps.weightFunc)

    if inps.maskDataset:
        if inps.maskDataset in ['coherence', 'offsetSNR']:
            suffix = '{} < {}'.format(inps.maskDataset, inps.maskThreshold)
        else:
            suffix = '{} == 0'.format(inps.maskDataset)
        msg += 'mask out pixels with: {}\n'.format(suffix)
    else:
        msg += 'mask: no\n'

    # a rank-deficient design matrix means the network has disconnected subsets
    if np.linalg.matrix_rank(A) < A.shape[1]:
        msg += '***WARNING: the network is NOT fully connected.\n'
        msg += '\tInversion result can be biased!\n'
        msg += '\tContinue to use SVD to resolve the offset between different subsets.\n'
    msg += '-------------------------------------------------------------------------------'
    print(msg)

    print('number of interferograms: {}'.format(num_ifgram))
    print('number of acquisitions : {}'.format(num_date))
    print('number of lines : {}'.format(length))
    print('number of columns : {}'.format(width))

    ## 2. prepare output

    # 2.1 metadata: record the inversion configuration used
    meta = dict(stack_obj.metadata)
    for key in configKeys:
        meta[key_prefix + key] = str(vars(inps)[key])

    # 2.2 instantiate time-series
    dsNameDict = {
        "date": (np.dtype('S8'), (num_date, )),
        "bperp": (np.float32, (num_date, )),
        "timeseries": (np.float32, (num_date, length, width)),
    }

    meta['FILE_TYPE'] = 'timeseries'
    meta['UNIT'] = 'm'
    meta['REF_DATE'] = date_list[0]

    ts_obj = timeseries(inps.tsFile)
    ts_obj.layout_hdf5(dsNameDict, meta)

    # write date time-series
    date_list_utf8 = [dt.encode('utf-8') for dt in date_list]
    writefile.write_hdf5_block(inps.tsFile, date_list_utf8, datasetName='date')

    # write bperp time-series
    pbase = stack_obj.get_perp_baseline_timeseries(dropIfgram=True)
    writefile.write_hdf5_block(inps.tsFile, pbase, datasetName='bperp')

    # 2.3 instantiate temporal coherence
    dsNameDict = {"temporalCoherence": (np.float32, (length, width))}
    meta['FILE_TYPE'] = 'temporalCoherence'
    meta['UNIT'] = '1'
    meta.pop('REF_DATE')
    writefile.layout_hdf5(inps.tempCohFile, dsNameDict, metadata=meta)

    # 2.4 instantiate number of inverted observations
    dsNameDict = {"mask": (np.float32, (length, width))}
    meta['FILE_TYPE'] = 'mask'
    meta['UNIT'] = '1'
    writefile.layout_hdf5(inps.numInvFile, dsNameDict, metadata=meta)

    ## 3. run the inversion / estimation and write to disk

    # 3.1 split ifgram_file into blocks to save memory
    box_list, num_box = split2boxes(inps.ifgramStackFile, memory_size=inps.memorySize)

    # 3.2 prepare the input arguments for *_patch()
    data_kwargs = {
        "ifgram_file": inps.ifgramStackFile,
        "ref_phase": inps.refPhase,
        "obs_ds_name": inps.obsDatasetName,
        "weight_func": inps.weightFunc,
        "min_norm_velocity": inps.minNormVelocity,
        "water_mask_file": inps.waterMaskFile,
        "mask_ds_name": inps.maskDataset,
        "mask_threshold": inps.maskThreshold,
        "min_redundancy": inps.minRedundancy
    }

    # 3.3 invert / write block-by-block
    for i, box in enumerate(box_list):
        box_width = box[2] - box[0]
        box_length = box[3] - box[1]
        if num_box > 1:
            print('\n------- processing patch {} out of {} --------------'.format(i + 1, num_box))
            print('box width: {}'.format(box_width))
            print('box length: {}'.format(box_length))

        # update box argument in the input data
        data_kwargs['box'] = box

        if inps.cluster == 'no':
            # non-parallel: drop the trailing `box` from the patch result
            ts, temp_coh, num_inv_ifg = ifgram_inversion_patch(**data_kwargs)[:-1]

        else:
            # parallel
            print('\n\n------- start parallel processing using Dask -------')

            # initiate the output data
            ts = np.zeros((num_date, box_length, box_width), np.float32)
            temp_coh = np.zeros((box_length, box_width), np.float32)
            num_inv_ifg = np.zeros((box_length, box_width), np.float32)

            # initiate dask cluster and client
            cluster_obj = cluster.DaskCluster(inps.cluster, inps.numWorker, config_name=inps.config)
            cluster_obj.open()

            # run dask
            ts, temp_coh, num_inv_ifg = cluster_obj.run(func=ifgram_inversion_patch,
                                                        func_data=data_kwargs,
                                                        results=[ts, temp_coh, num_inv_ifg])

            # close dask cluster and client
            cluster_obj.close()

            print('------- finished parallel processing -------\n\n')

        # write the block to disk
        # with 3D block in [z0, z1, y0, y1, x0, x1]
        # and 2D block in [y0, y1, x0, x1]

        # time-series - 3D
        block = [0, num_date, box[1], box[3], box[0], box[2]]
        writefile.write_hdf5_block(inps.tsFile,
                                   data=ts,
                                   datasetName='timeseries',
                                   block=block)

        # temporal coherence - 2D
        block = [box[1], box[3], box[0], box[2]]
        writefile.write_hdf5_block(inps.tempCohFile,
                                   data=temp_coh,
                                   datasetName='temporalCoherence',
                                   block=block)

        # number of inverted obs - 2D
        writefile.write_hdf5_block(inps.numInvFile,
                                   data=num_inv_ifg,
                                   datasetName='mask',
                                   block=block)

        m, s = divmod(time.time() - start_time, 60)
        print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))

    # 3.4 update output data on the reference pixel
    if not inps.skip_ref:
        # grab ref_y/x
        ref_y = int(stack_obj.metadata['REF_Y'])
        ref_x = int(stack_obj.metadata['REF_X'])
        print('-' * 50)
        print('update values on the reference pixel: ({}, {})'.format(ref_y, ref_x))

        print('set temporal coherence on the reference pixel to 1.')
        with h5py.File(inps.tempCohFile, 'r+') as f:
            f['temporalCoherence'][ref_y, ref_x] = 1.

        print('set # of observations on the reference pixel as {}'.format(num_ifgram))
        with h5py.File(inps.numInvFile, 'r+') as f:
            f['mask'][ref_y, ref_x] = num_ifgram

    m, s = divmod(time.time() - start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return
def run_unwrap_error_bridge(ifgram_file, water_mask_file, ramp_type=None, radius=50,
                            ccName='connectComponent', dsNameIn='unwrapPhase',
                            dsNameOut='unwrapPhase_bridging'):
    """Run unwrapping error correction with bridging
    Parameters: ifgram_file : str, path of ifgram stack file
                water_mask_file : str, path of water mask file
                ramp_type : str, name of phase ramp to be removed during the phase jump estimation
                radius : int, NOTE(review): accepted but not used in this body — TODO confirm intent
                ccName : str, dataset name of connected components
                dsNameIn : str, dataset name of unwrap phase to be corrected
                dsNameOut : str, dataset name of unwrap phase to be saved after correction
    Returns:    ifgram_file : str, path of ifgram stack file (corrected in place for ifgramStack input)
    """
    print('-'*50)
    print('correct unwrapping error in {} with bridging ...'.format(ifgram_file))
    if ramp_type is not None:
        print('estimate and remove a {} ramp while calculating phase offset'.format(ramp_type))

    # read water mask (optional); None disables water masking below
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # file info
    atr = readfile.read_attribute(ifgram_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    k = atr['FILE_TYPE']

    # correct unwrap error ifgram by ifgram
    if k == 'ifgramStack':
        # include dropped ifgrams so dataset indices line up with the full stack
        date12_list = ifgramStack(ifgram_file).get_date12_list(dropIfgram=False)
        num_ifgram = len(date12_list)
        shape_out = (num_ifgram, length, width)

        # prepare output data writing: reuse the output dataset if it already exists
        print('open {} with r+ mode'.format(ifgram_file))
        f = h5py.File(ifgram_file, 'r+')
        print('input dataset:', dsNameIn)
        print('output dataset:', dsNameOut)
        if dsNameOut in f.keys():
            ds = f[dsNameOut]
            print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))
        else:
            ds = f.create_dataset(dsNameOut, shape_out, maxshape=(None, None, None),
                                  chunks=True, compression=None)
            print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))

        # correct unwrap error ifgram by ifgram
        prog_bar = ptime.progressBar(maxValue=num_ifgram)
        for i in range(num_ifgram):
            # read unwrapPhase and connectComponent
            date12 = date12_list[i]
            unw = np.squeeze(f[dsNameIn][i, :, :])
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                # zero out conn-comp labels over water so they are excluded from bridging
                cc[water_mask == 0] = 0

            # bridging: label conn comps, build MST bridges, apply integer-cycle offsets
            cc_obj = connectComponent(conncomp=cc, metadata=atr)
            cc_obj.label()
            cc_obj.find_mst_bridge()
            unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

            # write to hdf5 file
            ds[i, :, :] = unw_cor
            prog_bar.update(i+1, suffix=date12)
        prog_bar.close()
        # stamp modification time so downstream update-mode checks can detect the change
        ds.attrs['MODIFICATION_TIME'] = str(time.time())
        f.close()
        print('close {} file.'.format(ifgram_file))

    if k == '.unw':
        # single-interferogram ROI_PAC-style file: correct and write to a new *_unwCor file
        # read unwrap phase
        unw = readfile.read(ifgram_file)[0]

        # read connected components (two possible companion-file naming conventions)
        cc_files0 = [ifgram_file+'.conncomp',
                     os.path.splitext(ifgram_file)[0]+'_snap_connect.byt']
        cc_files = [i for i in cc_files0 if os.path.isfile(i)]
        if len(cc_files) == 0:
            raise FileNotFoundError(cc_files0)
        cc = readfile.read(cc_files[0])[0]
        if water_mask is not None:
            cc[water_mask == 0] = 0

        # bridging
        cc_obj = connectComponent(conncomp=cc, metadata=atr)
        cc_obj.label()
        cc_obj.find_mst_bridge()
        unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

        # write to hdf5 file
        out_file = '{}_unwCor{}'.format(os.path.splitext(ifgram_file)[0],
                                        os.path.splitext(ifgram_file)[1])
        print('writing >>> {}'.format(out_file))
        writefile.write(unw_cor, out_file=out_file, ref_file=ifgram_file)

    return ifgram_file
def cmd_line_parse(iargs=None):
    """Parse and validate command line arguments for the inversion script.

    Parameters: iargs : list of str, command line arguments (None to read sys.argv)
    Returns:    inps  : argparse.Namespace with derived fields
                        (numWorker, obsDatasetName, tsFile, tempCohFile, numInvFile, ...)
    Raises:     ValueError if the input file is not an ifgramStack, or if the
                requested observation dataset does not exist in the file.
    """
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    # check input file type
    atr = readfile.read_attribute(inps.ifgramStackFile)
    if atr['FILE_TYPE'] not in ['ifgramStack']:
        raise ValueError(
            'input is {} file, support ifgramStack file only.'.format(
                atr['FILE_TYPE']))

    if inps.templateFile:
        inps, template = read_template2inps(inps.templateFile, inps)
    else:
        template = dict()

    # --cluster and --num-worker option
    inps.numWorker = str(
        cluster.DaskCluster.format_num_worker(inps.cluster, inps.numWorker))
    if inps.cluster != 'no' and inps.numWorker == '1':
        # a single worker gives no speedup; fall back to serial processing
        print(
            'WARNING: number of workers is 1, turn OFF parallel processing and continue'
        )
        inps.cluster = 'no'

    # --water-mask option: silently ignore a non-existent mask file
    if inps.waterMaskFile and not os.path.isfile(inps.waterMaskFile):
        inps.waterMaskFile = None

    # --dset option: pick the default observation dataset when none is given
    if not inps.obsDatasetName:
        inps.obsDatasetName = 'unwrapPhase'

        # determine suffix based on unwrapping error correction method
        obs_suffix_map = {
            'bridging': '_bridging',
            'phase_closure': '_phaseClosure',
            'bridging+phase_closure': '_bridging_phaseClosure'
        }
        key = 'mintpy.unwrapError.method'
        if key in template.keys() and template[key]:
            unw_err_method = template[key].lower().replace(
                ' ', '')  # fix potential typo
            inps.obsDatasetName += obs_suffix_map[unw_err_method]
            print('phase unwrapping error correction "{}" is turned ON'.format(
                unw_err_method))
        print('use dataset "{}" by default'.format(inps.obsDatasetName))

    # check if input observation dataset exists.
    stack_obj = ifgramStack(inps.ifgramStackFile)
    stack_obj.open(print_msg=False)
    if inps.obsDatasetName not in stack_obj.datasetNames:
        msg = 'input dataset name "{}" not found in file: {}'.format(
            inps.obsDatasetName, inps.ifgramStackFile)
        raise ValueError(msg)

    # --skip-ref option: offset datasets have no reference pixel to honor
    if 'offset' in inps.obsDatasetName.lower():
        inps.skip_ref = True

    # --output option: auto output file names by observation dataset type
    if not inps.outfile:
        if inps.obsDatasetName.startswith('unwrapPhase'):
            inps.outfile = [
                'timeseries.h5', 'temporalCoherence.h5', 'numInvIfgram.h5'
            ]
        elif inps.obsDatasetName.startswith('azimuthOffset'):
            inps.outfile = [
                'timeseriesAz.h5', 'temporalCoherenceAz.h5', 'numInvOffset.h5'
            ]
        elif inps.obsDatasetName.startswith('rangeOffset'):
            inps.outfile = [
                'timeseriesRg.h5', 'temporalCoherenceRg.h5', 'numInvOffset.h5'
            ]
        else:
            raise ValueError(
                'un-recognized input observation dataset name: {}'.format(
                    inps.obsDatasetName))

    inps.tsFile, inps.tempCohFile, inps.numInvFile = inps.outfile

    return inps
def diff_file(file1, file2, outFile=None, force=False):
    """Subtraction/difference of two input files.

    Parameters: file1   : str, path of the minuend file
                file2   : list of str, path(s) of the subtrahend file(s)
                          (only single-dataset files support more than one)
                outFile : str, output file name (auto-generated if None)
                force   : bool, for time-series input: difference over shared
                          dates only instead of raising when dates mismatch
    Returns:    outFile : str, output file name
    """
    if not outFile:
        fbase, fext = os.path.splitext(file1)
        if len(file2) > 1:
            raise ValueError('Output file name is needed for more than 2 files input.')
        outFile = '{}_diff_{}{}'.format(fbase, os.path.splitext(os.path.basename(file2[0]))[0], fext)
    print('{} - {} --> {}'.format(file1, file2, outFile))

    # Read basic info
    atr1 = readfile.read_attribute(file1)
    k1 = atr1['FILE_TYPE']
    atr2 = readfile.read_attribute(file2[0])
    k2 = atr2['FILE_TYPE']
    print('input files are: {} and {}'.format(k1, k2))

    if k1 == 'timeseries':
        if k2 not in ['timeseries', 'giantTimeseries']:
            raise Exception('Input multiple dataset files are not the same file type!')
        if len(file2) > 1:
            raise Exception(('Only 2 files substraction is supported for time-series file,'
                             ' {} input.'.format(len(file2)+1)))

        obj1 = timeseries(file1)
        obj1.open()
        if k2 == 'timeseries':
            obj2 = timeseries(file2[0])
            unit_fac = 1.
        elif k2 == 'giantTimeseries':
            obj2 = giantTimeseries(file2[0])
            # GIAnT stores displacement in mm; convert to m to match MintPy
            unit_fac = 0.001
        obj2.open()
        ref_date, ref_y, ref_x = _check_reference(obj1.metadata, obj2.metadata)

        # check dates shared by two timeseries files
        dateListShared = [i for i in obj1.dateList if i in obj2.dateList]
        # boolean flag per date of file1: True = shared, differenced; False = kept as-is
        dateShared = np.ones((obj1.numDate), dtype=np.bool_)
        if dateListShared != obj1.dateList:
            print('WARNING: {} does not contain all dates in {}'.format(file2, file1))
            if force:
                dateExcluded = list(set(obj1.dateList) - set(dateListShared))
                print('Continue and enforce the differencing for their shared dates only.')
                print('\twith following dates are ignored for differencing:\n{}'.format(dateExcluded))
                dateShared[np.array([obj1.dateList.index(i) for i in dateExcluded])] = 0
            else:
                raise Exception('To enforce the differencing anyway, use --force option.')

        # consider different reference_date/pixel:
        # re-reference data2 in time and space to match data1's reference
        data2 = readfile.read(file2[0], datasetName=dateListShared)[0] * unit_fac
        if ref_date:
            data2 -= np.tile(data2[obj2.dateList.index(ref_date), :, :],
                             (data2.shape[0], 1, 1))
        if ref_y and ref_x:
            data2 -= np.tile(data2[:, ref_y, ref_x].reshape(-1, 1, 1),
                             (1, data2.shape[1], data2.shape[2]))

        data = obj1.read()
        mask = data == 0.
        data[dateShared] -= data2
        data[mask] = 0.  # Do not change zero phase value
        del data2
        writefile.write(data, out_file=outFile, ref_file=file1)

    elif all(i == 'ifgramStack' for i in [k1, k2]):
        obj1 = ifgramStack(file1)
        obj1.open()
        obj2 = ifgramStack(file2[0])
        obj2.open()
        dsNames = list(set(obj1.datasetNames) & set(obj2.datasetNames))
        if len(dsNames) == 0:
            raise ValueError('no common dataset between two files!')
        # pick the highest-priority common dataset per project convention
        dsName = [i for i in ifgramDatasetNames if i in dsNames][0]

        # read data
        print('reading {} from file {} ...'.format(dsName, file1))
        data1 = readfile.read(file1, datasetName=dsName)[0]
        print('reading {} from file {} ...'.format(dsName, file2[0]))
        data2 = readfile.read(file2[0], datasetName=dsName)[0]

        # consider reference pixel (unwrapped phase only); leave zeros untouched
        if 'unwrapphase' in dsName.lower():
            print('referencing to pixel ({},{}) ...'.format(obj1.refY, obj1.refX))
            ref1 = data1[:, obj1.refY, obj1.refX]
            ref2 = data2[:, obj2.refY, obj2.refX]
            for i in range(data1.shape[0]):
                data1[i,:][data1[i, :] != 0.] -= ref1[i]
                data2[i,:][data2[i, :] != 0.] -= ref2[i]

        # operation and ignore zero values (NaN round-trip keeps zeros as zeros)
        data1[data1 == 0] = np.nan
        data2[data2 == 0] = np.nan
        data = data1 - data2
        del data1, data2
        data[np.isnan(data)] = 0.

        # write to file
        dsDict = {}
        dsDict[dsName] = data
        writefile.write(dsDict, out_file=outFile, ref_file=file1)

    # Sing dataset file
    else:
        data1 = readfile.read(file1)[0]
        data = np.array(data1, data1.dtype)
        for fname in file2:
            data2 = readfile.read(fname)[0]
            # compute in float32, then cast back to the original dtype
            data = np.array(data, dtype=np.float32) - np.array(data2, dtype=np.float32)
            data = np.array(data, data1.dtype)
        print('writing >>> '+outFile)
        writefile.write(data, out_file=outFile, metadata=atr1)

    return outFile
def run_deramp(fname, ramp_type, mask_file=None, out_file=None, datasetName=None):
    """ Remove ramp from each 2D matrix of input file
    Parameters: fname     : str, data file to be derampped
                ramp_type : str, name of ramp to be estimated.
                mask_file : str, file of mask of pixels used for ramp estimation
                out_file  : str, output file name
                datasetName : str, output dataset name, for ifgramStack file type only
    Returns:    out_file  : str, output file name
    """
    print('remove {} ramp from file: {}'.format(ramp_type, fname))
    if not out_file:
        fbase, fext = os.path.splitext(fname)
        out_file = '{}_ramp{}'.format(fbase, fext)

    start_time = time.time()
    atr = readfile.read_attribute(fname)

    # mask
    # bug fix: guard against mask_file=None (the default) before os.path.isfile(),
    # which raises TypeError on None; matches the mask-file guard used elsewhere in this file
    if mask_file and os.path.isfile(mask_file):
        mask = readfile.read(mask_file, datasetName='mask')[0]
        print('read mask file: '+mask_file)
    else:
        mask = np.ones((int(atr['LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    # deramping, branched on file type
    k = atr['FILE_TYPE']

    # 1. multi-date time-series: deramp the whole 3D cube at once, write a new file
    if k == 'timeseries':
        print('reading data ...')
        data = readfile.read(fname)[0]
        print('estimating phase ramp ...')
        data = deramp(data, mask, ramp_type=ramp_type, metadata=atr)[0]
        writefile.write(data, out_file, ref_file=fname)

    # 2. ifgramStack: deramp ifgram-by-ifgram, write in place into a new/existing dataset
    elif k == 'ifgramStack':
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        if not datasetName:
            datasetName = 'unwrapPhase'

        with h5py.File(fname, 'a') as f:
            ds = f[datasetName]
            dsNameOut = '{}_ramp'.format(datasetName)
            if dsNameOut in f.keys():
                # reuse existing output dataset
                dsOut = f[dsNameOut]
                print('access HDF5 dataset /{}'.format(dsNameOut))
            else:
                dsOut = f.create_dataset(dsNameOut,
                                         shape=(obj.numIfgram, obj.length, obj.width),
                                         dtype=np.float32,
                                         chunks=True,
                                         compression=None)
                print('create HDF5 dataset /{}'.format(dsNameOut))

            prog_bar = ptime.progressBar(maxValue=obj.numIfgram)
            for i in range(obj.numIfgram):
                data = ds[i, :, :]
                data = deramp(data, mask, ramp_type=ramp_type, metadata=atr)[0]
                dsOut[i, :, :] = data
                prog_bar.update(i+1, suffix='{}/{}'.format(i+1, obj.numIfgram))
            prog_bar.close()
            print('finished writing to file: {}'.format(fname))

    # Single Dataset File
    else:
        data = readfile.read(fname)[0]
        data = deramp(data, mask, ramp_type, metadata=atr)[0]
        print('writing >>> {}'.format(out_file))
        writefile.write(data, out_file=out_file, ref_file=fname)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
    return out_file
def run_unwrap_error_phase_closure(ifgram_file, common_regions, water_mask_file=None,
                                   ccName='connectComponent', dsNameIn='unwrapPhase',
                                   dsNameOut='unwrapPhase_phaseClosure'):
    """Correct unwrapping error in-place using phase-closure common regions.

    Parameters: ifgram_file     : str, path of ifgram stack file (modified in place)
                common_regions  : list of region objects with date12_list,
                                  sample_coords and int_ambiguity attributes
                                  (produced elsewhere — TODO confirm producer)
                water_mask_file : str, path of water mask file (optional)
                ccName          : str, dataset name of connected components
                dsNameIn        : str, dataset name of unwrap phase to be corrected
                dsNameOut       : str, dataset name of corrected unwrap phase
    Returns:    ifgram_file     : str, path of ifgram stack file
    """
    print('-' * 50)
    print('correct unwrapping error in {} with phase closure ...'.format(
        ifgram_file))
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    ref_y, ref_x = stack_obj.refY, stack_obj.refX
    # include dropped ifgrams so indices match the full stack datasets
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    num_ifgram = len(date12_list)
    shape_out = (num_ifgram, length, width)

    # read water mask (optional)
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # prepare output data writing: reuse the output dataset if it already exists
    print('open {} with r+ mode'.format(ifgram_file))
    f = h5py.File(ifgram_file, 'r+')
    print('input dataset:', dsNameIn)
    print('output dataset:', dsNameOut)
    if dsNameOut in f.keys():
        ds = f[dsNameOut]
        print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut,
                                                                s=shape_out))
    else:
        ds = f.create_dataset(dsNameOut,
                              shape_out,
                              maxshape=(None, None, None),
                              chunks=True,
                              compression=None)
        print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut,
                                                                s=shape_out))

    # correct unwrap error ifgram by ifgram
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        date12 = date12_list[i]

        # read unwrap phase to be updated, referenced to the reference pixel
        unw_cor = np.squeeze(f[dsNameIn][i, :, :]).astype(np.float32)
        unw_cor -= unw_cor[ref_y, ref_x]

        # update kept interferograms only; dropped ones are copied through unchanged
        if stack_obj.dropIfgram[i]:

            # get local region info from connectComponent
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0
            cc_obj = connectComponent(conncomp=cc, metadata=stack_obj.metadata)
            cc_obj.label()
            local_regions = measure.regionprops(cc_obj.labelImg)

            # matching regions and correct unwrap error:
            # a local region inherits the integer ambiguity of the first common
            # region fully contained in it (U stays 0 when none matches)
            idx_common = common_regions[0].date12_list.index(date12)
            for local_reg in local_regions:
                local_mask = cc_obj.labelImg == local_reg.label
                U = 0
                for common_reg in common_regions:
                    y = common_reg.sample_coords[:, 0]
                    x = common_reg.sample_coords[:, 1]
                    if all(local_mask[y, x]):
                        U = common_reg.int_ambiguity[idx_common]
                        break
                # apply the 2*pi integer-cycle correction to the whole region
                unw_cor[local_mask] += 2. * np.pi * U

        # write to hdf5 file
        ds[i, :, :] = unw_cor
        prog_bar.update(i + 1, suffix=date12)
    prog_bar.close()
    # stamp modification time so downstream update-mode checks can detect the change
    ds.attrs['MODIFICATION_TIME'] = str(time.time())
    f.close()
    print('close {} file.'.format(ifgram_file))
    return ifgram_file
def temporal_average(File, datasetName='coherence', updateMode=False, outFile=None):
    """Calculate temporal average of multi-temporal dataset, equivalent to stacking
    For ifgramStakc/unwrapPhase, return average phase velocity

    Parameters: File : string, file to be averaged in time
                datasetName : string, dataset to be read from input file, for multiple
                    datasets file - ifgramStack - only
                    e.g.: coherence, unwrapPhase
                updateMode : bool
                outFile : string, output filename
                    None for auto output filename
                    False for do not save as output file
    Returns:    dataMean : 2D array
                outFile : string, output file name
    Examples:   avgPhaseVel = ut.temporal_average('ifgramStack.h5', datasetName='unwrapPhase')[0]
                ut.temporal_average('ifgramStack.h5', datasetName='coherence',
                                    outFile='avgSpatialCoh.h5', updateMode=True)
    """
    atr = readfile.read_attribute(File, datasetName=datasetName)
    k = atr['FILE_TYPE']
    if k not in ['ifgramStack', 'timeseries']:
        # not a multi-temporal file: return its data unchanged
        print('WARNING: input file is not multi-temporal file: {}, return itself.'.format(File))
        data = readfile.read(File)[0]
        return data, File

    # Default output filename
    if outFile is None:
        ext = os.path.splitext(File)[1]
        if not outFile:  # NOTE(review): redundant given outFile is None here — kept as-is
            if k == 'ifgramStack':
                if datasetName == 'coherence':
                    outFile = 'avgSpatialCoh.h5'
                elif 'unwrapPhase' in datasetName:
                    outFile = 'avgPhaseVelocity.h5'
                else:
                    outFile = 'avg{}.h5'.format(datasetName)
            elif k == 'timeseries':
                if k in File:
                    # carry over any suffix after 'timeseries' in the file name
                    processMark = os.path.basename(File).split('timeseries')[1].split(ext)[0]
                    outFile = 'avgDisplacement{}.h5'.format(processMark)
            else:
                outFile = 'avg{}.h5'.format(File)

    # skip the computation if the output already exists
    if updateMode and os.path.isfile(outFile):
        dataMean = readfile.read(outFile)[0]
        return dataMean, outFile

    # Calculate temporal average
    if k == 'ifgramStack':
        dataMean = ifgramStack(File).temporal_average(datasetName=datasetName)
        if 'unwrapPhase' in datasetName:
            # average of unwrapped phase over time is a phase velocity
            atr['FILE_TYPE'] = 'velocity'
            atr['UNIT'] = 'm/year'
        else:
            atr['FILE_TYPE'] = datasetName
    elif k == 'timeseries':
        dataMean = timeseries(File).temporal_average()
        atr['FILE_TYPE'] = 'displacement'

    if outFile:
        writefile.write(dataMean, out_file=outFile, metadata=atr)
    return dataMean, outFile
def detect_unwrap_error(ifgram_file, mask_file, mask_cc_file='maskConnComp.h5',
                        unwDatasetName='unwrapPhase', cutoff=1., min_num_pixel=1e4):
    """Detect unwrapping error based on phase closure and extract coherent conn comps
    based on its histogram distribution

    Check:
    https://en.wikipedia.org/wiki/Otsu%27s_method
    from skimage.filters import threshold_otsu

    Parameters: ifgram_file : string, path of ifgram stack file
                mask_file   : string, path of mask file, e.g. waterMask.h5, maskConnComp.h5
                mask_cc_file: string, path of mask file for coherent conn comps
                cutoff : float, cutoff value for the mean number of nonzero phase closure
                    to be selected as coherent conn comps candidate
                min_num_pixel : float, min number of pixels left after morphology operation
                    to be determined as coherent conn comps
    Returns:    mask_cc_file : string, path of mask file for coherent conn comps
    """
    print('-'*50)
    print('detect unwraping error based on phase closure')
    obj = ifgramStack(ifgram_file)
    obj.open(print_msg=False)
    # design matrix of all closed triplets; row count is the total closure number
    C = obj.get_design_matrix4triplet(obj.get_date12_list(dropIfgram=False))

    num_nonzero_closure = get_nonzero_phase_closure(ifgram_file, unwDatasetName=unwDatasetName)

    # get histogram of num_nonzero_phase_closure
    mask = readfile.read(mask_file)[0]
    # exclude pixels with zero nonzero-closure count from the statistics
    mask *= num_nonzero_closure != 0.

    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=[12, 4])
    num4disp = np.array(num_nonzero_closure, dtype=np.float32)
    num4disp[mask == 0] = np.nan
    im = ax[0].imshow(num4disp)
    ax[0].set_xlabel('Range [pix.]')
    ax[0].set_ylabel('Azimuth [pix.]')
    ax[0] = pp.auto_flip_direction(obj.metadata, ax=ax[0], print_msg=False)
    cbar = fig.colorbar(im, ax=ax[0])
    cbar.set_label('number of non-zero phase closure')

    print('2. extract coherent conn comps with unwrap error based on histogram distribution')
    max_nonzero_closure = int(np.max(num_nonzero_closure[mask]))
    bin_value, bin_edge = ax[1].hist(num_nonzero_closure[mask].flatten(),
                                     range=(0, max_nonzero_closure),
                                     log=True,
                                     bins=max_nonzero_closure)[0:2]
    ax[1].set_xlabel('number of non-zero phase closure')
    ax[1].set_ylabel('number of pixels')

    if 'Closure' not in unwDatasetName:
        # zero out the lowest 5% of closure-count bins: those pixels are
        # correctable by phase closure alone, so do not flag them here
        print('eliminate pixels with number of nonzero phase closure < 5% of total phase closure number')
        print('\twhich can be corrected using phase closure alone.')
        bin_value[:int(C.shape[0]*0.05)] = 0.

    # threshold the histogram by median absolute deviation
    bin_value_thres = ut.median_abs_deviation_threshold(bin_value, cutoff=cutoff)
    print('median abs deviation cutoff value: {}'.format(cutoff))

    plt.plot([0, max_nonzero_closure], [bin_value_thres, bin_value_thres])
    out_img = 'numUnwErr_stat.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('save unwrap error detection result to {}'.format(out_img))

    # histogram --> candidates of coherence conn comps --> mask_cc
    # find pixel clusters sharing similar number of non-zero phase closure
    print('searching connected components with more than {} pixels'.format(min_num_pixel))
    bin_label, n_bins = ndimage.label(bin_value > bin_value_thres)

    mask_cc = np.zeros(num_nonzero_closure.shape, dtype=np.int16)
    # first conn comp - reference conn comp with zero non-zero phase closure
    num_cc = 1
    mask_cc1 = num_nonzero_closure == 0.
    mask_cc1s = ut.get_all_conn_components(mask_cc1, min_num_pixel=min_num_pixel)
    for mask_cc1 in mask_cc1s:
        mask_cc += mask_cc1

    # other conn comps - target conn comps to be corrected for unwrap error
    for i in range(n_bins):
        # pixels whose closure count falls in this histogram peak's bin range
        idx = np.where(bin_label == i+1)[0]
        mask_cci0 = np.multiply(num_nonzero_closure >= bin_edge[idx[0]],
                                num_nonzero_closure < bin_edge[idx[-1]+1])
        mask_ccis = ut.get_all_conn_components(mask_cci0, min_num_pixel=min_num_pixel)
        if mask_ccis:
            for mask_cci in mask_ccis:
                num_cc += 1
                # each new conn comp gets a distinct integer label in the output mask
                mask_cc += mask_cci * num_cc

                fig, ax = plt.subplots(nrows=1, ncols=2, figsize=[8, 4])
                im = ax[0].imshow(mask_cci0)
                im = ax[1].imshow(mask_cci)
                fig.savefig('mask_cc{}.png'.format(num_cc),
                            bbox_inches='tight', transparent=True, dpi=300)

    # save to hdf5 file
    num_bridge = num_cc - 1
    atr = dict(obj.metadata)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 1  # NOTE(review): int where other metadata values are str — confirm downstream tolerance
    writefile.write(mask_cc, out_file=mask_cc_file, metadata=atr)

    # plot and save figure to img file
    out_img = '{}.png'.format(os.path.splitext(mask_cc_file)[0])
    fig, ax = plt.subplots(figsize=[6, 8])
    im = ax.imshow(mask_cc)
    ax = pp.auto_flip_direction(atr, ax=ax, print_msg=False)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", "3%", pad="3%")
    cbar = plt.colorbar(im, cax=cax, ticks=np.arange(num_bridge+2))
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('save to {}'.format(out_img))
    return mask_cc_file
def get_date12_to_drop(inps):
    """Get date12 list to dropped
    Return [] if no ifgram to drop, thus KEEP ALL ifgrams;
           None if nothing to change, exit without doing anything.

    Each enabled criterion below APPENDS to date12_to_drop; a pair is dropped
    if ANY criterion selects it. Duplicates are removed at the end.
    """
    obj = ifgramStack(inps.file)
    obj.open()
    date12ListAll = obj.date12List
    dateList = obj.dateList
    print('number of interferograms: {}'.format(len(date12ListAll)))

    # Get date12_to_drop
    date12_to_drop = []

    # reference file: drop pairs absent from the reference stack's kept pairs
    if inps.referenceFile:
        date12_to_keep = ifgramStack(inps.referenceFile).get_date12_list(dropIfgram=True)
        print('--------------------------------------------------')
        print('use reference pairs info from file: {}'.format(inps.referenceFile))
        print('number of interferograms in reference: {}'.format(len(date12_to_keep)))
        tempList = sorted(list(set(date12ListAll) - set(date12_to_keep)))
        date12_to_drop += tempList
        print('date12 not in reference file: ({})\n{}'.format(len(tempList), tempList))

    # coherence file: drop low-coherence pairs, optionally keeping the MST backbone
    if inps.coherenceBased:
        print('--------------------------------------------------')
        print('use coherence-based network modification')
        coord = ut.coordinate(obj.metadata, lookup_file=inps.lookupFile)
        if inps.aoi_geo_box and inps.lookupFile:
            print('input AOI in (lon0, lat1, lon1, lat0): {}'.format(inps.aoi_geo_box))
            inps.aoi_pix_box = coord.bbox_geo2radar(inps.aoi_geo_box)
        if inps.aoi_pix_box:
            inps.aoi_pix_box = coord.check_box_within_data_coverage(inps.aoi_pix_box)
            print('input AOI in (x0,y0,x1,y1): {}'.format(inps.aoi_pix_box))

        # Calculate spatial average coherence
        cohList = ut.spatial_average(inps.file,
                                     datasetName='coherence',
                                     maskFile=inps.maskFile,
                                     box=inps.aoi_pix_box,
                                     saveList=True)[0]
        coh_date12_list = list(np.array(date12ListAll)[np.array(cohList) >= inps.minCoherence])

        # MST network: pairs in the minimum spanning tree are always kept to
        # preserve network connectivity
        if inps.keepMinSpanTree:
            print('Get minimum spanning tree (MST) of interferograms with inverse of coherence.')
            msg = ('Drop ifgrams with '
                   '1) average coherence < {} AND '
                   '2) not in MST network: '.format(inps.minCoherence))
            mst_date12_list = pnet.threshold_coherence_based_mst(date12ListAll, cohList)
            mst_date12_list = ptime.yyyymmdd_date12(mst_date12_list)
        else:
            msg = 'Drop ifgrams with average coherence < {}: '.format(inps.minCoherence)
            mst_date12_list = []

        tempList = sorted(list(set(date12ListAll) - set(coh_date12_list + mst_date12_list)))
        date12_to_drop += tempList
        msg += '({})'.format(len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # temp baseline threshold
    if inps.tempBaseMax:
        tempIndex = np.abs(obj.tbaseIfgram) > inps.tempBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with temporal baseline > {} days: ({})\n{}'.format(
            inps.tempBaseMax, len(tempList), tempList))

    # perp baseline threshold
    if inps.perpBaseMax:
        tempIndex = np.abs(obj.pbaseIfgram) > inps.perpBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with perp baseline > {} meters: ({})\n{}'.format(
            inps.perpBaseMax, len(tempList), tempList))

    # connection number threshold: keep only sequential pairs up to N neighbors
    if inps.connNumMax:
        seq_date12_list = pnet.select_pairs_sequential(dateList, inps.connNumMax)
        seq_date12_list = ptime.yyyymmdd_date12(seq_date12_list)
        tempList = [i for i in date12ListAll if i not in seq_date12_list]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        msg = 'Drop ifgrams with temporal baseline beyond {} neighbors: ({})'.format(
            inps.connNumMax, len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # excludeIfgIndex: drop pairs by their position in the stack
    if inps.excludeIfgIndex:
        tempList = [date12ListAll[i] for i in inps.excludeIfgIndex]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with the following index number: {}'.format(len(tempList)))
        for i in range(len(tempList)):
            print('{} : {}'.format(i, tempList[i]))
        #len(tempList), zip(inps.excludeIfgIndex, tempList)))

    # excludeDate: drop any pair touching an excluded acquisition date
    if inps.excludeDate:
        tempList = [i for i in date12ListAll if any(j in inps.excludeDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('-'*50+'\nDrop ifgrams including the following dates: ({})\n{}'.format(
            len(tempList), inps.excludeDate))
        print('-'*30+'\n{}'.format(tempList))

    # startDate: drop pairs with any acquisition before the start date
    if inps.startDate:
        minDate = int(inps.startDate)
        tempList = [i for i in date12ListAll if any(int(j) < minDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date earlier than: {} ({})\n{}'.format(
            inps.startDate, len(tempList), tempList))

    # endDate: drop pairs with any acquisition after the end date
    if inps.endDate:
        maxDate = int(inps.endDate)
        tempList = [i for i in date12ListAll if any(int(j) > maxDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date later than: {} ({})\n{}'.format(
            inps.endDate, len(tempList), tempList))

    # Manually drop pairs; a None from the interactive picker aborts the whole call
    if inps.manual:
        tempList = manual_select_pairs_to_remove(inps.file)
        if tempList is None:
            return None
        tempList = [i for i in tempList if i in date12ListAll]
        print('date12 selected to remove: ({})\n{}'.format(len(tempList), tempList))
        date12_to_drop += tempList

    # drop duplicate date12 and sort in order
    date12_to_drop = sorted(list(set(date12_to_drop)))
    date12_to_keep = sorted(list(set(date12ListAll) - set(date12_to_drop)))
    print('--------------------------------------------------')
    print('number of interferograms to remove: {}'.format(len(date12_to_drop)))
    print('number of interferograms to keep : {}'.format(len(date12_to_keep)))

    # report acquisitions that no longer appear in any kept pair
    date_to_keep = [d for date12 in date12_to_keep for d in date12.split('_')]
    date_to_keep = sorted(list(set(date_to_keep)))
    date_to_drop = sorted(list(set(dateList) - set(date_to_keep)))
    if len(date_to_drop) > 0:
        print('number of acquisitions to remove: {}\n{}'.format(len(date_to_drop), date_to_drop))

    # compare against the drop flags already marked in the file:
    # identical -> None (no update needed); everything dropped -> error
    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))
    if date12_to_drop == date12ListDropped:
        print('Calculated date12 to drop is the same as exsiting marked input file, skip updating file.')
        date12_to_drop = None
    elif date12_to_drop == date12ListAll:
        raise Exception('Zero interferogram left! Please adjust your setting and try again.')
    return date12_to_drop