def main(argv):
    inps = cmdLineParse()
    print '\n*************** Stacking ******************'
    for File in inps.file:
        ut.get_file_stack(File, inps.mask_file)
    return
def seed_file_inps(File, inps=None, outFile=None):
    '''Seed input file with options from input namespace.
    Return output file name if seeding succeeds; otherwise, return None.
    '''
    # Optional inputs
    if not outFile:
        outFile = 'Seeded_' + os.path.basename(File)
    if not inps:
        inps = cmdLineParse([''])
    print '----------------------------------------------------'
    print 'seeding file: ' + File

    # Get stack and mask
    stack = ut.get_file_stack(File, inps.mask_file)
    mask = ~np.isnan(stack)
    if np.nansum(mask) == 0.0:
        print '\n*****************************************************'
        print 'ERROR:'
        print 'There is no pixel that has valid phase value in all datasets.'
        print 'Check the file!'
        print 'Seeding failed'
        sys.exit(1)

    atr = readfile.read_attribute(File)

    # 1. Reference using global average
    if inps.method == 'global-average':
        print '\n---------------------------------------------------------'
        print 'Automatically Seeding using Global Spatial Average Value '
        print '---------------------------------------------------------'
        print 'Calculating the global spatial average value for each epoch'+\
              ' of all valid pixels ...'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        box = (0, 0, width, length)
        meanList = ut.spatial_average(File, mask, box)[0]
        inps.ref_y = ''
        inps.ref_x = ''
        outFile = seed_file_reference_value(File, outFile, meanList, inps.ref_y, inps.ref_x)
        return outFile

    # 2. Reference using specific pixel
    # 2.1 Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.coherence_file:
            inps.method = 'max-coherence'
            inps.ref_y, inps.ref_x = select_max_coherence_yx(inps.coherence_file, mask, inps.min_coherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps)

    # 2.2 Seeding file with reference y/x
    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x]:
        if inps.mark_attribute:
            re_select = True
            try:
                ref_x_orig = int(atr['ref_x'])
                ref_y_orig = int(atr['ref_y'])
                if inps.ref_x == ref_x_orig and inps.ref_y == ref_y_orig:
                    re_select = False
                    print 'Same reference pixel is already selected/saved in file, skip updating file attributes'
            except:
                pass
            if re_select:
                print 'Add/update ref_x/y attribute to file: ' + File
                atr_ref = dict()
                atr_ref['ref_x'] = str(inps.ref_x)
                atr_ref['ref_y'] = str(inps.ref_y)
                print atr_ref
                outFile = ut.add_attribute(File, atr_ref)
        else:
            print 'Referencing input file to pixel in y/x: (%d, %d)' % (inps.ref_y, inps.ref_x)
            box = (inps.ref_x, inps.ref_y, inps.ref_x + 1, inps.ref_y + 1)
            refList = ut.spatial_average(File, mask, box)[0]
            outFile = seed_file_reference_value(File, outFile, refList, inps.ref_y, inps.ref_x)
    else:
        raise ValueError('Can not find reference y/x, or it has NaN value in the file stacking.')

    return outFile
def seed_file_inps(File, inps=None, outFile=None):
    '''Seed input file with options from input namespace.
    Return output file name if seeding succeeds; otherwise, return None.
    '''
    # Optional inputs
    if not outFile:
        outFile = 'Seeded_' + os.path.basename(File)
    if not inps:
        inps = cmdLineParse([''])
    print '----------------------------------------------------'
    print 'seeding file: ' + File

    # Get stack and mask
    stack = ut.get_file_stack(File, inps.mask_file)
    mask = ~np.isnan(stack)
    if np.nansum(mask) == 0.0:
        print '\n*****************************************************'
        print 'ERROR:'
        print 'There is no pixel that has valid phase value in all datasets.'
        print 'Check the file!'
        print 'Seeding failed'
        sys.exit(1)

    # 1. Reference using global average
    if inps.method == 'global-average':
        print '\n---------------------------------------------------------'
        print 'Automatically Seeding using Global Spatial Average Value '
        print '---------------------------------------------------------'
        print 'Calculating the global spatial average value for each epoch'+\
              ' of all valid pixels ...'
        atr = readfile.read_attribute(File)
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        box = (0, 0, width, length)
        meanList = ut.spatial_average(File, mask, box)
        inps.ref_y = ''
        inps.ref_x = ''
        outFile = seed_file_reference_value(File, outFile, meanList, inps.ref_y, inps.ref_x)
        return outFile

    # 2. Reference using specific pixel
    # 2.1 Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.coherence_file:
            inps.method = 'max-coherence'
            inps.ref_y, inps.ref_x = select_max_coherence_yx(inps.coherence_file, mask)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps)

    # 2.2 Seeding file with reference y/x
    if inps.ref_y and inps.ref_x:
        print 'Seed file with input reference y/x ...'
        if mask[inps.ref_y, inps.ref_x]:
            print 'Referencing input file to pixel in y/x: (%d, %d)' % (inps.ref_y, inps.ref_x)
            box = (inps.ref_x, inps.ref_y, inps.ref_x + 1, inps.ref_y + 1)
            refList = ut.spatial_average(File, mask, box)
            outFile = seed_file_reference_value(File, outFile, refList, inps.ref_y, inps.ref_x)
        else:
            print '\nInput reference y/x has NaN value in file stacking, skip seeding.'
            return None
    else:
        sys.exit('ERROR: can not find reference y/x! Seeding FAILED.')

    return outFile
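# Usage sketch (illustrative only, kept as a comment): seed_file_inps() is normally
# driven by the command-line parser, but it can also be called directly; with no
# namespace given it falls back to the parser defaults via cmdLineParse(['']).
# The module name assumes this file is seed_data.py, as referenced in
# timeseries_inversion() below; the input file name is just an example.
#
#     >>> import seed_data
#     >>> seeded = seed_data.seed_file_inps('unwrapIfgram.h5')
#     >>> if seeded:
#     ...     print 'referenced file written to: ' + seeded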
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:
                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1),
                                       phase value on the reference pixel for each interferogram
                        ref_y/x      - int, reference pixel coordinate in row/column number
                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline
                        #Inversion
                        weight_function - no, fim, var, coh
    Outputs:
        ts       - 3D np.array in size of (date_num, row_num, col_num)
        temp_coh - 2D np.array in size of (row_num, col_num)
        tsStd    - 3D np.array in size of (date_num, row_num, col_num)
    '''
    ##### Get patch size/index
    if not box:
        box = (0, 0, meta['width'], meta['length'])
    c0, r0, c1, r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initiate output data matrices
    row_num = r1 - r0
    col_num = c1 - c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    tsStd = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Mask for pixels to invert
    mask = np.ones(pixel_num, np.bool_)

    ## 1 - Water Mask
    if meta['water_mask_file']:
        print 'skip pixels on water with mask from file: %s' % (os.path.basename(meta['water_mask_file']))
        try:
            waterMask = readfile.read(meta['water_mask_file'], epoch='waterMask')[0][r0:r1, c0:c1].flatten()
        except:
            waterMask = readfile.read(meta['water_mask_file'], epoch='mask')[0][r0:r1, c0:c1].flatten()
        mask *= np.array(waterMask, np.bool_)

    ## 2 - Mask for Zero Phase in ALL ifgrams
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1, c0:c1].flatten()
    mask *= ~np.isnan(ifgram_stack)
    mask *= ifgram_stack != 0.

    ## Invert pixels on mask 1+2
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to invert: %s out of %s' % (pixel_num2inv, pixel_num)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        tsStd = tsStd.reshape(date_num, row_num, col_num)
        return ts, temp_coh, tsStd

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    date12_list = meta['date12_list']

    if meta['skip_zero_phase']:
        print 'skip zero phase value (masked out and filled during phase unwrapping)'
    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile, 'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1, c0:c1].flatten()
        if meta['skip_zero_phase']:
            d[d != 0.] -= meta['ref_value'][j]
        else:
            d -= meta['ref_value'][j]
        ifgram_data[j] = d
        sys.stdout.write('\rreading interferograms %s/%s ...' % (j+1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
    #ifgram_data -= meta['ref_value']

    ## 3 - Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
    maskAllNet = np.all(ifgram_data, axis=0)
    maskAllNet *= mask
    maskPartNet = mask ^ maskAllNet

    ##### Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)
    try:
        ref_date = str(np.loadtxt('reference_date.txt', dtype=str))
    except:
        ref_date = meta['date8_list'][0]
    #print 'calculate decorrelation noise covariance with reference date = %s' % (ref_date)
    refIdx = meta['date8_list'].index(ref_date)
    timeIdx = [i for i in range(date_num)]
    timeIdx.remove(refIdx)
    Astd = ut.design_matrix(ifgramFile, date12_list, referenceDate=ref_date)[0]

    ##### Inversion
    if meta['weight_function'] in ['no', 'uniform']:
        if np.sum(maskAllNet) > 0:
            print 'inverting pixels with valid phase in all ifgrams with OLS (%.0f pixels) ...' % (np.sum(maskAllNet))
            ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:, maskAllNet], meta['tbase_diff'],
                                                   skipZeroPhase=False)
            ts[1:, maskAllNet] = ts1
            temp_coh[maskAllNet] = tempCoh1

        if np.sum(maskPartNet) > 0:
            print 'inverting pixels with valid phase in part of ifgrams with SVD ...'
            pixel_num2inv = np.sum(maskPartNet)
            pixel_idx2inv = np.where(maskPartNet)[0]
            prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
            for i in range(pixel_num2inv):
                idx = pixel_idx2inv[i]
                ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:, idx], meta['tbase_diff'],
                                                       meta['skip_zero_phase'])
                ts[1:, idx] = ts1.flatten()
                temp_coh[idx] = tempCoh1
                prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
            prog_bar.close()

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1, c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()
            sys.stdout.write('\rreading coherence %s/%s ...' % (j+1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = np.array(coh_data, np.float64)
        L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
        epsilon = 1e-4
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            weight = 1.0 / coherence2phase_variance_ds(weight, L, print_msg=True)
        elif meta['weight_function'].startswith(('lin', 'coh', 'cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            weight[weight < epsilon] = epsilon
        elif meta['weight_function'].startswith(('fim', 'fisher')):
            print 'convert coherence to weight using Fisher Information Index (Seymour & Cumming, 1994)'
            weight = coherence2fisher_info_index(weight, L)
        else:
            print 'Un-recognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            idx = pixel_idx2inv[i]
            ts1, tempCoh1, tsStd1 = network_inversion_wls(A, ifgram_data[:, idx], weight[:, idx], Astd=Astd,
                                                          skipZeroPhase=meta['skip_zero_phase'])
            ts[1:, idx] = ts1.flatten()
            temp_coh[idx] = tempCoh1
            tsStd[timeIdx, idx] = tsStd1.flatten()
            prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
        prog_bar.close()

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)
    tsStd = tsStd.reshape(date_num, row_num, col_num)

    ## Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base'] + str(int(r0 / meta['row_step'])) + '.h5'
        print 'writing >>> ' + fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries', shape=(date_num+1, row_num, col_num), dtype=np.float32)
        dset[0:-1, :, :] = ts
        # last slice stores the temporal coherence
        dset[-1, :, :] = temp_coh
        h5temp.close()
        return
    else:
        return ts, temp_coh, tsStd
def timeseries_inversion(ifgramFile='unwrapIfgram.h5', coherenceFile='coherence.h5', inps_dict=None):
    '''Implementation of the SBAS algorithm.
    Modified from sbas.py written by Scott Baker, 2012.

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        inps_dict     - dict, including the following options:
                        weight_function
                        min_coherence
                        max_coherence
    Outputs:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    '''
    total = time.time()

    if not inps_dict:
        inps_dict = vars(cmdLineParse())
    weight_func = inps_dict['weight_function']
    min_coh = inps_dict['min_coherence']
    max_coh = inps_dict['max_coherence']

    # Basic Info
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    if inps_dict['weight_function'] == 'no':
        ifgram_list = ut.check_drop_ifgram(h5ifgram, atr, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Convert ifgram_list to date12/8_list
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((date_num - 1, 1))

    print 'number of interferograms: ' + str(ifgram_num)
    print 'number of acquisitions  : ' + str(date_num)
    print 'number of pixels: ' + str(pixel_num)

    # Reference pixel in space
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except:
        print 'ERROR: No ref_x/y found! Can not invert interferograms without reference in space.'
        print 'run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
        sys.exit(1)

    ##### Read Interferograms
    print 'reading interferograms ...'
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for j in range(ifgram_num):
        ifgram = ifgram_list[j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        #d[d != 0.] -= d[ref_y, ref_x]
        d -= d[ref_y, ref_x]
        ifgram_data[j] = d.flatten()
        prog_bar.update(j + 1, suffix=date12_list[j])
    h5ifgram.close()
    prog_bar.close()

    #####---------------------- Inversion ----------------------#####
    # Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)

    if weight_func == 'no':
        print 'generalized inversion using SVD (Berardino et al., 2002, IEEE-TGRS)'
        print 'inverting time series ...'
        B_inv = np.array(np.linalg.pinv(B), np.float32)
        ts_rate = np.dot(B_inv, ifgram_data)
        ts1 = ts_rate * np.tile(tbase_diff, (1, pixel_num))
        ts0 = np.array([0.] * pixel_num, np.float32)
        ts_data = np.vstack((ts0, np.cumsum(ts1, axis=0)))
        del ts_rate, ts0, ts1

        # Temporal coherence
        print 'calculating temporal coherence (Tizzani et al., 2007, RSE)'
        temp_coh = np.zeros((1, pixel_num), np.float32) + 0j
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for i in range(ifgram_num):
            ifgram_est = np.dot(A[i, :], ts_data[1:, :])
            ifgram_diff = ifgram_data[i, :] - ifgram_est
            temp_coh += np.exp(1j * ifgram_diff)
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        del ifgram_data, ifgram_est, ifgram_diff
        temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape((length, width)), dtype=np.float32)

    else:
        print 'weighted least squares (WLS) inversion using coherence pixel by pixel'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Can not invert the weighted least squares solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)

        pixel_mask = np.ones(pixel_num, np.bool_)
        print 'reading coherence: ' + os.path.basename(coherenceFile)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[:].flatten()
            d[np.isnan(d)] = 0.
            pixel_mask[d == 0.] = 0
            coh_data[j] = d
            prog_bar.update(j + 1, suffix=date12_list[j])
        h5coh.close()
        prog_bar.close()

        # Get mask of valid pixels to invert
        print 'skip pixels with zero coherence in at least one interferogram'
        print 'skip pixels with zero phase in all interferograms'
        ifgram_stack = ut.get_file_stack(ifgramFile).flatten()
        pixel_mask[ifgram_stack == 0.] = 0

        pixel_num2inv = np.sum(pixel_mask)
        pixel_idx2inv = np.where(pixel_mask)[0]
        ifgram_data = ifgram_data[:, pixel_mask]
        coh_data = coh_data[:, pixel_mask]
        print 'number of pixels to invert: %d' % (pixel_num2inv)

        ##### Calculate Weight matrix
        weight = coh_data
        if weight_func.startswith('var'):
            print 'convert coherence to weight using inverse of variance: x**2/(1-x**2) from Hanssen (2001, Eq. 4.2.32)'
            weight[weight > 0.999] = 0.999
            if weight_func == 'variance-max-coherence':
                print 'constrain the max coherence to %f' % max_coh
                weight[weight > max_coh] = max_coh
            weight = np.square(weight)
            weight *= 1. / (1. - weight)
            if weight_func == 'variance-log':
                print 'use log(1/variance + 1) as weight'
                weight = np.log(weight + 1)
        elif weight_func.startswith('lin'):
            print 'use coherence as weight directly (Tong et al., 2016, RSE)'
        elif weight_func.startswith('norm'):
            mu = (min_coh + max_coh) / 2.0
            std = (max_coh - min_coh) / 6.0
            print 'convert coherence to weight using CDF of normal distribution: N(%f, %f)' % (mu, std)
            chunk_size = 1000
            chunk_num = int(pixel_num2inv / chunk_size) + 1
            prog_bar = ptime.progress_bar(maxValue=chunk_num)
            for i in range(chunk_num):
                i0 = i * chunk_size
                i1 = min([pixel_num2inv, i0 + chunk_size])
                weight[:, i0:i1] = norm.cdf(weight[:, i0:i1], mu, std)
                prog_bar.update(i + 1, every=10)
            prog_bar.close()
            #weight = norm.cdf(weight, mu, std)
        else:
            print 'Un-recognized weight function: %s' % weight_func
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        ts_data = np.zeros((date_num, pixel_num), np.float32)
        temp_coh = np.zeros(pixel_num, np.float32)
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            # Invert time-series
            ifgram_pixel = ifgram_data[:, i]
            weight_pixel = weight[:, i]
            W = np.diag(weight_pixel)
            ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(A.T).dot(W).dot(ifgram_pixel)
            ts_data[1:, pixel_idx2inv[i]] = ts

            # Calculate weighted temporal coherence
            ifgram_diff = ifgram_pixel - np.dot(A, ts)
            temp_coh_pixel = np.abs(np.sum(np.multiply(weight_pixel, np.exp(1j * ifgram_diff)), axis=0)) / np.sum(weight_pixel)
            temp_coh[pixel_idx2inv[i]] = temp_coh_pixel

            prog_bar.update(i + 1, every=2000, suffix=str(i + 1) + ' pixels')
        prog_bar.close()
        del ifgram_data, weight

    #####---------------------- Outputs ----------------------#####
    ## 1.1 Convert time-series phase to displacement
    print 'converting phase to range'
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    ts_data *= phase2range

    ## 1.2 Write time-series data matrix
    timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date, data=ts_data[i].reshape(length, width), compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    ## 1.3 Write time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(ifgramFile, ifgram_list)
    # convert np.array into string separated by white space
    pbase = str(pbase.tolist()).translate(None, '[],')
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()
    del ts_data

    ## 2. Write Temporal Coherence File
    tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coh.reshape(length, width), atr, tempCohFile)

    print 'Time series inversion took ' + str(time.time() - total) + ' secs\nDone.'
    return timeseriesFile, tempCohFile
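# Minimal sketch, for reference only: the conventional SBAS design matrices that
# ut.design_matrix() is assumed to return (Berardino et al., 2002). For interferogram
# j between acquisitions m and s, row j of A marks the two acquisitions with -1/+1
# (so that A.dot(ts[1:]) reproduces the interferometric phase), while row j of B holds
# the time intervals it spans (so that B.dot(velocity) reproduces the same phase).
# The helper name and interface are illustrative, not the module's API.
def _design_matrix_sketch(date8_list, date12_list):
    tbase = ptime.date_list2tbase(date8_list)[0]    # days since the first acquisition
    date_num = len(date8_list)
    ifgram_num = len(date12_list)
    A = np.zeros((ifgram_num, date_num), np.float32)
    B = np.zeros((ifgram_num, date_num), np.float32)
    for j, date12 in enumerate(date12_list):
        m, s = [date8_list.index(d) for d in ptime.yyyymmdd(date12.split('-'))]
        A[j, m] = -1
        A[j, s] = 1
        B[j, m:s] = np.diff(tbase)[m:s]
    # drop the reference (first) date column of A and the always-empty last column of B
    return A[:, 1:], B[:, :-1]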
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:
                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1),
                                       phase value on the reference pixel for each interferogram
                        ref_y/x      - int, reference pixel coordinate in row/column number
                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline
                        #Inversion
                        weight_function - no / uniform, var, coh
    '''
    ##### Get patch size/index
    if not box:
        box = (0, 0, meta['width'], meta['length'])
    c0, r0, c1, r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initiate output data matrices
    row_num = r1 - r0
    col_num = c1 - c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Get mask of non-zero pixels
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1, c0:c1].flatten()
    mask = ~np.isnan(ifgram_stack)
    mask[ifgram_stack == 0.] = 0
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to invert: %d' % (pixel_num2inv)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        return ts, temp_coh

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num2inv), np.float32)
    date12_list = meta['date12_list']

    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile, 'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1, c0:c1]
        ifgram_data[j] = d.flatten()[mask]
        sys.stdout.write('\rreading interferograms %s/%s ...' % (j + 1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
    ifgram_data -= meta['ref_value']

    ##### Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)
    B_inv = np.array(np.linalg.pinv(B), np.float32)

    ##### Inversion
    if meta['weight_function'] in ['no', 'uniform']:
        print 'inverting time-series ...'
        ts[1:, mask] = network_inversion_sbas(B, ifgram_data, meta['tbase_diff'], B_inv=B_inv)
        print 'calculating temporal coherence ...'
        temp_coh[mask] = temporal_coherence(A, ts[1:, mask], ifgram_data)

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num2inv), np.float32)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1, c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()[mask]
            sys.stdout.write('\rreading coherence %s/%s ...' % (j + 1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = coh_data
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
            lineStr = '    number of multilooks L=%d' % L
            if L > 80:
                L = 80
                lineStr += ', use L=80 to avoid dividing by 0 in calculation with negligible effect'
            print lineStr
            weight = 1.0 / coherence2phase_variance_ds(weight, L)
        elif meta['weight_function'].startswith(('lin', 'coh', 'cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            epsilon = 1e-4
            weight[weight < epsilon] = epsilon
        else:
            print 'Un-recognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            ts[1:, pixel_idx2inv[i]] = network_inversion_wls(A, ifgram_data[:, i], weight[:, i])[0].flatten()
            prog_bar.update(i + 1, every=100, suffix=str(i + 1) + '/' + str(pixel_num2inv) + ' pixels')
        prog_bar.close()

        print 'calculating temporal coherence ...'
        #temp_coh[mask] = temporal_coherence(A, ts[1:,mask], ifgram_data, weight)
        temp_coh[mask] = temporal_coherence(A, ts[1:, mask], ifgram_data)

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)

    ## Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base'] + str(int(r0 / meta['row_step'])) + '.h5'
        print 'writing >>> ' + fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries', shape=(date_num + 1, row_num, col_num), dtype=np.float32)
        dset[0:-1, :, :] = ts
        # last slice stores the temporal coherence
        dset[-1, :, :] = temp_coh
        h5temp.close()
        return
    else:
        return ts, temp_coh
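# Illustrative driver sketch (hypothetical, not the module's actual driver): split the
# file into patches of meta['row_step'] lines, invert each patch with
# ifgram_inversion_patch(), and paste the results back into full-size arrays,
# assuming meta['parallel'] is False so the patch results are returned directly.
#
#     ts_all = np.zeros((date_num, meta['length'], meta['width']), np.float32)
#     temp_coh_all = np.zeros((meta['length'], meta['width']), np.float32)
#     for r0 in range(0, meta['length'], meta['row_step']):
#         r1 = min(r0 + meta['row_step'], meta['length'])
#         box = (0, r0, meta['width'], r1)
#         ts, temp_coh = ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box)
#         ts_all[:, r0:r1, :] = ts
#         temp_coh_all[r0:r1, :] = temp_coh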