def read_network_info(inps):
    ext = os.path.splitext(inps.file)[1]

    # 1. Read dateList and pbaseList
    if ext in ['.h5', '.he5']:
        k = readfile.read_attribute(inps.file)['FILE_TYPE']
        print('reading temporal/spatial baselines from {} file: {}'.format(k, inps.file))
        if k == 'ifgramStack':
            inps.dateList = ifgramStack(inps.file).get_date_list(dropIfgram=False)
            inps.pbaseList = ifgramStack(inps.file).get_perp_baseline_timeseries(dropIfgram=False)
        elif k == 'timeseries':
            obj = timeseries(inps.file)
            obj.open(print_msg=False)
            inps.dateList = obj.dateList
            inps.pbaseList = obj.pbase
        else:
            raise ValueError('input file is not ifgramStack/timeseries, cannot read temporal/spatial baseline info.')
    else:
        print('reading temporal/spatial baselines from list file: '+inps.bl_list_file)
        inps.dateList, inps.pbaseList = pnet.read_baseline_file(inps.bl_list_file)[0:2]
    print('number of acquisitions: {}'.format(len(inps.dateList)))

    # 2. Read all date12 / interferogram pairs
    inps.date12List = pnet.get_date12_list(inps.file)
    print('reading interferograms info from file: {}'.format(inps.file))
    print('number of interferograms: {}'.format(len(inps.date12List)))

    if inps.save_list:
        txtFile = os.path.splitext(os.path.basename(inps.file))[0]+'_date12List.txt'
        np.savetxt(txtFile, inps.date12List, fmt='%s')
        print('save pairs/date12 info to file: '+txtFile)

    # Optional: Read dropped date12 / date
    inps.dateList_drop = []
    inps.date12List_drop = []
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.date12List_keep = ifgramStack(inps.file).get_date12_list(dropIfgram=True)
        inps.date12List_drop = sorted(list(set(inps.date12List) - set(inps.date12List_keep)))
        print('-'*50)
        print('number of interferograms marked as drop: {}'.format(len(inps.date12List_drop)))
        print('number of interferograms marked as keep: {}'.format(len(inps.date12List_keep)))

        mDates = [i.split('_')[0] for i in inps.date12List_keep]
        sDates = [i.split('_')[1] for i in inps.date12List_keep]
        inps.dateList_keep = sorted(list(set(mDates + sDates)))
        inps.dateList_drop = sorted(list(set(inps.dateList) - set(inps.dateList_keep)))
        print('number of acquisitions marked as drop: {}'.format(len(inps.dateList_drop)))
        if len(inps.dateList_drop) > 0:
            print(inps.dateList_drop)

    # Optional: Read coherence list
    inps.cohList = None
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.cohList, cohDate12List = ut.spatial_average(inps.file,
                                                         datasetName='coherence',
                                                         maskFile=inps.maskFile,
                                                         saveList=True,
                                                         checkAoi=False)
        if all(np.isnan(inps.cohList)):
            inps.cohList = None
            print('WARNING: all coherence values are nan! Do not use this and continue.')

        if set(cohDate12List) > set(inps.date12List):
            print('extract coherence value for all pairs/date12 in the input file')
            inps.cohList = [inps.cohList[cohDate12List.index(i)] for i in inps.date12List]
        elif set(cohDate12List) < set(inps.date12List):
            inps.cohList = None
            print('WARNING: not every pair/date12 from the input file is in the coherence file')
            print('turn off the color plotting of interferograms based on coherence')

    return inps
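
# Hedged usage sketch (not part of the original module): read_network_info() expects an
# argparse-style namespace carrying the attributes accessed above (file, bl_list_file,
# maskFile, save_list). The file names and values below are illustrative assumptions only.
def _example_read_network_info(stack_file='inputs/ifgramStack.h5'):
    """Hypothetical helper showing how read_network_info() is typically driven."""
    from argparse import Namespace
    inps = Namespace(file=stack_file,              # HDF5 ifgramStack file (assumed path)
                     bl_list_file='bl_list.txt',   # only read for non-HDF5 input
                     maskFile=None,                # optional mask for spatial_average()
                     save_list=False)              # set True to dump the date12 list to text
    inps = read_network_info(inps)
    print('pairs: {}, acquisitions: {}'.format(len(inps.date12List), len(inps.dateList)))
    return inps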
def main(argv):
    inps = cmdLineParse()
    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        if 'ref_y' not in list(atr.keys()) and inps.ref_yx:
            print('No reference info found in input file, use input ref_yx: '+str(inps.ref_yx))
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    # read incidence angle (file or constant value)
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle, datasetName='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        inps.inc_angle = float(inps.inc_angle)
        print('incidence angle: '+str(inps.inc_angle))
    cinc = np.cos(inps.inc_angle * np.pi / 180.0)

    # lookup file
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list([inps.lookup_file])[0]  # e.g. geomap_32rlks_tight.trans

    # GACOS: get weather directory
    delay_source = 'GACOS'
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(inps.lookup_file))+'/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())
    print('Store weather data into directory: '+inps.GACOS_dir)
    # TODO: add part to copy/download weather data

    # get date list
    if not inps.date_list_file:
        print('read date list info from: '+inps.timeseries_file)
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in list(h5.keys()):
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            ifgram_list = sorted(h5[k].keys())
            date12_list = pnet.get_date12_list(inps.timeseries_file)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Unsupported input file type: '+k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print('read date list info from: '+inps.date_list_file)

    # check availability of delays
    print('checking availability of delays')
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir+'/'+d+'.ztd'
            delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print('no missing files')
    else:
        print('no. of date files found:', len(delay_file_existed))
        print('no. of dates:', len(date_list))

    # calculate delays
    print('calculating delays')
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # initialize the delay time-series array
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    # read the GACOS file for each epoch and compute the delay
    for i in range(date_num):
        delay_file = delay_file_existed[i]
        date = date_list[i]
        print('calculating delay for date', date)
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)
    print('Delays Calculated')

    # convert to relative phase delay with respect to the reference date
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    print('convert to relative phase delay with reference date: '+ref_date)
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    # write tropospheric delay to HDF5
    tropFile = 'GACOSdelays.h5'
    print('writing >>> %s' % tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print('number of acquisitions: '+str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()

    # write attributes
    for key, value in atr.items():
        group_trop.attrs[key] = value
    h5trop.close()

    # write corrected time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_GACOS.h5'
        print('writing trop corrected timeseries file %s' % inps.out_file)
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print('number of acquisitions: '+str(date_num))
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            print(date)
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date, data=ts-trop_ts[i], compression='gzip')
            prog_bar.update(i+1, suffix=date)
        prog_bar.close()
        h5ts.close()

        # write attributes
        for key, value in atr.items():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print('delays written to %s' % inps.out_file)

    print('finished')
    return inps.out_file
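
# Hedged sketch (an assumption, not the original get_delay() implementation): the cinc factor
# computed in main() is typically used to map a GACOS zenith total delay (ZTD) into a slant
# delay along the radar line of sight, i.e. slant = ztd / cos(incidence_angle). The helper
# below only illustrates that projection for an already-resampled ZTD array; its name and
# signature are hypothetical.
def _zenith_to_slant_delay(ztd, inc_angle_deg):
    """Project a zenith delay (meters) to slant range using the incidence angle (degrees)."""
    import numpy as np
    cinc = np.cos(np.deg2rad(inc_angle_deg))
    return ztd / cinc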
def get_date12_to_drop(inps):
    """Get the list of date12 to drop.
    Return []   if no ifgram to drop, thus KEEP ALL ifgrams;
           None if nothing to change, exit without doing anything.
    """
    obj = ifgramStack(inps.file)
    obj.open()
    date12ListAll = obj.date12List
    dateList = obj.dateList
    print('number of interferograms: {}'.format(len(date12ListAll)))

    # Get date12_to_drop
    date12_to_drop = []

    # reference file
    if inps.referenceFile:
        date12_to_keep = pnet.get_date12_list(inps.referenceFile, dropIfgram=True)
        print('--------------------------------------------------')
        print('use reference pairs info from file: {}'.format(inps.referenceFile))
        print('number of interferograms in reference: {}'.format(len(date12_to_keep)))
        tempList = sorted(list(set(date12ListAll) - set(date12_to_keep)))
        date12_to_drop += tempList
        print('date12 not in reference file: ({})\n{}'.format(len(tempList), tempList))

    # temp baseline threshold
    if inps.tempBaseMax:
        tempIndex = np.abs(obj.tbaseIfgram) > inps.tempBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with temporal baseline > {} days: ({})\n{}'.format(
            inps.tempBaseMax, len(tempList), tempList))

    # perp baseline threshold
    if inps.perpBaseMax:
        tempIndex = np.abs(obj.pbaseIfgram) > inps.perpBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with perp baseline > {} meters: ({})\n{}'.format(
            inps.perpBaseMax, len(tempList), tempList))

    # connection number threshold
    if inps.connNumMax:
        seq_date12_list = pnet.select_pairs_sequential(dateList, inps.connNumMax)
        seq_date12_list = ptime.yyyymmdd_date12(seq_date12_list)
        tempList = [i for i in date12ListAll if i not in seq_date12_list]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        msg = 'Drop ifgrams with temporal baseline beyond {} neighbors: ({})'.format(
            inps.connNumMax, len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # excludeIfgIndex
    if inps.excludeIfgIndex:
        tempList = [date12ListAll[i] for i in inps.excludeIfgIndex]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with the following index number: {}'.format(len(tempList)))
        for i, date12 in enumerate(tempList):
            print('{} : {}'.format(i, date12))

    # excludeDate
    if inps.excludeDate:
        tempList = [i for i in date12ListAll if any(j in inps.excludeDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('-'*50+'\nDrop ifgrams including the following dates: ({})\n{}'.format(
            len(tempList), inps.excludeDate))
        print('-'*30+'\n{}'.format(tempList))

    # startDate
    if inps.startDate:
        minDate = int(inps.startDate)
        tempList = [i for i in date12ListAll if any(int(j) < minDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date earlier than: {} ({})\n{}'.format(
            inps.startDate, len(tempList), tempList))

    # endDate
    if inps.endDate:
        maxDate = int(inps.endDate)
        tempList = [i for i in date12ListAll if any(int(j) > maxDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date later than: {} ({})\n{}'.format(
            inps.endDate, len(tempList), tempList))

    # coherence file
    if inps.coherenceBased:
        print('--------------------------------------------------')
        print('use coherence-based network modification')

        # get area of interest for coherence calculation
        pix_box = get_aoi_pix_box(obj.metadata, inps.lookupFile, inps.aoi_pix_box, inps.aoi_geo_box)

        # calculate spatial average coherence
        cohList = ut.spatial_average(inps.file,
                                     datasetName='coherence',
                                     maskFile=inps.maskFile,
                                     box=pix_box,
                                     saveList=True)[0]

        # get coherence-based network
        coh_date12_list = list(np.array(date12ListAll)[np.array(cohList) >= inps.minCoherence])

        # get MST network
        mst_date12_list, msg = get_mst_date12(inps.keepMinSpanTree, cohList, date12ListAll, date12_to_drop,
                                              min_par=inps.minCoherence,
                                              par_name='average coherence')

        # drop all pairs below the coherence threshold AND not in the MST
        tempList = sorted(list(set(date12ListAll) - set(coh_date12_list + mst_date12_list)))
        date12_to_drop += tempList

        msg += '({})'.format(len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # area ratio file
    if inps.areaRatioBased:
        print('--------------------------------------------------')
        print('use area-ratio-based network modification')

        # get area of interest for coherence calculation
        pix_box = get_aoi_pix_box(obj.metadata, inps.lookupFile, inps.aoi_pix_box, inps.aoi_geo_box)

        # calculate average coherence in masked-out areas as threshold
        meanMaskCoh = np.nanmean(ut.spatial_average(inps.file,
                                                    datasetName='coherence',
                                                    maskFile=inps.maskFile,
                                                    saveList=True,
                                                    reverseMask=True)[0])
        print(f'Average coherence of {inps.maskFile} reverse is {meanMaskCoh:.2f}')

        # calculate area ratio with pixels greater than meanMaskCoh
        areaRatioList = ut.spatial_average(inps.file,
                                           datasetName='coherence',
                                           maskFile=inps.maskFile,
                                           box=pix_box,
                                           saveList=True,
                                           checkAoi=True,
                                           threshold=meanMaskCoh)[0]

        # get area-ratio-based network
        area_ratio_date12_list = list(np.array(date12ListAll)[np.array(areaRatioList) >= inps.minAreaRatio])

        # get MST network
        mst_date12_list, msg = get_mst_date12(inps.keepMinSpanTree, areaRatioList, date12ListAll, date12_to_drop,
                                              min_par=inps.minAreaRatio,
                                              par_name='coherent area ratio')

        # drop all pairs below the area-ratio threshold AND not in the MST
        tempList = sorted(list(set(date12ListAll) - set(area_ratio_date12_list + mst_date12_list)))
        date12_to_drop += tempList

        msg += '({})'.format(len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # Manually drop pairs
    if inps.manual:
        tempList = manual_select_pairs_to_remove(inps.file)
        if tempList is None:
            return None
        tempList = [i for i in tempList if i in date12ListAll]
        print('date12 selected to remove: ({})\n{}'.format(len(tempList), tempList))
        date12_to_drop += tempList

    ## summary
    # drop duplicate date12 and sort in order
    date12_to_drop = sorted(list(set(date12_to_drop)))
    date12_to_keep = sorted(list(set(date12ListAll) - set(date12_to_drop)))
    print('--------------------------------------------------')
    print('number of interferograms to remove: {}'.format(len(date12_to_drop)))
    print('number of interferograms to keep  : {}'.format(len(date12_to_keep)))

    # print list of dates to drop
    date_to_keep = [d for date12 in date12_to_keep for d in date12.split('_')]
    date_to_keep = sorted(list(set(date_to_keep)))
    date_to_drop = sorted(list(set(dateList) - set(date_to_keep)))
    if len(date_to_drop) > 0:
        print('number of acquisitions to remove: {}\n{}'.format(len(date_to_drop), date_to_drop))

    # checking:
    # 1) no new date12 to drop against the existing file
    # 2) no date12 left after dropping
    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))
    if date12_to_drop == date12ListDropped:
        print('Calculated date12 to drop is the same as the existing marked input file, skip updating file.')
        date12_to_drop = None
    elif date12_to_drop == date12ListAll:
        raise Exception('Zero interferogram left! Please adjust your setting and try again.')

    return date12_to_drop
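
# Hedged usage sketch (not part of the original module): get_date12_to_drop() reads its
# options from an argparse-style namespace. The attribute values below are illustrative
# assumptions; unused criteria are simply disabled with None/False/[].
def _example_get_date12_to_drop(stack_file='inputs/ifgramStack.h5'):
    """Hypothetical helper: drop pairs with long baselines or low average coherence."""
    from argparse import Namespace
    inps = Namespace(
        file=stack_file,
        referenceFile=None,
        tempBaseMax=120,                 # days (assumed threshold)
        perpBaseMax=200,                 # meters (assumed threshold)
        connNumMax=None,
        excludeIfgIndex=[], excludeDate=[], startDate=None, endDate=None,
        coherenceBased=True, minCoherence=0.7, keepMinSpanTree=True,
        areaRatioBased=False, minAreaRatio=0.75,
        lookupFile=None, aoi_pix_box=None, aoi_geo_box=None, maskFile=None,
        manual=False,
    )
    return get_date12_to_drop(inps)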
# Alternative (earlier) variant of get_date12_to_drop, with the coherence-based
# MST logic inlined instead of factored into helper functions.
def get_date12_to_drop(inps):
    """Get the list of date12 to drop.
    Return []   if no ifgram to drop, thus KEEP ALL ifgrams;
           None if nothing to change, exit without doing anything.
    """
    obj = ifgramStack(inps.file)
    obj.open()
    date12ListAll = obj.date12List
    dateList = obj.dateList
    print('number of interferograms: {}'.format(len(date12ListAll)))

    # Get date12_to_drop
    date12_to_drop = []

    # reference file
    if inps.referenceFile:
        date12_to_keep = pnet.get_date12_list(inps.referenceFile, dropIfgram=True)
        print('--------------------------------------------------')
        print('use reference pairs info from file: {}'.format(inps.referenceFile))
        print('number of interferograms in reference: {}'.format(len(date12_to_keep)))
        tempList = sorted(list(set(date12ListAll) - set(date12_to_keep)))
        date12_to_drop += tempList
        print('date12 not in reference file: ({})\n{}'.format(len(tempList), tempList))

    # coherence file
    if inps.coherenceBased:
        print('--------------------------------------------------')
        print('use coherence-based network modification')

        coord = ut.coordinate(obj.metadata, lookup_file=inps.lookupFile)
        if inps.aoi_geo_box and inps.lookupFile:
            print('input AOI in (lon0, lat1, lon1, lat0): {}'.format(inps.aoi_geo_box))
            inps.aoi_pix_box = coord.bbox_geo2radar(inps.aoi_geo_box)
        if inps.aoi_pix_box:
            inps.aoi_pix_box = coord.check_box_within_data_coverage(inps.aoi_pix_box)
            print('input AOI in (x0,y0,x1,y1): {}'.format(inps.aoi_pix_box))

        # calculate spatial average coherence
        cohList = ut.spatial_average(inps.file,
                                     datasetName='coherence',
                                     maskFile=inps.maskFile,
                                     box=inps.aoi_pix_box,
                                     saveList=True)[0]
        coh_date12_list = list(np.array(date12ListAll)[np.array(cohList) >= inps.minCoherence])

        # MST network
        if inps.keepMinSpanTree:
            print('Get minimum spanning tree (MST) of interferograms with inverse of coherence.')
            msg = ('Drop ifgrams with '
                   '1) average coherence < {} AND '
                   '2) not in MST network: '.format(inps.minCoherence))
            mst_date12_list = pnet.threshold_coherence_based_mst(date12ListAll, cohList)
            mst_date12_list = ptime.yyyymmdd_date12(mst_date12_list)
        else:
            msg = 'Drop ifgrams with average coherence < {}: '.format(inps.minCoherence)
            mst_date12_list = []

        tempList = sorted(list(set(date12ListAll) - set(coh_date12_list + mst_date12_list)))
        date12_to_drop += tempList
        msg += '({})'.format(len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # temp baseline threshold
    if inps.tempBaseMax:
        tempIndex = np.abs(obj.tbaseIfgram) > inps.tempBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with temporal baseline > {} days: ({})\n{}'.format(
            inps.tempBaseMax, len(tempList), tempList))

    # perp baseline threshold
    if inps.perpBaseMax:
        tempIndex = np.abs(obj.pbaseIfgram) > inps.perpBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with perp baseline > {} meters: ({})\n{}'.format(
            inps.perpBaseMax, len(tempList), tempList))

    # connection number threshold
    if inps.connNumMax:
        seq_date12_list = pnet.select_pairs_sequential(dateList, inps.connNumMax)
        seq_date12_list = ptime.yyyymmdd_date12(seq_date12_list)
        tempList = [i for i in date12ListAll if i not in seq_date12_list]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        msg = 'Drop ifgrams with temporal baseline beyond {} neighbors: ({})'.format(
            inps.connNumMax, len(tempList))
        if len(tempList) <= 200:
            msg += '\n{}'.format(tempList)
        print(msg)

    # excludeIfgIndex
    if inps.excludeIfgIndex:
        tempList = [date12ListAll[i] for i in inps.excludeIfgIndex]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with the following index number: {}'.format(len(tempList)))
        for i in range(len(tempList)):
            print('{} : {}'.format(i, tempList[i]))

    # excludeDate
    if inps.excludeDate:
        tempList = [i for i in date12ListAll if any(j in inps.excludeDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('-' * 50 + '\nDrop ifgrams including the following dates: ({})\n{}'.format(
            len(tempList), inps.excludeDate))
        print('-' * 30 + '\n{}'.format(tempList))

    # startDate
    if inps.startDate:
        minDate = int(inps.startDate)
        tempList = [i for i in date12ListAll if any(int(j) < minDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date earlier than: {} ({})\n{}'.format(
            inps.startDate, len(tempList), tempList))

    # endDate
    if inps.endDate:
        maxDate = int(inps.endDate)
        tempList = [i for i in date12ListAll if any(int(j) > maxDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date later than: {} ({})\n{}'.format(
            inps.endDate, len(tempList), tempList))

    # Manually drop pairs
    if inps.manual:
        tempList = manual_select_pairs_to_remove(inps.file)
        if tempList is None:
            return None
        tempList = [i for i in tempList if i in date12ListAll]
        print('date12 selected to remove: ({})\n{}'.format(len(tempList), tempList))
        date12_to_drop += tempList

    # drop duplicate date12 and sort in order
    date12_to_drop = sorted(list(set(date12_to_drop)))
    date12_to_keep = sorted(list(set(date12ListAll) - set(date12_to_drop)))
    print('--------------------------------------------------')
    print('number of interferograms to remove: {}'.format(len(date12_to_drop)))
    print('number of interferograms to keep  : {}'.format(len(date12_to_keep)))

    date_to_keep = [d for date12 in date12_to_keep for d in date12.split('_')]
    date_to_keep = sorted(list(set(date_to_keep)))
    date_to_drop = sorted(list(set(dateList) - set(date_to_keep)))
    if len(date_to_drop) > 0:
        print('number of acquisitions to remove: {}\n{}'.format(len(date_to_drop), date_to_drop))

    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))
    if date12_to_drop == date12ListDropped:
        print('Calculated date12 to drop is the same as the existing marked input file, skip updating file.')
        date12_to_drop = None
    elif date12_to_drop == date12ListAll:
        raise Exception('Zero interferogram left! Please adjust your setting and try again.')

    return date12_to_drop
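
# Hedged illustration (not the pnet.threshold_coherence_based_mst implementation): the
# coherence-based MST step above keeps the spanning tree of the acquisition graph whose edge
# weights are the inverse of the pair coherence, so the most coherent pairs still connect all
# dates. This sketch shows the idea with scipy's minimum_spanning_tree; names are assumptions.
def _mst_pairs_by_coherence(date12_list, coh_list):
    """Return the date12 pairs forming an MST weighted by 1/coherence."""
    import numpy as np
    from scipy.sparse import csr_matrix
    from scipy.sparse.csgraph import minimum_spanning_tree

    dates = sorted({d for pair in date12_list for d in pair.split('_')})
    idx = {d: i for i, d in enumerate(dates)}
    n = len(dates)

    # build the weighted adjacency matrix; higher coherence -> lower edge weight
    weight = np.zeros((n, n), dtype=np.float32)
    for pair, coh in zip(date12_list, coh_list):
        d1, d2 = pair.split('_')
        weight[idx[d1], idx[d2]] = 1.0 / max(coh, 1e-3)

    mst = minimum_spanning_tree(csr_matrix(weight)).toarray()
    rows, cols = np.nonzero(mst)
    return ['{}_{}'.format(dates[min(r, c)], dates[max(r, c)]) for r, c in zip(rows, cols)]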