def create_subset_dataset(inps, pix_box=None, geo_box=None):
    '''Create/prepare subset of datasets in a separate folder for time series analysis.
    For datasets (unwrapped interferograms) in radar coord, only subset in row/col (y/x) is supported.
    For datasets (unwrapped interferograms) in geo coord, lalo has higher priority than yx, if both are specified.
    '''
    # Subset directory
    subset_dir = inps.work_dir + '/subset'
    if not os.path.isdir(subset_dir):
        os.mkdir(subset_dir)
    os.chdir(subset_dir)
    print '\n--------------------------------------------'
    print 'Creating subset datasets ...'
    print "Go to directory: " + subset_dir

    atr = readfile.read_attribute(inps.ifgram_file)
    if not 'X_FIRST' in atr.keys():
        print 'Loaded dataset is in radar coordinate.'
        geomap_file_orig = inps.geomap_file

        # subset.lalo has higher priority than subset.yx, except when no geomap*.trans exists.
        # don't subset if only subset.lalo is given without geomap*.trans, because glob2radar won't be accurate.
        if geo_box:
            if inps.geomap_file:
                pix_box = None
            else:
                geo_box = None
                print 'turn off subset.lalo because no geomap*.trans file exists.'
        if not geo_box and not pix_box:
            sys.exit('ERROR: no valid subset input!')

        # Calculate subset range in lat/lon for geo files and y/x for radar files
        if geo_box:
            print 'use subset input in lat/lon'
            print 'calculate corresponding bounding box in radar coordinate.'
            pix_box = subset.bbox_geo2radar(geo_box, atr, geomap_file_orig)
        else:
            print 'use subset input in y/x'
            print 'calculate corresponding bounding box in geo coordinate.'
            geo_box = subset.bbox_radar2geo(pix_box, atr, geomap_file_orig)

        # subset
        inps = subset_dataset(inps, geo_box, pix_box)
    else:
        print 'Loaded dataset is in geo coordinate.'

    return inps
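# ----------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the original module):
# create_subset_dataset() expects an `inps` namespace that carries at least
# work_dir, ifgram_file and geomap_file, plus an optional bounding box.
# Box ordering here follows the slicing used elsewhere in this codebase:
# pix_box as (x0, y0, x1, y1) and geo_box as (UL lon, UL lat, LR lon, LR lat).
#
#     from argparse import Namespace
#     inps = Namespace(work_dir='./PYSAR',
#                      ifgram_file='unwrapIfgram.h5',
#                      geomap_file='geomap_4rlks.trans')
#     inps = create_subset_dataset(inps, geo_box=(130.0, 32.5, 130.8, 31.9))
# ----------------------------------------------------------------------------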
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based,\
                           inps.start_date, inps.end_date, inps.reset]):
        # Display network for manual modification when there is no other modification input.
        print 'No input option found to remove interferogram'
        if inps.template_file:
            print 'Keep all interferograms by enabling --reset option'
            inps.reset = True
        else:
            print 'To manually modify the network, please use the --manual option'
            return

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)
        mean_coh_txt_file = os.path.splitext(inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)
        return

    # Convert colon-separated index input into a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index: '+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file, check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.lookup_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr, inps.lookup_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                       inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from temporal baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if abs(ifg_bperp_list[i]) > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + ' ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) < min_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) > max_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5, print_msg=False)
    ifgram_list_dropped = sorted(list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing marked input file, skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update input files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv, inps.mark_attribute)
            k = readfile.read_attribute(File)['FILE_TYPE']

            # Update mask file
            if k == 'interferograms' and inps.update_aux:
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)
            elif k == 'coherence' and inps.update_aux:
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch the spatial average txt file of coherence if it exists
                coh_spatialAverage_file = os.path.splitext(Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
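# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original script): the index handling in
# main() above expands colon-separated, 1-based ranges such as '5:8' into a
# continuous index list before mapping them to date12 pairs. A standalone,
# dependency-free version of that expansion could look like this:
def expand_index_ranges(index_inputs):
    """Expand ['3', '5:8'] into [3, 5, 6, 7, 8] (1-based, sorted, unique)."""
    indices = []
    for item in index_inputs:
        parts = sorted(int(i) for i in item.split(':'))
        if len(parts) == 2:
            indices.extend(range(parts[0], parts[1] + 1))
        elif len(parts) == 1:
            indices.append(parts[0])
        else:
            raise ValueError('Unrecognized input: ' + item)
    return sorted(set(indices))
# Example: expand_index_ranges(['3', '5:8']) -> [3, 5, 6, 7, 8]
# ----------------------------------------------------------------------------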
def read_subset_box(inpsDict):
    # Read subset info from template
    inpsDict['box'] = None
    inpsDict['box4geo_lut'] = None
    pix_box, geo_box = subset.read_subset_template2box(inpsDict['template_file'])

    # Grab required info to read input geo_box into pix_box
    try:
        lookupFile = [glob.glob(str(inpsDict['pysar.load.lookupYFile']))[0],
                      glob.glob(str(inpsDict['pysar.load.lookupXFile']))[0]]
    except:
        lookupFile = None

    try:
        pathKey = [i for i in datasetName2templateKey.values()
                   if i in inpsDict.keys()][0]
        file = glob.glob(str(inpsDict[pathKey]))[0]
        atr = readfile.read_attribute(file)
    except:
        atr = dict()

    geocoded = 'Y_FIRST' in atr.keys()

    # Check conflict
    if geo_box and not geocoded and lookupFile is None:
        geo_box = None
        print(('WARNING: pysar.subset.lalo is not supported'
               ' if 1) no lookup file AND'
               ' 2) radar/unknown coded dataset'))
        print('\tignore it and continue.')
    if not geo_box and not pix_box:
        return inpsDict

    # geo_box --> pix_box
    if geo_box is not None:
        if geocoded:
            pix_box = [0, 0, 0, 0]
            pix_box[0:3:2] = ut.coord_geo2radar(geo_box[0:3:2], atr, 'lon')
            pix_box[1:4:2] = ut.coord_geo2radar(geo_box[1:4:2], atr, 'lat')
            pix_box = tuple(pix_box)
        else:
            pix_box = subset.bbox_geo2radar(geo_box, atr, lookupFile)
        print('input bounding box of interest in lalo: {}'.format(geo_box))
    print('box to read for datasets in y/x: {}'.format(pix_box))

    # Get box for geocoded lookup table (for gamma/roipac)
    box4geo_lut = None
    if lookupFile is not None:
        atrLut = readfile.read_attribute(lookupFile[0])
        if not geocoded and 'Y_FIRST' in atrLut.keys():
            geo_box = subset.bbox_radar2geo(pix_box, atr, lookupFile)
            box4geo_lut = subset.bbox_geo2radar(geo_box, atrLut)
            print('box to read for geocoded lookup file in y/x: {}'.format(box4geo_lut))

    inpsDict['box'] = pix_box
    inpsDict['box4geo_lut'] = box4geo_lut
    return inpsDict
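# ----------------------------------------------------------------------------
# Illustrative sketch (an assumption, not the project's API): for a regularly
# gridded, geocoded dataset, the geo_box -> pix_box conversion used above
# reduces to simple arithmetic on Y_FIRST/Y_STEP and X_FIRST/X_STEP metadata
# (ROI_PAC-style attribute names; the real code delegates to ut.coord_geo2radar
# and subset.bbox_geo2radar instead).
def geo_box_to_pix_box(geo_box, atr):
    """Convert geo_box (UL lon, UL lat, LR lon, LR lat) in degrees
    to pix_box (x0, y0, x1, y1) in pixels, for a geocoded grid."""
    lon0, lat0, lon1, lat1 = geo_box
    x0 = int(round((lon0 - float(atr['X_FIRST'])) / float(atr['X_STEP'])))
    x1 = int(round((lon1 - float(atr['X_FIRST'])) / float(atr['X_STEP'])))
    y0 = int(round((lat0 - float(atr['Y_FIRST'])) / float(atr['Y_STEP'])))
    y1 = int(round((lat1 - float(atr['Y_FIRST'])) / float(atr['Y_STEP'])))
    return (x0, y0, x1, y1)

# Example with a 0.001-degree grid whose upper-left corner is (31.0N, 130.0E):
#   geo_box_to_pix_box((130.2, 30.8, 130.4, 30.6),
#                      {'X_FIRST': '130.0', 'X_STEP': '0.001',
#                       'Y_FIRST': '31.0', 'Y_STEP': '-0.001'})
#   -> (200, 200, 400, 400)
# ----------------------------------------------------------------------------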
def get_date12_to_drop(inps):
    """Get date12 list to drop.
    Return [] if no ifgram to drop, thus KEEP ALL ifgrams;
           None if nothing to change, exit without doing anything.
    """
    obj = ifgramStack(inps.file)
    obj.open()
    date12ListAll = obj.date12List
    print('number of interferograms: {}'.format(len(date12ListAll)))

    # Get date12_to_drop
    date12_to_drop = []

    # 1. Update date12_to_drop from reference file
    if inps.referenceFile:
        date12_to_keep = ifgramStack(inps.referenceFile).get_date12_list(dropIfgram=True)
        print('--------------------------------------------------')
        print('use reference pairs info from file: {}'.format(inps.referenceFile))
        print('number of interferograms in reference: {}'.format(len(date12_to_keep)))
        tempList = sorted(list(set(date12ListAll) - set(date12_to_keep)))
        date12_to_drop += tempList
        print('date12 not in reference file: ({})\n{}'.format(len(tempList), tempList))

    # 2.1 Update date12_to_drop from coherence file
    if inps.coherenceBased:
        print('--------------------------------------------------')
        print('use coherence-based network modification')
        if inps.aoi_geo_box and inps.lookupFile:
            print('input AOI in (lon0, lat1, lon1, lat0): {}'.format(inps.aoi_geo_box))
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box,
                                                     obj.metadata,
                                                     inps.lookupFile)
        if inps.aoi_pix_box:
            inps.aoi_pix_box = subset.check_box_within_data_coverage(inps.aoi_pix_box,
                                                                     obj.metadata)
            print('input AOI in (x0,y0,x1,y1): {}'.format(inps.aoi_pix_box))

        # Calculate spatial average coherence
        cohList = ut.spatial_average(inps.file,
                                     datasetName='coherence',
                                     maskFile=inps.maskFile,
                                     box=inps.aoi_pix_box,
                                     saveList=True)[0]
        # date12 with average coherence >= threshold, i.e. pairs that pass the coherence test
        coh_date12_list = list(np.array(date12ListAll)[np.array(cohList) >= inps.minCoherence])

        # MST network
        if inps.keepMinSpanTree:
            print('Get minimum spanning tree (MST) of interferograms with inverse of coherence.')
            msg = 'Drop ifgrams with 1) average coherence < {} AND 2) not in MST network: '.format(inps.minCoherence)
            mst_date12_list = pnet.threshold_coherence_based_mst(date12ListAll, cohList)
        else:
            msg = 'Drop ifgrams with average coherence < {}: '.format(inps.minCoherence)
            mst_date12_list = []

        # drop pairs that fail the coherence test and are not needed to keep the MST network connected
        tempList = [i for i in date12ListAll
                    if (i not in coh_date12_list and i not in mst_date12_list)]
        date12_to_drop += tempList
        print(msg + '({})\n{}'.format(len(tempList), tempList))

    # 2.2 Update date12_to_drop from temp baseline threshold
    if inps.tempBaseMax:
        tempIndex = np.abs(obj.tbaseIfgram) > inps.tempBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with temporal baseline > {} days: ({})\n{}'.format(
            inps.tempBaseMax, len(tempList), tempList))

    # 2.3 Update date12_to_drop from perp baseline threshold
    if inps.perpBaseMax:
        tempIndex = np.abs(obj.pbaseIfgram) > inps.perpBaseMax
        tempList = list(np.array(date12ListAll)[tempIndex])
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with perp baseline > {} meters: ({})\n{}'.format(
            inps.perpBaseMax, len(tempList), tempList))

    # 2.4 Update date12_to_drop from excludeIfgIndex
    if inps.excludeIfgIndex:
        tempList = [date12ListAll[i] for i in inps.excludeIfgIndex]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with the following index number: ({})\n{}'.format(
            len(tempList), list(zip(inps.excludeIfgIndex, tempList))))

    # 2.5 Update date12_to_drop from excludeDate
    if inps.excludeDate:
        tempList = [i for i in date12ListAll
                    if any(j in inps.excludeDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('-' * 50 + '\nDrop ifgrams including the following dates:\n{}'.format(inps.excludeDate))
        print('-' * 20 + 'Ifgrams dropped: ({})\n{}'.format(len(tempList), tempList))

    # 2.6 Update date12_to_drop from startDate
    if inps.startDate:
        minDate = int(inps.startDate)
        tempList = [i for i in date12ListAll
                    if any(int(j) < minDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date earlier than: {} ({})\n{}'.format(
            inps.startDate, len(tempList), tempList))

    # 2.7 Update date12_to_drop from endDate
    if inps.endDate:
        maxDate = int(inps.endDate)
        tempList = [i for i in date12ListAll
                    if any(int(j) > maxDate for j in i.split('_'))]
        date12_to_drop += tempList
        print('--------------------------------------------------')
        print('Drop ifgrams with date later than: {} ({})\n{}'.format(
            inps.endDate, len(tempList), tempList))

    # 3. Manually drop pairs
    if inps.manual:
        tempList = manual_select_pairs_to_remove(inps.file)
        if tempList is None:
            return None
        tempList = [i for i in tempList if i in date12ListAll]
        print('date12 selected to remove: ({})\n{}'.format(len(tempList), tempList))
        date12_to_drop += tempList

    # 4. drop duplicate date12 and sort in order
    date12_to_drop = sorted(list(set(date12_to_drop)))
    date12_to_keep = sorted(list(set(date12ListAll) - set(date12_to_drop)))
    print('--------------------------------------------------')
    print('number of interferograms to remove: {}'.format(len(date12_to_drop)))
    print('number of interferograms to keep  : {}'.format(len(date12_to_keep)))

    date12ListKept = obj.get_date12_list(dropIfgram=True)
    date12ListDropped = sorted(list(set(date12ListAll) - set(date12ListKept)))
    if date12_to_drop == date12ListDropped:
        print('Calculated date12 to drop is the same as the existing marked input file, skip updating file.')
        date12_to_drop = None

    return date12_to_drop
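# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): steps 2.6/2.7 above
# reduce to a plain date-window filter on 'YYYYMMDD_YYYYMMDD' pair names.
# A dependency-free version of that filter, for reference:
def date12_outside_window(date12_list, start_date=None, end_date=None):
    """Return pairs with at least one acquisition outside [start_date, end_date].
    Dates are 'YYYYMMDD' strings; pair names are 'YYYYMMDD_YYYYMMDD'."""
    drop_list = []
    for date12 in date12_list:
        dates = [int(d) for d in date12.split('_')]
        too_early = start_date and any(d < int(start_date) for d in dates)
        too_late = end_date and any(d > int(end_date) for d in dates)
        if too_early or too_late:
            drop_list.append(date12)
    return drop_list

# Example:
#   date12_outside_window(['20141231_20150112', '20150112_20150205'],
#                         start_date='20150101')
#   -> ['20141231_20150112']
# ----------------------------------------------------------------------------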