def run_network_modification(self, step_name):
    """Modify network of interferograms before the network inversion."""
    # check the existence of ifgramStack.h5
    stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:3]
    coh_txt = '{}_coherence_spatialAvg.txt'.format(os.path.splitext(os.path.basename(stack_file))[0])
    try:
        net_fig = [i for i in ['Network.pdf', 'pic/Network.pdf'] if os.path.isfile(i)][0]
    except:
        net_fig = None

    # 1) output waterMask.h5 to simplify the detection/use of waterMask
    water_mask_file = 'waterMask.h5'
    if 'waterMask' in readfile.get_dataset_list(geom_file):
        print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
        if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
            water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
            atr['FILE_TYPE'] = 'waterMask'
            writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

    # 2) modify network
    scp_args = '{} -t {}'.format(stack_file, self.templateFile)
    print('modify_network.py', scp_args)
    mintpy.modify_network.main(scp_args.split())

    # 3) plot network
    scp_args = '{} -t {} --nodisplay'.format(stack_file, self.templateFile)
    print('\nplot_network.py', scp_args)
    if ut.run_or_skip(out_file=net_fig,
                      in_file=[stack_file, coh_txt, self.templateFile],
                      check_readable=False) == 'run':
        mintpy.plot_network.main(scp_args.split())

    # 4) aux files: maskConnComp and avgSpatialCoh
    self.generate_ifgram_aux_file()
    return

def run_network_modification(self, step_name, plot=True):
    """Modify network of interferograms before the network inversion."""
    # check the existence of ifgramStack.h5
    stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:3]
    coh_txt = 'coherenceSpatialAvg.txt'
    try:
        net_fig = [i for i in ['network.pdf', 'pic/network.pdf'] if os.path.isfile(i)][0]
    except:
        net_fig = None

    # 1) output waterMask.h5 to simplify the detection/use of waterMask
    water_mask_file = 'waterMask.h5'
    if 'waterMask' in readfile.get_dataset_list(geom_file):
        print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
        if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
            water_mask, atr = readfile.read(geom_file, datasetName='waterMask')

            # ignore no-data pixels in geometry files
            ds_name_list = readfile.get_dataset_list(geom_file)
            for ds_name in ['latitude', 'longitude']:
                if ds_name in ds_name_list:
                    print('set pixels with 0 in {} to 0 in waterMask'.format(ds_name))
                    ds = readfile.read(geom_file, datasetName=ds_name)[0]
                    water_mask[ds == 0] = 0

            atr['FILE_TYPE'] = 'waterMask'
            writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

    # 2) modify network
    iargs = [stack_file, '-t', self.templateFile]
    print('\nmodify_network.py', ' '.join(iargs))
    mintpy.modify_network.main(iargs)

    # 3) plot network
    iargs = [stack_file, '-t', self.templateFile, '--nodisplay']
    dsNames = readfile.get_dataset_list(stack_file)
    if any('phase' in i.lower() for i in dsNames):
        iargs += ['-d', 'coherence', '-v', '0.2', '1.0']
    elif any('offset' in i.lower() for i in dsNames):
        iargs += ['-d', 'offsetSNR', '-v', '0', '20']
    print('\nplot_network.py', ' '.join(iargs))

    # run
    if self.template['mintpy.plot'] and plot:
        if ut.run_or_skip(out_file=net_fig,
                          in_file=[stack_file, coh_txt, self.templateFile],
                          check_readable=False) == 'run':
            mintpy.plot_network.main(iargs)
    else:
        print('mintpy.plot is turned OFF, skip plotting network.')
    return

def run_network_modification(self, step_name, plot=True):
    """Modify network of interferograms before the network inversion."""
    # check the existence of ifgramStack.h5
    stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:3]
    coh_txt = '{}_coherence_spatialAvg.txt'.format(os.path.splitext(os.path.basename(stack_file))[0])
    try:
        net_fig = [i for i in ['Network.pdf', 'pic/Network.pdf'] if os.path.isfile(i)][0]
    except:
        net_fig = None

    # 1) output waterMask.h5 to simplify the detection/use of waterMask
    water_mask_file = 'waterMask.h5'
    if 'waterMask' in readfile.get_dataset_list(geom_file):
        print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
        if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
            water_mask, atr = readfile.read(geom_file, datasetName='waterMask')

            # ignore no-data pixels in geometry files
            ds_name_list = readfile.get_dataset_list(geom_file)
            for ds_name in ['latitude', 'longitude']:
                if ds_name in ds_name_list:
                    print('set pixels with 0 in {} to 0 in waterMask'.format(ds_name))
                    ds = readfile.read(geom_file, datasetName=ds_name)[0]
                    water_mask[ds == 0] = 0

            atr['FILE_TYPE'] = 'waterMask'
            writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

    # 2) modify network
    scp_args = '{} -t {}'.format(stack_file, self.templateFile)
    print('modify_network.py', scp_args)
    mintpy.modify_network.main(scp_args.split())

    # 3) plot network
    if self.template['mintpy.plot'] and plot:
        scp_args = '{} -t {} --nodisplay'.format(stack_file, self.templateFile)
        dsNames = readfile.get_dataset_list(stack_file)
        if any('phase' in i.lower() for i in dsNames):
            scp_args += ' -d coherence -v 0.2 1.0'
        elif any('offset' in i.lower() for i in dsNames):
            scp_args += ' -d offsetSNR -v 0 20'
        print('\nplot_network.py', scp_args)
        if ut.run_or_skip(out_file=net_fig,
                          in_file=[stack_file, coh_txt, self.templateFile],
                          check_readable=False) == 'run':
            mintpy.plot_network.main(scp_args.split())

    # 4) aux files: maskConnComp and avgSpatialCoh
    self.generate_ifgram_aux_file()
    return

def run_or_skip(iono_file, grib_files, dis_file, geom_file):
    print('update mode: ON')
    print('output file: {}'.format(iono_file))
    flag = 'skip'

    # check existence and modification time
    if ut.run_or_skip(out_file=iono_file, in_file=grib_files, print_msg=False) == 'run':
        flag = 'run'
        print('1) output file either does NOT exist or is NOT newer than all IONEX files.')
    else:
        print('1) output file exists and is newer than all IONEX files.')

        # check dataset size in space / time
        ds_size_dis = get_dataset_size(dis_file)
        ds_size_ion = get_dataset_size(geom_file)
        date_list_dis = timeseries(dis_file).get_date_list()
        date_list_ion = timeseries(iono_file).get_date_list()
        if ds_size_ion != ds_size_dis or any(x not in date_list_ion for x in date_list_dis):
            flag = 'run'
            print(f'2) output file does NOT have the same len/wid as the geometry file {geom_file} or does NOT contain all dates')
        else:
            print('2) output file has the same len/wid as the geometry file and contains all dates')

            # check if output file is fully written
            with h5py.File(iono_file, 'r') as f:
                if np.all(f['timeseries'][-1, :, :] == 0):
                    flag = 'run'
                    print('3) output file is NOT fully written.')
                else:
                    print('3) output file is fully written.')

    # result
    print('run or skip: {}'.format(flag))
    return flag

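# Both run_or_skip() variants in this section rely on a module-level
# get_dataset_size() helper. A minimal sketch of it, matching the inline
# copies defined inside the get_delay_timeseries() functions further down:
def get_dataset_size(fname):
    """Return (LENGTH, WIDTH) of a data file from its metadata."""
    atr = readfile.read_attribute(fname)
    return (atr['LENGTH'], atr['WIDTH'])
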
def update_object(outFile, inObj, box, updateMode=True):
    """Do not write h5 file if: 1) h5 exists and readable,
                                2) it contains all date12 from ifgramStackDict,
                                   or all datasets from geometryDict"""
    write_flag = True
    if updateMode and ut.run_or_skip(outFile, check_readable=True) == 'skip':
        if inObj.name == 'ifgramStack':
            in_size = inObj.get_size(box=box)[1:]
            in_date12_list = inObj.get_date12_list()

            outObj = ifgramStack(outFile)
            out_size = outObj.get_size()[1:]
            out_date12_list = outObj.get_date12_list(dropIfgram=False)

            if out_size == in_size and set(in_date12_list).issubset(set(out_date12_list)):
                print(('All date12 exist in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False

        elif inObj.name == 'geometry':
            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and all(i in outObj.datasetNames for i in inObj.get_dataset_list())):
                print(('All datasets exist in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(outFile))))
                write_flag = False
    return write_flag

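# A hypothetical usage sketch of update_object() during stack loading;
# stackObj, box, and write_ifgram_stack() are illustrative placeholders,
# not names confirmed by this section:
if update_object(outFile, stackObj, box, updateMode=True):
    # existing file is missing, unreadable, or incomplete -> (re-)write it
    write_ifgram_stack(outFile, stackObj, box)
else:
    print('skip re-writing: {}'.format(os.path.basename(outFile)))
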
def run_save2google_earth(self, step_name):
    """Save velocity file in geo coordinates into Google Earth raster image."""
    if self.template['mintpy.save.kmz'] is True:
        print('creating Google Earth KMZ file for geocoded velocity file: ...')

        # input
        vel_file = 'velocity.h5'
        atr = readfile.read_attribute(vel_file)
        if 'Y_FIRST' not in atr.keys():
            vel_file = os.path.join(self.workDir, 'geo/geo_velocity.h5')

        # output
        kmz_file = '{}.kmz'.format(os.path.splitext(vel_file)[0])
        scp_args = '{} -o {}'.format(vel_file, kmz_file)
        print('save_kmz.py', scp_args)

        # update mode
        try:
            fbase = os.path.basename(kmz_file)
            kmz_file = [i for i in [fbase, './geo/{}'.format(fbase), './pic/{}'.format(fbase)]
                        if os.path.isfile(i)][0]
        except:
            kmz_file = None
        if ut.run_or_skip(out_file=kmz_file, in_file=vel_file, check_readable=False) == 'run':
            mintpy.save_kmz.main(scp_args.split())
    else:
        print('save velocity to Google Earth format is OFF.')
    return

def run_network_modification(self, step_name):
    """Modify network of interferograms before the network inversion."""
    # check the existence of ifgramStack.h5
    stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
    coh_txt = '{}_coherence_spatialAvg.txt'.format(os.path.splitext(os.path.basename(stack_file))[0])
    try:
        net_fig = [i for i in ['Network.pdf', 'pic/Network.pdf'] if os.path.isfile(i)][0]
    except:
        net_fig = None

    # 1) modify network
    scp_args = '{} -t {}'.format(stack_file, self.templateFile)
    print('modify_network.py', scp_args)
    mintpy.modify_network.main(scp_args.split())

    # 2) plot network
    scp_args = '{} -t {} --nodisplay'.format(stack_file, self.templateFile)
    print('\nplot_network.py', scp_args)
    if ut.run_or_skip(out_file=net_fig,
                      in_file=[stack_file, coh_txt, self.templateFile],
                      check_readable=False) == 'run':
        mintpy.plot_network.main(scp_args.split())

    # 3) aux files: maskConnComp and avgSpatialCoh
    self.generate_ifgram_aux_file()
    return

def run_geocode(self, step_name):
    """geocode data files in radar coordinates into ./geo folder."""
    if self.template['mintpy.geocode']:
        ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
        atr = readfile.read_attribute(ts_file)
        if 'Y_FIRST' not in atr.keys():
            # 1. geocode
            out_dir = os.path.join(self.workDir, 'geo')
            if not os.path.isdir(out_dir):
                os.makedirs(out_dir)
                print('create directory:', out_dir)

            geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
            in_files = [geom_file, 'temporalCoherence.h5', ts_file, 'velocity.h5']
            scp_args = '-l {l} -t {t} --outdir {o} --update '.format(
                l=lookup_file, t=self.templateFile, o=out_dir)
            for in_file in in_files:
                scp_args += ' {}'.format(in_file)
            print('geocode.py', scp_args)
            mintpy.geocode.main(scp_args.split())

            # 2. generate reliable pixel mask in geo coordinate
            geom_file = os.path.join(out_dir, 'geo_{}'.format(os.path.basename(geom_file)))
            tcoh_file = os.path.join(out_dir, 'geo_temporalCoherence.h5')
            mask_file = os.path.join(out_dir, 'geo_maskTempCoh.h5')
            tcoh_min = self.template['mintpy.networkInversion.minTempCoh']

            scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
            print('generate_mask.py', scp_args)
            if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file) == 'run':
                mintpy.generate_mask.main(scp_args.split())
    else:
        print('geocoding is OFF')
    return

def generate_temporal_coherence_mask(self):
    """Generate reliable pixel mask from temporal coherence"""
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    tcoh_file = 'temporalCoherence.h5'
    mask_file = 'maskTempCoh.h5'
    tcoh_min = self.template['mintpy.networkInversion.minTempCoh']

    scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
    print('generate_mask.py', scp_args)

    # update mode: run only if:
    # 1) output file exists and newer than input file, AND
    # 2) all config keys are the same
    config_keys = ['mintpy.networkInversion.minTempCoh']
    print('update mode: ON')
    flag = 'skip'
    if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file, print_msg=False) == 'run':
        flag = 'run'
    else:
        print('1) output file: {} already exists and is newer than input file: {}'.format(mask_file, tcoh_file))
        atr = readfile.read_attribute(mask_file)
        if any(str(self.template[i]) != atr.get(i, 'False') for i in config_keys):
            flag = 'run'
            print('2) NOT all key configuration parameters are the same: {}'.format(config_keys))
        else:
            print('2) all key configuration parameters are the same: {}'.format(config_keys))
    print('run or skip: {}'.format(flag))

    if flag == 'run':
        mintpy.generate_mask.main(scp_args.split())
        # update configKeys
        atr = {}
        for key in config_keys:
            atr[key] = self.template[key]
        ut.add_attribute(mask_file, atr)

    # check number of pixels selected in mask file for following analysis
    num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
    print('number of reliable pixels: {}'.format(num_pixel))

    min_num_pixel = float(self.template['mintpy.networkInversion.minNumPixel'])
    if num_pixel < min_num_pixel:
        msg = "Not enough reliable pixels (minimum of {}). ".format(int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    return

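# ut.run_or_skip() is the primitive behind every update-mode check in this
# section. A behavioral sketch of its contract (assumption: the real
# implementation in mintpy.utils.utils also honors check_readable and
# print_msg, omitted here for brevity; assumes `import os` as elsewhere):
def run_or_skip_sketch(out_file, in_file=None):
    """Return 'run' if out_file is missing or older than any input file, else 'skip'."""
    if not out_file or not os.path.isfile(out_file):
        return 'run'
    in_files = in_file if isinstance(in_file, list) else [in_file]
    if any(i and os.path.getmtime(i) > os.path.getmtime(out_file) for i in in_files):
        return 'run'
    return 'skip'
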
def run_or_skip(grib_files, tropo_file, geom_file):
    print('update mode: ON')
    print('output file: {}'.format(tropo_file))
    flag = 'skip'

    # check existence and modification time
    if ut.run_or_skip(out_file=tropo_file, in_file=grib_files, print_msg=False) == 'run':
        flag = 'run'
        print('1) output file either does NOT exist or is NOT newer than all GRIB files.')
    else:
        print('1) output file exists and is newer than all GRIB files.')

        # check dataset size in space / time
        date_list = [str(re.findall(r'\d{8}', os.path.basename(i))[0]) for i in grib_files]
        if (get_dataset_size(tropo_file) != get_dataset_size(geom_file)
                or any(i not in timeseries(tropo_file).get_date_list() for i in date_list)):
            flag = 'run'
            print('2) output file does NOT have the same len/wid as the geometry file {} or does NOT contain all dates'.format(geom_file))
        else:
            print('2) output file has the same len/wid as the geometry file and contains all dates')

            # check if output file is fully written
            with h5py.File(tropo_file, 'r') as f:
                if np.all(f['timeseries'][-1, :, :] == 0):
                    flag = 'run'
                    print('3) output file is NOT fully written.')
                else:
                    print('3) output file is fully written.')

    # result
    print('run or skip: {}'.format(flag))
    return flag

def extract_isce_metadata(meta_file, geom_dir=None, rsc_file=None, update_mode=True):
    """Extract metadata from ISCE stack products
    Parameters: meta_file : str, path of metadata file, reference/IW1.xml or referenceShelve/data.dat
                geom_dir  : str, path of geometry directory.
                rsc_file  : str, output file name of ROIPAC format rsc file
    Returns:    metadata  : dict
    """
    if not rsc_file:
        rsc_file = os.path.join(os.path.dirname(meta_file), 'data.rsc')

    # check existing rsc_file
    if update_mode and ut.run_or_skip(rsc_file, in_file=meta_file, check_readable=False) == 'skip':
        return readfile.read_roipac_rsc(rsc_file)

    # 1. extract metadata from XML / shelve file
    fbase = os.path.basename(meta_file)
    if fbase.startswith("IW"):
        print('extract metadata from ISCE/topsStack xml file:', meta_file)
        metadata = isce_utils.extract_tops_metadata(meta_file)[0]
        metadata['sensor_type'] = 'tops'
    elif fbase.startswith("data"):
        print('extract metadata from ISCE/stripmapStack shelve file:', meta_file)
        metadata = isce_utils.extract_stripmap_metadata(meta_file)[0]
        metadata['sensor_type'] = 'stripmap'
    elif fbase.endswith(".xml"):
        metadata = isce_utils.extract_stripmap_metadata(meta_file)[0]
    else:
        raise ValueError("unrecognized ISCE metadata file: {}".format(meta_file))

    # 2. extract metadata from geometry file
    if geom_dir:
        metadata = isce_utils.extract_geometry_metadata(geom_dir, metadata)

    # 3. common metadata
    metadata['PROCESSOR'] = 'isce'
    metadata['ANTENNA_SIDE'] = '-1'

    # convert all value to string format
    for key, value in metadata.items():
        metadata[key] = str(value)

    # write to .rsc file
    metadata = readfile.standardize_metadata(metadata)
    if rsc_file:
        print('writing ', rsc_file)
        writefile.write_roipac_rsc(metadata, rsc_file)
    return metadata

def run_or_skip(inps, dsNameDict, out_file):
    flag = 'run'

    # check 1 - update mode status
    if not inps.updateMode:
        return flag

    # check 2 - output file existence
    if ut.run_or_skip(out_file, check_readable=True) == 'run':
        return flag

    # check 3 - output dataset info
    key = [i for i in ['unwrapPhase', 'height'] if i in dsNameDict.keys()][0]
    ds_shape = dsNameDict[key][1]
    in_shape = ds_shape[-2:]

    if 'unwrapPhase' in dsNameDict.keys():
        # compare date12 and size
        ds = gdal.Open(inps.unwFile, gdal.GA_ReadOnly)
        in_date12_list = [ds.GetRasterBand(i + 1).GetMetadata("unwrappedPhase")['Dates']
                          for i in range(ds_shape[0])]
        in_date12_list = ['_'.join(d.split('_')[::-1]) for d in in_date12_list]

        try:
            out_obj = ifgramStack(out_file)
            out_obj.open(print_msg=False)
            out_shape = (out_obj.length, out_obj.width)
            out_date12_list = out_obj.get_date12_list(dropIfgram=False)

            if out_shape == in_shape and set(in_date12_list).issubset(set(out_date12_list)):
                print(('All date12 exist in file {} with same size as required,'
                       ' no need to re-load.'.format(os.path.basename(out_file))))
                flag = 'skip'
        except:
            pass

    elif 'height' in dsNameDict.keys():
        # compare dataset names and size
        in_dsNames = list(dsNameDict.keys())
        in_size = in_shape[0] * in_shape[1] * 4 * len(in_dsNames)

        out_obj = geometry(out_file)
        out_obj.open(print_msg=False)
        out_dsNames = out_obj.datasetNames
        out_shape = (out_obj.length, out_obj.width)
        out_size = os.path.getsize(out_file)

        if (set(in_dsNames).issubset(set(out_dsNames))
                and out_shape == in_shape
                and out_size > in_size * 0.3):
            print(('All datasets exist in file {} with same size as required,'
                   ' no need to re-load.'.format(os.path.basename(out_file))))
            flag = 'skip'

    return flag

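# The '_'.join(d.split('_')[::-1]) step above flips the order of the date
# pair read from the GDAL band metadata to match MintPy's date12 convention,
# e.g. (illustrative value only):
#   >>> '_'.join('20200113_20200101'.split('_')[::-1])
#   '20200101_20200113'
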
def run_load_data(self, step_name):
    """Load InSAR stacks into HDF5 files in ./inputs folder.
    It 1) copies auxiliary files into work directory (for Univ of Miami only)
       2) loads all interferogram stack files into mintpy/inputs directory.
       3) checks loading result
       4) adds custom metadata (optional, for HDF-EOS5 format only)
    """
    # 1) copy aux files (optional)
    self._copy_aux_file()

    # 2) loading data
    scp_args = '--template {}'.format(self.templateFile)
    if self.customTemplateFile:
        scp_args += ' {}'.format(self.customTemplateFile)
    if self.projectName:
        scp_args += ' --project {}'.format(self.projectName)

    # run
    print("load_data.py", scp_args)
    mintpy.load_data.main(scp_args.split())
    os.chdir(self.workDir)

    # 3) check loading result
    load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

    # 3.1) output waterMask.h5
    water_mask_file = 'waterMask.h5'
    if 'waterMask' in readfile.get_dataset_list(geom_file):
        print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
        if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
            water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
            atr['FILE_TYPE'] = 'waterMask'
            writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

    # 4) add custom metadata (optional)
    if self.customTemplateFile:
        print('updating {}, {} metadata based on custom template file: {}'.format(
            os.path.basename(stack_file),
            os.path.basename(geom_file),
            os.path.basename(self.customTemplateFile)))
        # use ut.add_attribute() instead of add_attribute.py because of
        # better control of special metadata, such as SUBSET_X/YMIN
        ut.add_attribute(stack_file, self.customTemplate)
        ut.add_attribute(geom_file, self.customTemplate)

    # 5) if not load_complete, plot and raise exception
    if not load_complete:
        # plot result if error occurred
        self.plot_result(print_aux=False, plot=True)

        # go back to original directory
        print('Go back to directory:', self.cwd)
        os.chdir(self.cwd)

        # raise error
        msg = 'step {}: NOT all required dataset found, exit.'.format(step_name)
        raise RuntimeError(msg)
    return

def analyze_rms(date_list, rms_list, inps):
    # reference date
    ref_idx = np.argmin(rms_list)
    print('-' * 50 + '\ndate with min RMS: {} - {:.4f}'.format(date_list[ref_idx], rms_list[ref_idx]))
    ref_date_file = 'reference_date.txt'
    if ut.run_or_skip(out_file=ref_date_file,
                      in_file=[inps.timeseries_file, inps.mask_file, inps.template_file],
                      check_readable=False) == 'run':
        with open(ref_date_file, 'w') as f:
            f.write(date_list[ref_idx] + '\n')
        print('save date to file: ' + ref_date_file)

    # exclude date(s) - outliers
    try:
        rms_threshold = ut.median_abs_deviation_threshold(rms_list, center=0., cutoff=inps.cutoff)
    except:
        # equivalent calculation using numpy assuming Gaussian distribution
        rms_threshold = np.median(rms_list) / .6745 * inps.cutoff

    ex_idx = [rms_list.index(i) for i in rms_list if i > rms_threshold]
    print(('-' * 50 + '\ndate(s) with RMS > {} * median RMS'
           ' ({:.4f})'.format(inps.cutoff, rms_threshold)))
    ex_date_file = 'exclude_date.txt'
    if ex_idx:
        # print
        for i in ex_idx:
            print('{} - {:.4f}'.format(date_list[i], rms_list[i]))
        # save to text file
        with open(ex_date_file, 'w') as f:
            for i in ex_idx:
                f.write(date_list[i] + '\n')
        print('save date(s) to file: ' + ex_date_file)
    else:
        print('None.')
        if os.path.isfile(ex_date_file):
            rmCmd = 'rm {}'.format(ex_date_file)
            print(rmCmd)
            os.system(rmCmd)

    # plot bar figure and save
    fig_file = os.path.splitext(inps.rms_file)[0] + '.pdf'
    fig, ax = plt.subplots(figsize=inps.fig_size)
    print('create figure in size:', inps.fig_size)
    ax = plot_rms_bar(ax, date_list, np.array(rms_list) * 1000., cutoff=inps.cutoff)
    fig.savefig(fig_file, bbox_inches='tight', transparent=True)
    print('save figure to file: ' + fig_file)
    return inps

def get_phase_linking_coherence_mask(template, work_dir):
    """Generate reliable pixel mask from temporal coherence
    functions = [generate_mask, readfile, run_or_skip, add_attribute]
    # from mintpy import generate_mask
    # from mintpy.utils import readfile
    # from mintpy.utils.utils import run_or_skip, add_attribute
    """
    tcoh_file = os.path.join(work_dir, 'temporalCoherence.h5')
    mask_file = os.path.join(work_dir, 'maskTempCoh.h5')
    tcoh_min = float(template['minopy.timeseries.minTempCoh'])

    scp_args = '{} -m {} --nonzero -o {} --update'.format(tcoh_file, tcoh_min, mask_file)
    print('generate_mask.py', scp_args)

    # update mode: run only if:
    # 1) output file exists and newer than input file, AND
    # 2) all config keys are the same
    print('update mode: ON')
    flag = 'skip'
    if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file, print_msg=False) == 'run':
        flag = 'run'
    print('run or skip: {}'.format(flag))

    if flag == 'run':
        generate_mask.main(scp_args.split())
        # update configKeys
        atr = {}
        atr['minopy.timeseries.minTempCoh'] = tcoh_min
        ut.add_attribute(mask_file, atr)

    # check number of pixels selected in mask file for following analysis
    #num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
    #print('number of reliable pixels: {}'.format(num_pixel))
    #min_num_pixel = float(template['mintpy.networkInversion.minNumPixel'])   # 100
    #if num_pixel < min_num_pixel:
    #    msg = "Not enough reliable pixels (minimum of {}). ".format(int(min_num_pixel))
    #    msg += "Try the following:\n"
    #    msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
    #    msg += "2) Check the network and make sure it's fully connected without subsets"
    #    raise RuntimeError(msg)
    return

def extract_isce_metadata(meta_file, geom_dir=None, rsc_file=None, update_mode=True):
    """Extract metadata from ISCE stack products
    Parameters: meta_file : str, path of metadata file, master/IW1.xml or masterShelve/data.dat
                geom_dir  : str, path of geometry directory.
                rsc_file  : str, output file name of ROIPAC format rsc file
    Returns:    metadata  : dict
    """
    if not rsc_file:
        rsc_file = os.path.join(os.path.dirname(meta_file), 'data.rsc')

    # check existing rsc_file
    if update_mode and ut.run_or_skip(rsc_file, in_file=meta_file, check_readable=False) == 'skip':
        return readfile.read_roipac_rsc(rsc_file)

    # 1. read/extract metadata from XML / shelve file
    processor = get_processor(meta_file)
    if processor == 'tops':
        print('extract metadata from ISCE/topsStack xml file:', meta_file)
        metadata = extract_tops_metadata(meta_file)[0]
    else:
        print('extract metadata from ISCE/stripmapStack shelve file:', meta_file)
        metadata = extract_stripmap_metadata(meta_file)[0]

    # 2. extract metadata from geometry file
    if geom_dir:
        metadata = extract_geometry_metadata(geom_dir, metadata)

    # 3. common metadata
    metadata['PROCESSOR'] = 'isce'
    metadata['ANTENNA_SIDE'] = '-1'

    # convert all value to string format
    for key, value in metadata.items():
        metadata[key] = str(value)

    # write to .rsc file
    metadata = readfile.standardize_metadata(metadata)
    if rsc_file:
        print('writing ', rsc_file)
        writefile.write_roipac_rsc(metadata, rsc_file)
    return metadata

def extract_isce_metadata(meta_file, geom_dir=None, rsc_file=None, update_mode=True):
    """Extract metadata from ISCE stack products
    Parameters: meta_file : str, path of metadata file, master/IW1.xml or masterShelve/data.dat
                geom_dir  : str, path of geometry directory.
                rsc_file  : str, output file name of ROIPAC format rsc file
    Returns:    metadata  : dict
    """
    if not rsc_file:
        rsc_file = os.path.join(os.path.dirname(meta_file), 'data.rsc')

    # check existing rsc_file
    if update_mode and ut.run_or_skip(rsc_file, in_file=meta_file, check_readable=False) == 'skip':
        return readfile.read_roipac_rsc(rsc_file)

    # 1. extract metadata from XML / shelve file
    fbase = os.path.basename(meta_file)
    if fbase.startswith("IW"):
        print('extract metadata from ISCE/topsStack xml file:', meta_file)
        metadata = extract_tops_metadata(meta_file)
    elif fbase.startswith("data"):
        print('extract metadata from ISCE/stripmapStack shelve file:', meta_file)
        metadata = extract_stripmap_metadata(meta_file)
    elif fbase.endswith(".xml"):
        metadata = extract_stripmap_metadata(meta_file)
    else:
        raise ValueError("unrecognized ISCE metadata file: {}".format(meta_file))

    # 2. extract metadata from geometry file
    if geom_dir:
        metadata = extract_geometry_metadata(geom_dir, metadata)

    # 3. common metadata
    metadata['PROCESSOR'] = 'isce'
    metadata['ANTENNA_SIDE'] = '-1'

    # convert all value to string format
    for key, value in metadata.items():
        metadata[key] = str(value)

    # write to .rsc file
    metadata = readfile.standardize_metadata(metadata)
    if rsc_file:
        print('writing ', rsc_file)
        writefile.write_roipac_rsc(metadata, rsc_file)
    return metadata

def _copy_aux_file(self):
    if not self.projectName:
        return

    # for Univ of Miami
    flist = ['PROCESS/unavco_attributes.txt',
             'PROCESS/bl_list.txt',
             'SLC/summary*slc.jpg']
    try:
        proj_dir = os.path.join(os.getenv('SCRATCHDIR'), self.projectName)
        flist = get_file_list([os.path.join(proj_dir, i) for i in flist], abspath=True)
        for fname in flist:
            if ut.run_or_skip(out_file=os.path.basename(fname), in_file=fname, check_readable=False) == 'run':
                shutil.copy2(fname, self.workDir)
                print('copy {} to work directory'.format(os.path.basename(fname)))
    except:
        pass
    return

def _copy_aux_file(self):
    if not self.projectName:
        return

    # for Univ of Miami
    flist = ['PROCESS/unavco_attributes.txt',
             'PROCESS/bl_list.txt',
             'SLC/summary*slc.jpg']
    try:
        proj_dir = os.path.join(os.getenv('SCRATCHDIR'), self.projectName)
        flist = ut.get_file_list([os.path.join(proj_dir, i) for i in flist], abspath=True)
        for fname in flist:
            if ut.run_or_skip(out_file=os.path.basename(fname), in_file=fname, check_readable=False) == 'run':
                shutil.copy2(fname, self.workDir)
                print('copy {} to work directory'.format(os.path.basename(fname)))
    except:
        pass
    return

def run_save2hdfeos5(self, step_name):
    """Save displacement time-series and its aux data in geo coordinate into HDF-EOS5 format"""
    if self.template['mintpy.save.hdfEos5'] is True:
        # input
        ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
        # Add attributes from custom template to timeseries file
        if self.customTemplate is not None:
            ut.add_attribute(ts_file, self.customTemplate)

        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'geo_maskTempCoh.h5'
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        if 'geo' in ts_file:
            tcoh_file = './geo/geo_temporalCoherence.h5'
            mask_file = './geo/geo_maskTempCoh.h5'
            geom_file = './geo/geo_{}'.format(os.path.basename(geom_file))

        # cmd
        print('--------------------------------------------')
        scp_args = '{f} -c {c} -m {m} -g {g} -t {t}'.format(
            f=ts_file, c=tcoh_file, m=mask_file, g=geom_file, t=self.templateFile)
        print('save_hdfeos5.py', scp_args)

        # output (check existing file)
        atr = readfile.read_attribute(ts_file)
        SAT = sensor.get_unavco_mission_name(atr)
        try:
            hdfeos5_file = get_file_list('{}_*.he5'.format(SAT))[0]
        except:
            hdfeos5_file = None
        if ut.run_or_skip(out_file=hdfeos5_file,
                          in_file=[ts_file, tcoh_file, mask_file, geom_file]) == 'run':
            mintpy.save_hdfeos5.main(scp_args.split())
    else:
        print('save time-series to HDF-EOS5 format is OFF.')
    return

def run_local_oscillator_drift_correction(self, step_name):
    """Correct local oscillator drift (LOD).
    Automatically applied for Envisat data.
    Automatically skipped for all the other data.
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    fnames = self.get_timeseries_filename(self.template)[step_name]
    in_file = fnames['input']
    out_file = fnames['output']
    if in_file != out_file:
        scp_args = '{} {} -o {}'.format(in_file, geom_file, out_file)
        print('local_oscilator_drift.py', scp_args)
        if ut.run_or_skip(out_file=out_file, in_file=in_file) == 'run':
            mintpy.local_oscilator_drift.main(scp_args.split())
    else:
        atr = readfile.read_attribute(in_file)
        sat = atr.get('PLATFORM', None)
        print('No local oscillator drift correction is needed for {}.'.format(sat))
    return

def run_geocode(self, step_name):
    """geocode data files in radar coordinates into ./geo folder."""
    if self.template['mintpy.geocode']:
        ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
        atr = readfile.read_attribute(ts_file)
        if 'Y_FIRST' not in atr.keys():
            # 1. geocode
            out_dir = os.path.join(self.workDir, 'geo')
            os.makedirs(out_dir, exist_ok=True)

            geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
            in_files = [geom_file, 'temporalCoherence.h5', 'avgSpatialCoh.h5', ts_file, 'velocity.h5']
            iargs = ['-l', lookup_file, '-t', self.templateFile, '--outdir', out_dir, '--update']
            for in_file in in_files:
                iargs += [in_file]
            print('geocode.py', ' '.join(iargs))
            mintpy.geocode.main(iargs)

            # 2. generate reliable pixel mask in geo coordinate
            geom_file = os.path.join(out_dir, 'geo_{}'.format(os.path.basename(geom_file)))
            tcoh_file = os.path.join(out_dir, 'geo_temporalCoherence.h5')
            mask_file = os.path.join(out_dir, 'geo_maskTempCoh.h5')
            tcoh_min = self.template['mintpy.networkInversion.minTempCoh']

            iargs = [tcoh_file, '-m', tcoh_min, '-o', mask_file]
            # exclude pixels in shadow if shadowMask dataset is available
            if (self.template['mintpy.networkInversion.shadowMask'] is True
                    and 'shadowMask' in readfile.get_dataset_list(geom_file)):
                iargs += ['--base', geom_file, '--base-dataset', 'shadowMask', '--base-value', '1']
            print('generate_mask.py', ' '.join(iargs))
            if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file) == 'run':
                mintpy.generate_mask.main(iargs)
        else:
            print('dataset is geocoded, skip geocoding and continue.')
    else:
        print('geocoding is OFF')
    return

def _read_template_minopy(self):
    if self.org_custom_template:
        # Update default template file based on custom template
        print('update default template based on input custom template')
        self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)

    # 2) backup custom/default template file in inputs/pic folder
    for backup_dirname in ['inputs']:
        backup_dir = os.path.join(self.workDir, backup_dirname)
        # create directory
        os.makedirs(backup_dir, exist_ok=True)

        # back up to the directory
        for tfile in [self.org_custom_template, self.templateFile]:
            if tfile and ut.run_or_skip(out_file=os.path.join(backup_dir, os.path.basename(tfile)),
                                        in_file=tfile,
                                        check_readable=False,
                                        print_msg=False) == 'run':
                shutil.copy2(tfile, backup_dir)
                print('copy {} to {:<8} directory for backup.'.format(
                    os.path.basename(tfile), os.path.basename(backup_dir)))

    # 3) read default template file
    print('read default template file:', self.templateFile)
    self.template = readfile.read_template(self.templateFile)
    auto_template_file = os.path.join(os.path.dirname(__file__), 'defaults/minopyApp_auto.cfg')
    self.template = check_template_auto_value(self.template,
                                              self.template_mintpy,
                                              auto_file=auto_template_file,
                                              templateFile=self.templateFile)
    return

def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip'
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
        return

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    else:
        inps.lat, inps.lon = get_lat_lon(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # Convert relative phase delay on reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: ' + inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # Write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    return

def extract_gmtsar_metadata(unw_file, template_file, rsc_file=None, update_mode=True):
    """Extract metadata from GMTSAR interferogram stack."""
    # update_mode: check existing rsc_file
    if update_mode and ut.run_or_skip(rsc_file, in_file=unw_file, check_readable=False) == 'skip':
        return readfile.read_roipac_rsc(rsc_file)

    ifg_dir = os.path.dirname(unw_file)

    # 1. read *.PRM file
    prm_file = get_prm_files(ifg_dir)[0]
    meta = readfile.read_gmtsar_prm(prm_file)
    meta['PROCESSOR'] = 'gmtsar'

    # 2. read template file: HEADING, ORBIT_DIRECTION
    template = readfile.read_template(template_file)
    for key in ['HEADING', 'ORBIT_DIRECTION']:
        if key in template.keys():
            meta[key] = template[key].lower()
        else:
            raise ValueError('Attribute {} is missing! Please manually specify it in the template file.'.format(key))

    # 3. grab A/RLOOKS from radar-coord data file
    meta['ALOOKS'], meta['RLOOKS'] = get_multilook_number(ifg_dir)
    meta['AZIMUTH_PIXEL_SIZE'] *= meta['ALOOKS']
    meta['RANGE_PIXEL_SIZE'] *= meta['RLOOKS']

    # 4. grab LAT/LON_REF1/2/3/4 from geo-coord data file
    meta = get_lalo_ref(ifg_dir, meta)

    # 5. grab X/Y_FIRST/STEP from unw_file if in geo-coord
    ds = gdal.Open(unw_file, gdal.GA_ReadOnly)
    transform = ds.GetGeoTransform()
    x_step = abs(transform[1])
    y_step = abs(transform[5]) * -1.
    if 1e-7 < x_step < 1.:
        meta['X_STEP'] = x_step
        meta['Y_STEP'] = y_step
        meta['X_FIRST'] = transform[0] - x_step / 2.
        meta['Y_FIRST'] = transform[3] - y_step / 2.
        # constrain longitude within (-180, 180]
        if meta['X_FIRST'] > 180.:
            meta['X_FIRST'] -= 360.

    # 6. extra metadata for the missing geometry dataset: SLANT_RANGE_DISTANCE / INCIDENCE_ANGLE
    # for dataset in geo-coordinates
    if 'Y_FIRST' in meta.keys():
        meta = get_slant_range_distance(ifg_dir, meta)
        Re = float(meta['EARTH_RADIUS'])
        H = float(meta['HEIGHT'])
        Rg = float(meta['SLANT_RANGE_DISTANCE'])
        Inc = (np.pi - np.arccos((Re**2 + Rg**2 - (Re + H)**2) / (2 * Re * Rg))) * 180. / np.pi
        meta['INCIDENCE_ANGLE'] = Inc

    # convert all value to string format
    for key, value in meta.items():
        meta[key] = str(value)

    # write to .rsc file
    meta = readfile.standardize_metadata(meta)
    if rsc_file:
        print('writing ', rsc_file)
        os.makedirs(os.path.dirname(rsc_file), exist_ok=True)
        writefile.write_roipac_rsc(meta, rsc_file)
    return meta

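# The INCIDENCE_ANGLE expression above is the law of cosines on the triangle
# formed by the Earth center, the target, and the satellite: sides Re
# (center to target), Rg (target to satellite), and Re + H (center to
# satellite); the incidence angle is pi minus the angle at the target.
# A worked example with illustrative values (not derived from any file in
# this section):
Re, H, Rg = 6371.0e3, 693.0e3, 850.0e3   # meters
Inc = (np.pi - np.arccos((Re**2 + Rg**2 - (Re + H)**2) / (2 * Re * Rg))) * 180. / np.pi
print('incidence angle: {:.1f} deg'.format(Inc))   # ~37.5 deg for these numbers
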
def run_tropospheric_delay_correction(self, step_name):
    """Correct tropospheric delays."""
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    mask_file = 'maskTempCoh.h5'

    fnames = self.get_timeseries_filename(self.template)[step_name]
    in_file = fnames['input']
    out_file = fnames['output']
    if in_file != out_file:
        poly_order = self.template['mintpy.troposphericDelay.polyOrder']
        tropo_model = self.template['mintpy.troposphericDelay.weatherModel']
        weather_dir = self.template['mintpy.troposphericDelay.weatherDir']
        method = self.template['mintpy.troposphericDelay.method']

        def get_dataset_size(fname):
            atr = readfile.read_attribute(fname)
            return (atr['LENGTH'], atr['WIDTH'])

        # Phase/Elevation Ratio (Doin et al., 2009)
        if method == 'height_correlation':
            tropo_look = self.template['mintpy.troposphericDelay.looks']
            tropo_min_cor = self.template['mintpy.troposphericDelay.minCorrelation']
            scp_args = '{f} -g {g} -p {p} -m {m} -o {o} -l {l} -t {t}'.format(
                f=in_file, g=geom_file, p=poly_order, m=mask_file, o=out_file,
                l=tropo_look, t=tropo_min_cor)
            print('tropospheric delay correction with height-correlation approach')
            print('tropo_phase_elevation.py', scp_args)
            if ut.run_or_skip(out_file=out_file, in_file=in_file) == 'run':
                mintpy.tropo_phase_elevation.main(scp_args.split())

        # Weather Re-analysis Data (Jolivet et al., 2011;2014)
        elif method == 'pyaps':
            scp_args = '-f {f} --model {m} -g {g} -w {w}'.format(
                f=in_file, m=tropo_model, g=geom_file, w=weather_dir)
            print('Atmospheric correction using Weather Re-analysis dataset (PyAPS, Jolivet et al., 2011)')
            print('Weather Re-analysis dataset:', tropo_model)
            tropo_file = './inputs/{}.h5'.format(tropo_model)
            if ut.run_or_skip(out_file=out_file, in_file=[in_file, tropo_file]) == 'run':
                if os.path.isfile(tropo_file) and get_dataset_size(tropo_file) == get_dataset_size(in_file):
                    scp_args = '{f} {t} -o {o} --force'.format(f=in_file, t=tropo_file, o=out_file)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(tropo_file))
                    print('diff.py', scp_args)
                    mintpy.diff.main(scp_args.split())
                else:
                    if tropo_model in ['ERA5']:
                        cmd = 'tropo_pyaps3.py ' + scp_args
                        print(cmd)
                        status = subprocess.Popen(cmd, shell=True).wait()
                        # alternative - call tropo_pyaps3 as a python module:
                        #from mintpy import tropo_pyaps3
                        #tropo_pyaps3.main(scp_args.split())
                    else:
                        # opt 1 - using tropo_pyaps as python module and call its main function
                        # preferred, disabled for now to make it compatible with python2-pyaps
                        #from mintpy import tropo_pyaps
                        #tropo_pyaps.main(scp_args.split())

                        # opt 2 - using tropo_pyaps as executable script
                        # will be deprecated after python3-pyaps is fully functional
                        cmd = 'tropo_pyaps.py ' + scp_args
                        print(cmd)
                        status = subprocess.Popen(cmd, shell=True).wait()
    else:
        print('No tropospheric delay correction.')
    return

def _read_template(self):
    # read custom template, to:
    # 1) update default template
    # 2) add metadata to ifgramStack file and HDF-EOS5 file
    self.customTemplate = None
    if self.customTemplateFile:
        cfile = self.customTemplateFile

        # Copy custom template file to inputs directory for backup
        inputs_dir = os.path.join(self.workDir, 'inputs')
        if not os.path.isdir(inputs_dir):
            os.makedirs(inputs_dir)
            print('create directory:', inputs_dir)
        if ut.run_or_skip(out_file=os.path.join(inputs_dir, os.path.basename(cfile)),
                          in_file=cfile,
                          check_readable=False) == 'run':
            shutil.copy2(cfile, inputs_dir)
            print('copy {} to inputs directory for backup.'.format(os.path.basename(cfile)))

        # Read custom template
        print('read custom template file:', cfile)
        cdict = readfile.read_template(cfile)

        # correct some loose type errors
        standardValues = {'def': 'auto', 'default': 'auto',
                          'y': 'yes', 'on': 'yes', 'true': 'yes',
                          'n': 'no', 'off': 'no', 'false': 'no'}
        for key, value in cdict.items():
            if value in standardValues.keys():
                cdict[key] = standardValues[value]

        for key in ['mintpy.deramp', 'mintpy.troposphericDelay.method']:
            if key in cdict.keys():
                cdict[key] = cdict[key].lower().replace('-', '_')

        if 'processor' in cdict.keys():
            cdict['mintpy.load.processor'] = cdict['processor']

        # these metadata are used in load_data.py only, not needed afterwards
        # (in order to manually add extra offset when the lookup table is shifted)
        # (seen in ROI_PAC product sometimes)
        for key in ['SUBSET_XMIN', 'SUBSET_YMIN']:
            if key in cdict.keys():
                cdict.pop(key)

        self.customTemplate = dict(cdict)

        # Update default template file based on custom template
        print('update default template based on input custom template')
        self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)

    print('read default template file:', self.templateFile)
    self.template = readfile.read_template(self.templateFile)
    self.template = ut.check_template_auto_value(self.template)

    # correct some loose setup conflicts
    if self.template['mintpy.geocode'] is False:
        for key in ['mintpy.save.hdfEos5', 'mintpy.save.kmz']:
            if self.template[key] is True:
                self.template['mintpy.geocode'] = True
                print('Turn ON mintpy.geocode in order to run {}.'.format(key))
                break
    return

def run_geocode(inps):
    """geocode all input files"""
    start_time = time.time()

    # feed the largest file for resample object initiation
    ind_max = np.argmax([os.path.getsize(i) for i in inps.file])

    # prepare geometry for geocoding
    res_obj = resample(lut_file=inps.lookupFile,
                       src_file=inps.file[ind_max],
                       SNWE=inps.SNWE,
                       lalo_step=inps.laloStep,
                       interp_method=inps.interpMethod,
                       fill_value=inps.fillValue,
                       nprocs=inps.nprocs,
                       max_memory=inps.maxMemory,
                       software=inps.software,
                       print_msg=True)
    res_obj.open()
    res_obj.prepare()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50 + '\nresampling file: {}'.format(infile))
        ext = os.path.splitext(infile)[1]
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        outfile = auto_output_filename(infile, inps)

        # update_mode
        if inps.updateMode:
            print('update mode: ON')
            if ut.run_or_skip(outfile, in_file=[infile, inps.lookupFile]) == 'skip':
                continue

        ## prepare output
        # update metadata
        if inps.radar2geo:
            atr = attr.update_attribute4radar2geo(atr, res_obj=res_obj)
        else:
            atr = attr.update_attribute4geo2radar(atr, res_obj=res_obj)

        # instantiate output file
        file_is_hdf5 = ext in ['.h5', '.he5']
        if file_is_hdf5:
            writefile.layout_hdf5(outfile, metadata=atr, ref_file=infile)
        else:
            dsDict = dict()

        ## run
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        for dsName in dsNames:
            if not file_is_hdf5:
                dsDict[dsName] = np.zeros((res_obj.length, res_obj.width))

            # loop for block-by-block IO
            for i in range(res_obj.num_box):
                src_box = res_obj.src_box_list[i]
                dest_box = res_obj.dest_box_list[i]

                # read
                print('-' * 50 + '\nreading {d:<{w}} in block {b} from {f} ...'.format(
                    d=dsName, w=maxDigit, b=src_box, f=os.path.basename(infile)))
                data = readfile.read(infile,
                                     datasetName=dsName,
                                     box=src_box,
                                     print_msg=False)[0]

                # resample
                data = res_obj.run_resample(src_data=data, box_ind=i)

                # write / save block data
                if data.ndim == 3:
                    block = [0, data.shape[0],
                             dest_box[1], dest_box[3],
                             dest_box[0], dest_box[2]]
                else:
                    block = [dest_box[1], dest_box[3],
                             dest_box[0], dest_box[2]]

                if file_is_hdf5:
                    print('write data in block {} to file: {}'.format(block, outfile))
                    writefile.write_hdf5_block(outfile,
                                               data=data,
                                               datasetName=dsName,
                                               block=block,
                                               print_msg=False)
                else:
                    dsDict[dsName][block[0]:block[1], block[2]:block[3]] = data

            # for binary file: ensure same data type
            if not file_is_hdf5:
                dsDict[dsName] = np.array(dsDict[dsName], dtype=data.dtype)

        # write binary file
        if not file_is_hdf5:
            writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time() - start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return outfile

def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip'
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
    else:
        if any(i is None for i in [inps.geom_file, inps.ref_yx]):
            print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
            if not os.path.isfile(inps.trop_file):
                inps.trop_file = None
            return

        # calculate phase delay
        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
        num_date = len(inps.grib_file_list)
        date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
        trop_data = np.zeros((num_date, length, width), np.float32)

        print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
        print('number of grib files used: {}'.format(num_date))
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            grib_file = inps.grib_file_list[i]
            trop_data[i] = get_delay(grib_file, inps)
            prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
        prog_bar.close()

        # Convert relative phase delay on reference date
        try:
            inps.ref_date = atr['REF_DATE']
        except:
            inps.ref_date = date_list[0]
        print('convert to relative phase delay with reference date: ' + inps.ref_date)
        inps.ref_idx = date_list.index(inps.ref_date)
        trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

        # Write tropospheric delay to HDF5
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        ts_obj = timeseries(inps.trop_file)
        ts_obj.write2hdf5(data=trop_data,
                          dates=date_list,
                          metadata=atr,
                          refFile=inps.timeseries_file)

    # Delete temporary DEM file in ROI_PAC format
    if inps.geom_file:
        temp_files = [fname for fname in [inps.dem_file, inps.inc_angle_file, inps.lat_file, inps.lon_file]
                      if (fname is not None and 'pyaps' in fname)]
        if temp_files:
            print('delete temporary geometry files')
            rmCmd = 'rm '
            for fname in temp_files:
                rmCmd += ' {f} {f}.rsc '.format(f=fname)
            print(rmCmd)
            os.system(rmCmd)
    return

def run_geocode(inps):
    """geocode all input files"""
    start_time = time.time()

    # Prepare geometry for geocoding
    res_obj = resample(lookupFile=inps.lookupFile,
                       dataFile=inps.file[0],
                       SNWE=inps.SNWE,
                       laloStep=inps.laloStep,
                       processor=inps.processor)
    res_obj.open()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50 + '\nresampling file: {}'.format(infile))
        ext = os.path.splitext(infile)[1]
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        outfile = auto_output_filename(infile, inps)
        if inps.updateMode and ut.run_or_skip(outfile, in_file=[infile, inps.lookupFile]) == 'skip':
            print('update mode is ON, skip geocoding.')
            continue

        # read source data and resample
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        dsResDict = dict()
        for dsName in dsNames:
            print('reading {d:<{w}} from {f} ...'.format(
                d=dsName, w=maxDigit, f=os.path.basename(infile)))
            if ext in ['.h5', '.he5']:
                data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
            else:
                data, atr = readfile.read(infile, datasetName=dsName, print_msg=False)

            # keep timeseries data as 3D matrix when there is only one acquisition
            # because readfile.read() will squeeze it to 2D
            if atr['FILE_TYPE'] == 'timeseries' and len(data.shape) == 2:
                data = np.reshape(data, (1, data.shape[0], data.shape[1]))

            res_data = res_obj.run_resample(src_data=data,
                                            interp_method=inps.interpMethod,
                                            fill_value=inps.fillValue,
                                            nprocs=inps.nprocs,
                                            print_msg=True)
            dsResDict[dsName] = res_data

        # update metadata
        if inps.radar2geo:
            atr = metadata_radar2geo(atr, res_obj)
        else:
            atr = metadata_geo2radar(atr, res_obj)
        #if len(dsNames) == 1 and dsName not in ['timeseries']:
        #    atr['FILE_TYPE'] = dsNames[0]
        #    infile = None

        writefile.write(dsResDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time() - start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return outfile

def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip'
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
        return

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    else:
        inps.lat, inps.lon = get_lat_lon(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)
    #trop_data_abs = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    # NOTE: "verbose" is expected to be a module-level flag defined elsewhere
    if verbose:
        prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        #trop_data_abs[i] = get_delay_abs(grib_file, inps)
        if verbose:
            prog_bar.update(i+1, suffix=os.path.basename(grib_file))
    if verbose:
        prog_bar.close()

    # convert to relative phase delay on the reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: '+inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    #ts_obj.write2hdf5(data=trop_data,
    #                  outFile='ERA5_abs.h5',
    #                  dates=date_list,
    #                  metadata=atr,
    #                  refFile=inps.timeseries_file)
    return
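# A hedged sketch of what a get_lat_lon()-style helper can do for a geo-coded
# dataset: build 2D latitude/longitude grids from the Y/X_FIRST and Y/X_STEP
# attributes. lat_lon_from_metadata() is a hypothetical stand-in for
# illustration, not MintPy's actual implementation.
import numpy as np

def lat_lon_from_metadata(meta):
    length, width = int(meta['LENGTH']), int(meta['WIDTH'])
    lats = float(meta['Y_FIRST']) + np.arange(length) * float(meta['Y_STEP'])
    lons = float(meta['X_FIRST']) + np.arange(width) * float(meta['X_STEP'])
    lon2d, lat2d = np.meshgrid(lons, lats)
    return lat2d, lon2d

meta = {'LENGTH': '3', 'WIDTH': '4', 'Y_FIRST': '34.00', 'Y_STEP': '-0.01',
        'X_FIRST': '-118.00', 'X_STEP': '0.01'}
lat, lon = lat_lon_from_metadata(meta)
assert lat.shape == lon.shape == (3, 4)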
def _read_template(self):
    # read custom template, to:
    # 1) update default template
    # 2) add metadata to ifgramStack file and HDF-EOS5 file
    self.customTemplate = None
    if self.customTemplateFile:
        cfile = self.customTemplateFile

        # copy custom template file to inputs directory for backup
        inputs_dir = os.path.join(self.workDir, 'inputs')
        if not os.path.isdir(inputs_dir):
            os.makedirs(inputs_dir)
            print('create directory:', inputs_dir)
        if ut.run_or_skip(out_file=os.path.join(inputs_dir, os.path.basename(cfile)),
                          in_file=cfile,
                          check_readable=False) == 'run':
            shutil.copy2(cfile, inputs_dir)
            print('copy {} to inputs directory for backup.'.format(os.path.basename(cfile)))

        # read custom template
        print('read custom template file:', cfile)
        cdict = readfile.read_template(cfile)

        # correct some loose type errors
        standardValues = {'def': 'auto', 'default': 'auto',
                          'y': 'yes', 'on': 'yes', 'true': 'yes',
                          'n': 'no', 'off': 'no', 'false': 'no'}
        for key, value in cdict.items():
            if value in standardValues.keys():
                cdict[key] = standardValues[value]

        for key in ['mintpy.deramp', 'mintpy.troposphericDelay.method']:
            if key in cdict.keys():
                cdict[key] = cdict[key].lower().replace('-', '_')

        if 'processor' in cdict.keys():
            cdict['mintpy.load.processor'] = cdict['processor']

        # these metadata are used in load_data.py only, not needed afterwards
        # (in order to manually add extra offset when the lookup table is shifted)
        # (seen in ROI_PAC product sometimes)
        for key in ['SUBSET_XMIN', 'SUBSET_YMIN']:
            if key in cdict.keys():
                cdict.pop(key)

        self.customTemplate = dict(cdict)

        # update default template file based on custom template
        print('update default template based on input custom template')
        self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)

    print('read default template file:', self.templateFile)
    self.template = readfile.read_template(self.templateFile)
    self.template = ut.check_template_auto_value(self.template)

    # correct some loose setup conflicts
    if self.template['mintpy.geocode'] is False:
        for key in ['mintpy.save.hdfEos5', 'mintpy.save.kmz']:
            if self.template[key] is True:
                self.template['mintpy.geocode'] = True
                print('Turn ON mintpy.geocode in order to run {}.'.format(key))
                break
    return
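# A minimal sketch, with a made-up template dict, of the loose-value
# normalization in _read_template() above: free-form yes/no spellings are
# mapped to the canonical 'yes'/'no'/'auto' before the template is interpreted.
standardValues = {'def': 'auto', 'default': 'auto',
                  'y': 'yes', 'on': 'yes', 'true': 'yes',
                  'n': 'no', 'off': 'no', 'false': 'no'}
cdict = {'mintpy.geocode': 'on', 'mintpy.save.kmz': 'false', 'mintpy.deramp': 'linear'}
cdict = {key: standardValues.get(value, value) for key, value in cdict.items()}
assert cdict == {'mintpy.geocode': 'yes', 'mintpy.save.kmz': 'no', 'mintpy.deramp': 'linear'}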
def run_tropospheric_delay_correction(self, step_name):
    """Correct tropospheric delays."""
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    mask_file = 'maskTempCoh.h5'

    fnames = self.get_timeseries_filename(self.template)[step_name]
    in_file = fnames['input']
    out_file = fnames['output']
    if in_file != out_file:
        poly_order = self.template['mintpy.troposphericDelay.polyOrder']
        tropo_model = self.template['mintpy.troposphericDelay.weatherModel']
        weather_dir = self.template['mintpy.troposphericDelay.weatherDir']
        method = self.template['mintpy.troposphericDelay.method']

        def get_dataset_size(fname):
            atr = readfile.read_attribute(fname)
            return (atr['LENGTH'], atr['WIDTH'])

        # phase/elevation ratio (Doin et al., 2009)
        if method == 'height_correlation':
            tropo_look = self.template['mintpy.troposphericDelay.looks']
            tropo_min_cor = self.template['mintpy.troposphericDelay.minCorrelation']
            scp_args = '{f} -g {g} -p {p} -m {m} -o {o} -l {l} -t {t}'.format(
                f=in_file, g=geom_file, p=poly_order, m=mask_file,
                o=out_file, l=tropo_look, t=tropo_min_cor)
            print('tropospheric delay correction with height-correlation approach')
            print('tropo_phase_elevation.py', scp_args)
            if ut.run_or_skip(out_file=out_file, in_file=in_file) == 'run':
                mintpy.tropo_phase_elevation.main(scp_args.split())

        # weather re-analysis data (Jolivet et al., 2011; 2014)
        elif method == 'pyaps':
            scp_args = '-f {f} --model {m} -g {g} -w {w}'.format(
                f=in_file, m=tropo_model, g=geom_file, w=weather_dir)
            print('Atmospheric correction using weather re-analysis dataset (PyAPS, Jolivet et al., 2011)')
            print('weather re-analysis dataset:', tropo_model)
            tropo_file = './inputs/{}.h5'.format(tropo_model)
            if ut.run_or_skip(out_file=out_file, in_file=[in_file, tropo_file]) == 'run':
                if os.path.isfile(tropo_file) and get_dataset_size(tropo_file) == get_dataset_size(in_file):
                    scp_args = '{f} {t} -o {o} --force'.format(f=in_file, t=tropo_file, o=out_file)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(tropo_file))
                    print('diff.py', scp_args)
                    mintpy.diff.main(scp_args.split())
                else:
                    if tropo_model in ['ERA5']:
                        from mintpy import tropo_pyaps3
                        print('tropo_pyaps3.py', scp_args)
                        tropo_pyaps3.main(scp_args.split())
                    else:
                        # opt 1 - use tropo_pyaps as a python module and call its main function
                        # preferred, but disabled for now to stay compatible with python2-pyaps
                        #from mintpy import tropo_pyaps
                        #print('tropo_pyaps.py', scp_args)
                        #tropo_pyaps.main(scp_args.split())

                        # opt 2 - use tropo_pyaps as an executable script
                        # will be deprecated after python3-pyaps is fully functional
                        cmd = 'tropo_pyaps.py ' + scp_args
                        print(cmd)
                        status = subprocess.Popen(cmd, shell=True).wait()
    else:
        print('No tropospheric delay correction.')
    return
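# The calling pattern used throughout this step - build a CLI-style argument
# string, then pass scp_args.split() to a script's main() - in a
# self-contained toy form. The argparse parser below is hypothetical; MintPy's
# diff.py and tropo_*.py scripts define their own.
import argparse

def toy_main(iargs=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('file')
    parser.add_argument('-o', dest='outfile')
    parser.add_argument('--force', action='store_true')
    return parser.parse_args(iargs)

scp_args = '{f} -o {o} --force'.format(f='timeseries.h5', o='timeseries_ERA5.h5')
inps = toy_main(scp_args.split())
assert inps.force and inps.outfile == 'timeseries_ERA5.h5'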
def calculate_delay_timeseries(inps):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
    Returns:    tropo_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        shape = (int(atr['LENGTH']), int(atr['WIDTH']))
        return shape

    # check existing tropo delay file
    if (ut.run_or_skip(out_file=inps.tropo_file, in_file=inps.grib_files, print_msg=False) == 'skip'
            and get_dataset_size(inps.tropo_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.tropo_file))
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        # for dataset in geo OR radar coord with lookup table in radar coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    elif 'Y_FIRST' in geom_obj.metadata:
        # for geo-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)
    else:
        # for radar-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon_rdc(geom_obj.metadata)

    # calculate phase delay
    length, width = int(inps.atr['LENGTH']), int(inps.atr['WIDTH'])
    num_date = len(inps.grib_files)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_files]
    tropo_data = np.zeros((num_date, length, width), np.float32)

    print('\n------------------------------------------------------------------------------')
    print('calculating absolute delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_files[i]
        tropo_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i+1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # remove metadata related with double reference
    # because absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in inps.atr.keys():
            inps.atr.pop(key)

    # write tropospheric delay to HDF5
    ts_obj = timeseries(inps.tropo_file)
    ts_obj.write2hdf5(data=tropo_data,
                      dates=date_list,
                      metadata=inps.atr,
                      refFile=inps.timeseries_file)
    return inps.tropo_file
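# A toy illustration (invented metadata values) of the clean-up above: since
# calculate_delay_timeseries() saves absolute rather than double-referenced
# delays, the spatial/temporal reference attributes are dropped before writing.
atr = {'LENGTH': '100', 'WIDTH': '200', 'REF_DATE': '20150101',
       'REF_X': '50', 'REF_Y': '60', 'REF_LAT': '34.10', 'REF_LON': '-118.20'}
for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
    atr.pop(key, None)
assert sorted(atr.keys()) == ['LENGTH', 'WIDTH']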
def _read_template(self):
    # 1) update default template
    self.customTemplate = None
    if self.customTemplateFile:
        # customTemplateFile --> customTemplate
        print('read custom template file:', self.customTemplateFile)
        cdict = readfile.read_template(self.customTemplateFile)

        # correct some loose type errors
        standardValues = {'def': 'auto', 'default': 'auto',
                          'y': 'yes', 'on': 'yes', 'true': 'yes',
                          'n': 'no', 'off': 'no', 'false': 'no'}
        for key, value in cdict.items():
            if value in standardValues.keys():
                cdict[key] = standardValues[value]

        for key in ['mintpy.deramp', 'mintpy.troposphericDelay.method']:
            if key in cdict.keys():
                cdict[key] = cdict[key].lower().replace('-', '_')

        if 'processor' in cdict.keys():
            cdict['mintpy.load.processor'] = cdict['processor']

        # these metadata are used in load_data.py only, not needed afterwards
        # (in order to manually add extra offset when the lookup table is shifted)
        # (seen in ROI_PAC product sometimes)
        for key in ['SUBSET_XMIN', 'SUBSET_YMIN']:
            if key in cdict.keys():
                cdict.pop(key)

        self.customTemplate = dict(cdict)

        # customTemplate --> templateFile
        print('update default template based on input custom template')
        self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)

    # 2) backup custom/default template files in the inputs/pic folders
    for backup_dirname in ['inputs', 'pic']:
        backup_dir = os.path.join(self.workDir, backup_dirname)
        # create directory
        os.makedirs(backup_dir, exist_ok=True)

        # back up to the directory
        for tfile in [self.customTemplateFile, self.templateFile]:
            if tfile and ut.run_or_skip(out_file=os.path.join(backup_dir, os.path.basename(tfile)),
                                        in_file=tfile,
                                        check_readable=False,
                                        print_msg=False) == 'run':
                shutil.copy2(tfile, backup_dir)
                print('copy {} to {} directory for backup.'.format(os.path.basename(tfile),
                                                                   os.path.basename(backup_dir)))

    # 3) read default template file
    print('read default template file:', self.templateFile)
    self.template = readfile.read_template(self.templateFile)
    self.template = ut.check_template_auto_value(self.template)

    # correct some loose setup conflicts
    if self.template['mintpy.geocode'] is False:
        for key in ['mintpy.save.hdfEos5', 'mintpy.save.kmz']:
            if self.template[key] is True:
                self.template['mintpy.geocode'] = True
                print('Turn ON mintpy.geocode in order to run {}.'.format(key))
                break
    return
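# A hedged sketch of the run-or-skip decision behind the template backup step
# above: copy only when the backup is missing or older than the source file.
# backup_if_newer() is a simplified stand-in that mirrors the spirit of
# ut.run_or_skip() + shutil.copy2(), not MintPy's exact implementation.
import os
import shutil

def backup_if_newer(src, backup_dir):
    os.makedirs(backup_dir, exist_ok=True)
    dst = os.path.join(backup_dir, os.path.basename(src))
    if not os.path.isfile(dst) or os.path.getmtime(dst) < os.path.getmtime(src):
        shutil.copy2(src, dst)
        print('copy {} to {} directory for backup.'.format(os.path.basename(src), backup_dir))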