def run_network_modification(self, step_name):
    """Modify network of interferograms before the network inversion.

    Runs modify_network.py, re-plots the network figure when inputs changed,
    then regenerates the auxiliary files (maskConnComp / avgSpatialCoh).
    """
    # check the existence of ifgramStack.h5
    stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
    coh_txt = '{}_coherence_spatialAvg.txt'.format(
        os.path.splitext(os.path.basename(stack_file))[0])

    # first existing network figure, or None when neither location has one.
    # (was try/[...][0]/bare-except, which silently swallowed unrelated
    # exceptions; next() with a default expresses the intent directly)
    net_fig = next((i for i in ['Network.pdf', 'PIC/Network.pdf']
                    if os.path.isfile(i)), None)

    # 1) modify network
    scp_args = '{} -t {}'.format(stack_file, self.templateFile)
    print('modify_network.py', scp_args)
    pysar.modify_network.main(scp_args.split())

    # 2) plot network (only when figure is missing or older than its inputs)
    scp_args = '{} -t {} --nodisplay'.format(stack_file, self.templateFile)
    print('\nplot_network.py', scp_args)
    if ut.run_or_skip(out_file=net_fig,
                      in_file=[stack_file, coh_txt, self.templateFile],
                      check_readable=False) == 'run':
        pysar.plot_network.main(scp_args.split())

    # 3) aux files: maskConnComp and avgSpatialCoh
    self.generate_ifgram_aux_file()
    return
def generate_temporal_coherence_mask(self):
    """Generate reliable pixel mask from temporal coherence.

    Thresholds temporalCoherence.h5 at pysar.networkInversion.minTempCoh
    (masking shadow areas via the geometry file) to write maskTempCoh.h5.
    Re-runs only when the output is stale or the threshold key changed.

    Raises:
        RuntimeError: if fewer reliable pixels than
            pysar.networkInversion.minNumPixel remain.
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    tcoh_file = 'temporalCoherence.h5'
    mask_file = 'maskTempCoh.h5'
    tcoh_min = self.template['pysar.networkInversion.minTempCoh']
    scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
    print('generate_mask.py', scp_args)

    # update mode: run only if:
    # 1) output file exists and newer than input file, AND
    # 2) all config keys are the same
    config_keys = ['pysar.networkInversion.minTempCoh']
    print('update mode: ON')
    flag = 'skip'
    if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file, print_msg=False) == 'run':
        flag = 'run'
    else:
        print('1) output file: {} already exists and newer than input file: {}'.format(mask_file, tcoh_file))
        # compare the template value against the config key previously
        # stamped into the mask file's attributes ('False' when never set)
        atr = readfile.read_attribute(mask_file)
        if any(str(self.template[i]) != atr.get(i, 'False') for i in config_keys):
            flag = 'run'
            print('2) NOT all key configration parameters are the same: {}'.format(config_keys))
        else:
            print('2) all key configuration parameters are the same: {}'.format(config_keys))
    print('run or skip: {}'.format(flag))

    if flag == 'run':
        pysar.generate_mask.main(scp_args.split())
        # update configKeys: stamp the threshold used into the mask file
        # so the next run can detect a changed template value
        atr = {}
        for key in config_keys:
            atr[key] = self.template[key]
        ut.add_attribute(mask_file, atr)

    # check number of pixels selected in mask file for following analysis
    num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
    print('number of reliable pixels: {}'.format(num_pixel))

    min_num_pixel = float(self.template['pysar.networkInversion.minNumPixel'])
    if num_pixel < min_num_pixel:
        msg = "Not enough reliable pixels (minimum of {}). ".format(int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    return
def run_unwrap_error_correction(self, step_name):
    """Correct phase-unwrapping errors"""
    method = self.template['pysar.unwrapError.method']
    if not method:
        print('phase-unwrapping error correction is OFF.')
        return

    # required inputs: loaded ifgram stack + common connected-component mask
    ifgram_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
    cc_mask_file = 'maskConnComp.h5'

    args_bridge = '{} -t {} --update'.format(ifgram_file, self.templateFile)
    args_closure = '{} {} -t {} --update'.format(ifgram_file, cc_mask_file, self.templateFile)

    from pysar import unwrap_error_bridging, unwrap_error_phase_closure
    if method == 'bridging':
        unwrap_error_bridging.main(args_bridge.split())
    elif method == 'phase_closure':
        unwrap_error_phase_closure.main(args_closure.split())
    elif method == 'bridging+phase_closure':
        # chain the two: bridging writes an intermediate dataset which the
        # phase-closure step then reads and refines
        args_bridge += ' -i unwrapPhase -o unwrapPhase_bridging'
        unwrap_error_bridging.main(args_bridge.split())
        args_closure += ' -i unwrapPhase_bridging -o unwrapPhase_bridging_phaseClosure'
        unwrap_error_phase_closure.main(args_closure.split())
    else:
        raise ValueError('un-recognized method: {}'.format(method))
    return
def run_geocode(self, step_name):
    """geocode data files in radar coordinates into ./GEOCODE folder."""
    if not self.template['pysar.geocode']:
        print('geocoding is OFF')
        return

    ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
    atr = readfile.read_attribute(ts_file)
    if 'Y_FIRST' in atr.keys():
        # data already carries geo metadata; nothing to geocode
        return

    # 1. geocode the main products into ./GEOCODE
    out_dir = os.path.join(self.workDir, 'GEOCODE')
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
        print('create directory:', out_dir)

    geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
    in_files = [geom_file, 'temporalCoherence.h5', ts_file, 'velocity.h5']
    scp_args = '-l {l} -t {t} --outdir {o} --update '.format(
        l=lookup_file, t=self.templateFile, o=out_dir)
    scp_args += ''.join(' {}'.format(fname) for fname in in_files)
    print('geocode.py', scp_args)
    pysar.geocode.main(scp_args.split())

    # 2. generate reliable pixel mask in geo coordinate
    geom_file = os.path.join(out_dir, 'geo_{}'.format(os.path.basename(geom_file)))
    tcoh_file = os.path.join(out_dir, 'geo_temporalCoherence.h5')
    mask_file = os.path.join(out_dir, 'geo_maskTempCoh.h5')
    tcoh_min = self.template['pysar.networkInversion.minTempCoh']
    scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
    print('generate_mask.py', scp_args)
    if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file) == 'run':
        pysar.generate_mask.main(scp_args.split())
    return
def run_ifgram_stacking(self, step_name):
    """Traditional interferograms stacking."""
    # average phase velocity from the loaded ifgramStack.h5
    ifgram_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
    out_file = 'avgPhaseVelocity.h5'
    iargs = '{} --dataset unwrapPhase -o {} --update'.format(ifgram_file, out_file)
    print('temporal_average.py', iargs)
    pysar.temporal_average.main(iargs.split())
    return
def run_load_data(self, step_name):
    """Load InSAR stacks into HDF5 files in ./INPUTS folder.

    It 1) copy auxiliary files into work directory (for Unvi of Miami only)
       2) load all interferograms stack files into PYSAR/INPUTS directory.
       3) check loading result
       4) add custom metadata (optional, for HDF-EOS5 format only)

    Raises:
        RuntimeError: when not all required datasets were loaded.
    """
    # 1) copy aux files (optional)
    self._copy_aux_file()

    # 2) loading data
    scp_args = '--template {}'.format(self.templateFile)
    if self.customTemplateFile:
        scp_args += ' {}'.format(self.customTemplateFile)
    if self.projectName:
        scp_args += ' --project {}'.format(self.projectName)
    # run
    print("load_data.py", scp_args)
    pysar.load_data.main(scp_args.split())
    os.chdir(self.workDir)

    # 3) check loading result
    load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

    # 3.1) output waterMask.h5 as a standalone copy of the geometry dataset
    water_mask_file = 'waterMask.h5'
    if 'waterMask' in readfile.get_dataset_list(geom_file):
        print('generate {} from {} for conveniency'.format(water_mask_file, geom_file))
        if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
            water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
            atr['FILE_TYPE'] = 'waterMask'
            writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

    # 4) add custom metadata (optional)
    if self.customTemplateFile:
        print('updating {}, {} metadata based on custom template file: {}'.format(
            os.path.basename(stack_file),
            os.path.basename(geom_file),
            os.path.basename(self.customTemplateFile)))
        # use ut.add_attribute() instead of add_attribute.py because of
        # better control of special metadata, such as SUBSET_X/YMIN
        ut.add_attribute(stack_file, self.customTemplate)
        ut.add_attribute(geom_file, self.customTemplate)

    # 5) if not load_complete, plot and raise exception
    if not load_complete:
        # plot result if error occured
        # NOTE(review): 'plot' is not defined in this method — presumably a
        # module-level flag; confirm against the rest of the file
        self.plot_result(print_aux=False, plot=plot)

        # go back to original directory
        print('Go back to directory:', self.cwd)
        os.chdir(self.cwd)

        # raise error
        msg = 'step {}: NOT all required dataset found, exit.'.format(step_name)
        raise RuntimeError(msg)
    return
def run_tropospheric_delay_correction(self, step_name):
    """Correct tropospheric delays.

    Skipped entirely when this step's input and output time-series file
    names are identical (i.e. the correction is disabled in the template).
    Supported methods: 'height_correlation' and 'pyaps'.
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    mask_file = 'maskTempCoh.h5'
    fnames = self.get_timeseries_filename(self.template)[step_name]
    in_file = fnames['input']
    out_file = fnames['output']
    if in_file != out_file:
        poly_order = self.template['pysar.troposphericDelay.polyOrder']
        tropo_model = self.template['pysar.troposphericDelay.weatherModel']
        weather_dir = self.template['pysar.troposphericDelay.weatherDir']
        method = self.template['pysar.troposphericDelay.method']

        def get_dataset_size(fname):
            # (LENGTH, WIDTH) string pair from file metadata, used to check
            # that an existing delay file matches the time-series grid
            atr = readfile.read_attribute(fname)
            return (atr['LENGTH'], atr['WIDTH'])

        # Phase/Elevation Ratio (Doin et al., 2009)
        if method == 'height_correlation':
            scp_args = '{f} -g {g} -p {p} -m {m} -o {o}'.format(f=in_file,
                                                                g=geom_file,
                                                                p=poly_order,
                                                                m=mask_file,
                                                                o=out_file)
            print('tropospheric delay correction with height-correlation approach')
            print('tropo_phase_elevation.py', scp_args)
            if ut.run_or_skip(out_file=out_file, in_file=in_file) == 'run':
                pysar.tropo_phase_elevation.main(scp_args.split())

        # Weather Re-analysis Data (Jolivet et al., 2011;2014)
        elif method == 'pyaps':
            scp_args = '-f {f} --model {m} -g {g} -w {w}'.format(f=in_file,
                                                                 m=tropo_model,
                                                                 g=geom_file,
                                                                 w=weather_dir)
            print('Atmospheric correction using Weather Re-analysis dataset (PyAPS, Jolivet et al., 2011)')
            print('Weather Re-analysis dataset:', tropo_model)
            print('tropo_pyaps.py ', scp_args)
            tropo_file = './INPUTS/{}.h5'.format(tropo_model)
            if ut.run_or_skip(out_file=out_file, in_file=[in_file, tropo_file]) == 'run':
                if os.path.isfile(tropo_file) and get_dataset_size(tropo_file) == get_dataset_size(in_file):
                    # reuse the existing delay file: just difference it out
                    scp_args = '{f} {t} -o {o} --force'.format(f=in_file,
                                                               t=tropo_file,
                                                               o=out_file)
                    print('--------------------------------------------')
                    print('Use existed tropospheric delay file: {}'.format(tropo_file))
                    print('diff.py', scp_args)
                    pysar.diff.main(scp_args.split())
                else:
                    # compute the delay from scratch with the '-f ...' args
                    # built above (scp_args was not overwritten here)
                    from pysar import tropo_pyaps
                    tropo_pyaps.main(scp_args.split())
    else:
        print('No tropospheric delay correction.')
    return
def run_reference_point(self, step_name):
    """Select reference point. It
    1) generate mask file from common conn comp
    2) generate average spatial coherence and its mask
    3) add REF_X/Y and/or REF_LAT/LON attribute to stack file
    """
    ifgram_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
    coh_file = 'avgSpatialCoh.h5'
    iargs = '{} -t {} -c {}'.format(ifgram_file, self.templateFile, coh_file)
    print('reference_point.py', iargs)
    pysar.reference_point.main(iargs.split())
    return
def run_topographic_residual_correction(self, step_name):
    """step - correct_topography
    Topographic residual (DEM error) correction (optional).
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    fnames = self.get_timeseries_filename(self.template)[step_name]
    # identical input/output names mean the correction is disabled
    if fnames['input'] == fnames['output']:
        print('No topographic residual correction.')
        return

    iargs = '{f} -g {g} -t {t} -o {o} --update '.format(f=fnames['input'],
                                                        g=geom_file,
                                                        t=self.templateFile,
                                                        o=fnames['output'])
    print('dem_error.py', iargs)
    pysar.dem_error.main(iargs.split())
    return
def run_network_inversion(self, step_name):
    """Invert network of interferograms for raw phase time-series.
    1) network inversion --> timeseries.h5, temporalCoherence.h5, numInvIfgram.h5
    2) temporalCoherence.h5 --> maskTempCoh.h5
    """
    # check the existence of ifgramStack.h5
    ifgram_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]

    # 1) invert the stack into a raw phase time-series
    iargs = '{} -t {} --update '.format(ifgram_file, self.templateFile)
    print('ifgram_inversion.py', iargs)
    pysar.ifgram_inversion.main(iargs.split())

    # 2) reliable pixel mask from temporal coherence: maskTempCoh.h5
    self.generate_temporal_coherence_mask()
    return
def run_save2hdfeos5(self, step_name):
    """Save displacement time-series and its aux data in geo coordinate into HDF-EOS5 format.

    Only runs when pysar.save.hdfEos5 is True. Picks geocoded aux files
    when the input time-series already lives under ./GEOCODE.
    """
    if self.template['pysar.save.hdfEos5'] is True:
        # input
        ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
        # Add attributes from custom template to timeseries file
        if self.customTemplate is not None:
            ut.add_attribute(ts_file, self.customTemplate)

        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'geo_maskTempCoh.h5'
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        if 'GEOCODE' in ts_file:
            tcoh_file = './GEOCODE/geo_temporalCoherence.h5'
            mask_file = './GEOCODE/geo_maskTempCoh.h5'
            geom_file = './GEOCODE/geo_{}'.format(os.path.basename(geom_file))

        # cmd
        print('--------------------------------------------')
        scp_args = '{f} -c {c} -m {m} -g {g} -t {t}'.format(f=ts_file,
                                                            c=tcoh_file,
                                                            m=mask_file,
                                                            g=geom_file,
                                                            t=self.templateFile)
        print('save_hdfeos5.py', scp_args)

        # output (check existing file); the glob returns an empty list when
        # no HDF-EOS5 file exists yet — catch only that IndexError instead
        # of a bare except that would hide real failures
        atr = readfile.read_attribute(ts_file)
        SAT = sensor.get_unavco_mission_name(atr)
        try:
            hdfeos5_file = get_file_list('{}_*.he5'.format(SAT))[0]
        except IndexError:
            hdfeos5_file = None

        if ut.run_or_skip(out_file=hdfeos5_file,
                          in_file=[ts_file, tcoh_file, mask_file, geom_file]) == 'run':
            pysar.save_hdfeos5.main(scp_args.split())
    else:
        print('save time-series to HDF-EOS5 format is OFF.')
    return
def generate_ifgram_aux_file(self):
    """Generate auxiliary files from ifgramStack file, including: """
    ifgram_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]

    # 1) generate mask file from the common connected components
    iargs = '{} --nonzero -o {} --update'.format(ifgram_file, 'maskConnComp.h5')
    print('\ngenerate_mask.py', iargs)
    pysar.generate_mask.main(iargs.split())

    # 2) generate average spatial coherence
    iargs = '{} --dataset coherence -o {} --update'.format(ifgram_file, 'avgSpatialCoh.h5')
    print('\ntemporal_average.py', iargs)
    pysar.temporal_average.main(iargs.split())
    return
def run_local_oscillator_drift_correction(self, step_name):
    """Correct local oscillator drift (LOD).
    Automatically applied for Envisat data.
    Automatically skipped for all the other data.
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    fnames = self.get_timeseries_filename(self.template)[step_name]
    # identical input/output names mean LOD correction does not apply
    if fnames['input'] == fnames['output']:
        sat = readfile.read_attribute(fnames['input']).get('PLATFORM', None)
        print('No local oscillator drift correction is needed for {}.'.format(sat))
        return

    iargs = '{} {} -o {}'.format(fnames['input'], geom_file, fnames['output'])
    print('local_oscilator_drift.py', iargs)
    if ut.run_or_skip(out_file=fnames['output'], in_file=fnames['input']) == 'run':
        pysar.local_oscilator_drift.main(iargs.split())
    return
def main(iargs=None):
    """PySAR routine processing workflow: load, correct, invert, post-process.

    Fixes applied: 'status is not 0' identity checks replaced with '!= 0'
    (identity comparison with an int literal is unreliable and a
    SyntaxWarning on modern CPython); bare 'except:' narrowed to the
    IndexError actually expected; dead commented-out code removed.
    """
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)

    # Project Name
    inps.projectName = None
    if inps.customTemplateFile:
        inps.customTemplateFile = os.path.abspath(inps.customTemplateFile)
        inps.projectName = os.path.splitext(os.path.basename(inps.customTemplateFile))[0]
        print('Project name:', inps.projectName)

    # Work directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'), inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory:", inps.workDir)

    copy_aux_file(inps)
    inps, template, customTemplate = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n********** Load Data **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.customTemplateFile:
        loadCmd += ' {}'.format(inps.customTemplateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)
    print('-'*50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    if inps.customTemplateFile:
        # use ut.add_attribute() instead of add_attribute.py because of
        # better control of special metadata, such as SUBSET_X/YMIN
        print('updating {} metadata based on custom template file: {}'.format(
            os.path.basename(inps.stackFile), inps.customTemplateFile))
        ut.add_attribute(inps.stackFile, customTemplate)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributtes for a fresh re-run.\n'+'-'*50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n********** Generate Auxiliary Files **********')
    inps.waterMaskFile = 'waterMask.h5'
    if not os.path.isfile(inps.waterMaskFile):
        inps.waterMaskFile = None

    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    maskCmd = 'generate_mask.py {} --nonzero -o {} --update'.format(inps.stackFile, inps.maskFile)
    print(maskCmd)
    status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average phase velocity - Stacking
    inps.avgPhaseVelFile = 'avgPhaseVelocity.h5'
    avgCmd = 'temporal_average.py {i} --dataset unwrapPhase -o {o} --update'.format(
        i=inps.stackFile, o=inps.avgPhaseVelFile)
    print(avgCmd)
    status = subprocess.Popen(avgCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    avgCmd = 'temporal_average.py {i} --dataset coherence -o {o} --update'.format(
        i=inps.stackFile, o=inps.avgSpatialCohFile)
    print(avgCmd)
    status = subprocess.Popen(avgCmd, shell=True).wait()

    # mask based on average spatial coherence
    inps.maskSpatialCohFile = 'maskSpatialCoh.h5'
    if ut.run_or_skip(out_file=inps.maskSpatialCohFile, in_file=inps.avgSpatialCohFile) == 'run':
        maskCmd = 'generate_mask.py {i} -m 0.7 -o {o}'.format(
            i=inps.avgSpatialCohFile, o=inps.maskSpatialCohFile)
        if inps.waterMaskFile:
            maskCmd += ' --base {}'.format(inps.waterMaskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()

    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n********** Select Reference Point **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(
        inps.stackFile, inps.templateFile, inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    correct_unwrap_error(inps, template)

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n********** Modify Network **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile, inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(inps.stackFile, inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    # first existing network figure, None when neither location has one
    try:
        outFile = [i for i in ['Network.pdf', 'PIC/Network.pdf'] if os.path.isfile(i)][0]
    except IndexError:
        outFile = None
    if ut.run_or_skip(out_file=outFile,
                      in_file=[inps.stackFile, inps.cohSpatialAvgFile, inps.templateFile],
                      check_readable=False) == 'run':
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print('\n********** Invert Network of Interferograms into Time-series **********')
    invCmd = 'ifgram_inversion.py {} --template {} --update '.format(inps.stackFile, inps.templateFile)
    if inps.fast:
        invCmd += ' --fast'
    if inps.waterMaskFile:
        invCmd += ' -m {}'.format(inps.waterMaskFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    inps.timeseriesFiles = ['timeseries.h5']  # all ts files
    status = subprocess.Popen(invCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    get_temporal_coherence_mask(inps, template)

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print('\n********** Local Oscillator Drift Correction for Envisat **********')
        outName = os.path.splitext(inps.timeseriesFile)[0]+'_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(
            inps.timeseriesFile, inps.geomFile, outName)
        print(lodCmd)
        if ut.run_or_skip(out_file=outName, in_file=[inps.timeseriesFile, inps.geomFile]) == 'run':
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName
        inps.timeseriesFiles.append(outName)

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n********** Tropospheric Delay Correction **********')
    correct_tropospheric_delay(inps, template)

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n********** Remove Phase Ramp **********')
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        ramp_list = ['linear', 'quadratic',
                     'linear_range', 'quadratic_range',
                     'linear_azimuth', 'quadratic_azimuth']
        if inps.derampMethod in ramp_list:
            outName = '{}_ramp.h5'.format(os.path.splitext(inps.timeseriesFile)[0])
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(
                inps.timeseriesFile, inps.derampMethod, inps.derampMaskFile, outName)
            print(derampCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.timeseriesFile) == 'run':
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
            inps.timeseriesFiles.append(outName)
        else:
            msg = 'un-recognized phase ramp method: {}'.format(inps.derampMethod)
            msg += '\navailable ramp types:\n{}'.format(ramp_list)
            raise ValueError(msg)
    else:
        print('No phase ramp removal.')

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print('\n********** Topographic Residual (DEM error) Correction **********')
    outName = os.path.splitext(inps.timeseriesFile)[0]+'_demErr.h5'
    topoCmd = 'dem_error.py {i} -t {t} -o {o} --update '.format(
        i=inps.timeseriesFile, t=inps.templateFile, o=outName)
    if not inps.fast:
        topoCmd += ' -g {}'.format(inps.geomFile)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        status = subprocess.Popen(topoCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
        inps.timeseriesFiles.append(outName)
    else:
        print('No correction for topographic residuals.')

    # Timeseries Residual Standard Deviation
    print('\n********** Timeseries Residual Root Mean Square **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile, inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    # Reference in Time
    print('\n********** Select Reference Date **********')
    if template['pysar.reference.date']:
        refCmd = 'reference_date.py -t {} '.format(inps.templateFile)
        for fname in inps.timeseriesFiles:
            refCmd += ' {}'.format(fname)
        print(refCmd)
        status = subprocess.Popen(refCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while changing reference date.\n')
    else:
        print('No reference change in time.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n********** Estimate Velocity **********')
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {} --update'.format(
        inps.timeseriesFile, inps.templateFile, inps.velFile)
    print(velCmd)
    status = subprocess.Popen(velCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(os.path.splitext(inps.velFile)[0], suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {} --update'.format(
            inps.tropFile, inps.templateFile, inps.tropVelFile)
        print(velCmd)
        status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    #     Geocodeing --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n********** Post-processing **********')
    if template['pysar.save.hdfEos5'] is True and template['pysar.geocode'] is False:
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                inps.velFile = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.velFile))
                inps.tempCohFile = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.timeseriesFile))
                inps.geomFile = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(
                inps.tempCohFile, inps.minTempCoh, outName)
            print(genCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.tempCohFile) == 'run':
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile, inps.maskFile, outName)
        print(maskCmd)
        if ut.run_or_skip(out_file=outName, in_file=[inps.velFile, inps.maskFile]) == 'run':
            status = subprocess.Popen(maskCmd, shell=True).wait()
        # empty glob result means masking produced no file
        try:
            inps.velFile = glob.glob(outName)[0]
        except IndexError:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        try:
            outFile = [i for i in [outName, 'PIC/{}'.format(outName)] if os.path.isfile(i)][0]
        except IndexError:
            outFile = None
        if ut.run_or_skip(out_file=outFile, in_file=inps.velFile, check_readable=False) == 'run':
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
    if template['pysar.save.hdfEos5'] is True:
        print('\n********** Save Time-series in HDF-EOS5 Format **********')
        save_hdfeos5(inps, customTemplate)

    #############################################
    # Plot Figures
    #############################################
    if template['pysar.plot']:
        plot_pysarApp(inps)

    #############################################
    # Timing                                    #
    #############################################
    m, s = divmod(time.time()-start_time, 60)
    print('\n###############################################')
    print('End of PySAR Routine Processing Workflow!')
    print('###############################################\n')
    print('time used: {:02.0f} mins {:02.1f} secs'.format(m, s))
def main(iargs=None):
    """Run the standard PySAR routine processing workflow.

    Parses the command line, moves into the work directory, then runs the
    processing steps in order (each step is an external script executed via
    subprocess): load data -> aux files -> reference point -> unwrap error
    correction -> network modification -> network inversion -> LOD /
    tropospheric / DEM-error corrections -> reference date -> deramp ->
    velocity estimation -> geocoding / masking / KMZ / HDF-EOS5 -> plots.

    Parameters: iargs : list of str, command line arguments (default: sys.argv)
    Raises:     SystemExit  - for the planned early exits (--version, --load-data, ...)
                Exception / RuntimeError - when a processing step fails
    """
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)

    # Project Name - derived from the custom template file name, if given
    inps.projectName = None
    if inps.templateFileCustom:
        inps.templateFileCustom = os.path.abspath(inps.templateFileCustom)
        inps.projectName = os.path.splitext(
            os.path.basename(inps.templateFileCustom))[0]
        print('Project name: ' + inps.projectName)

    # Work directory - default to $SCRATCHDIR/$PROJECT/PYSAR when auto-path
    # is enabled, otherwise the current directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'),
                                        inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory: " + inps.workDir)

    copy_aux_file(inps)
    inps, template, templateCustom = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n********** Load Data **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)

    print('-' * 50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    # if inps.templateFileCustom:
    #     atrCmd = 'add_attribute.py {} {}'.format(inps.stackFile, inps.templateFileCustom)
    #     print(atrCmd)
    #     status = subprocess.Popen(atrCmd, shell=True).wait()
    #ut.add_attribute(inps.stackFile, template)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributes for a fresh re-run.\n' + '-' * 50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n********** Generate Auxiliary Files **********')
    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    if ut.update_file(inps.maskFile, inps.stackFile):
        maskCmd = 'generate_mask.py {} --nonzero -o {}'.format(inps.stackFile,
                                                               inps.maskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    if ut.update_file(inps.avgSpatialCohFile, inps.stackFile):
        avgCmd = 'temporal_average.py {} --dataset coherence -o {}'.format(inps.stackFile,
                                                                           inps.avgSpatialCohFile)
        print(avgCmd)
        status = subprocess.Popen(avgCmd, shell=True).wait()

    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n********** Select Reference Point **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(inps.stackFile,
                                                             inps.templateFile,
                                                             inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    # NOTE: was `status is not 0` - identity check on an int literal; use !=
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    if template['pysar.unwrapError.method']:
        print('\n********** Unwrapping Error Correction **********')
        outName = '{}_unwCor.h5'.format(os.path.splitext(inps.stackFile)[0])
        unwCmd = 'unwrap_error.py {} --mask {} --template {}'.format(inps.stackFile,
                                                                     inps.maskFile,
                                                                     inps.templateFile)
        print(unwCmd)
        if ut.update_file(outName, inps.stackFile):
            print('This might take a while depending on the size of your data set!')
            status = subprocess.Popen(unwCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while correcting phase unwrapping errors.\n')
        inps.stackFile = outName

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n********** Modify Network **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile,
                                                     inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(inps.stackFile,
                                                                    inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    if ut.update_file('Network.pdf',
                      check_readable=False,
                      inFile=[inps.stackFile,
                              inps.cohSpatialAvgFile,
                              inps.templateFile]):
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print('\n********** Invert Network of Interferograms into Time-series **********')
    invCmd = 'ifgram_inversion.py {} --template {}'.format(inps.stackFile,
                                                           inps.templateFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    if ut.update_file(inps.timeseriesFile, inps.stackFile):
        status = subprocess.Popen(invCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                       inps.minTempCoh,
                                                       inps.maskFile)
    print(maskCmd)
    if ut.update_file(inps.maskFile, inps.tempCohFile):
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while generating mask file from temporal coherence.')

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enough coherent pixels selected (minimum of {}). ".format(int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print('\n********** Local Oscillator Drift Correction for Envisat **********')
        outName = os.path.splitext(inps.timeseriesFile)[0] + '_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(inps.timeseriesFile,
                                                               inps.geomFile,
                                                               outName)
        print(lodCmd)
        if ut.update_file(outName, [inps.timeseriesFile, inps.geomFile]):
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n********** Tropospheric Delay Correction **********')
    inps.tropPolyOrder = template['pysar.troposphericDelay.polyOrder']
    inps.tropModel = template['pysar.troposphericDelay.weatherModel']
    inps.tropMethod = template['pysar.troposphericDelay.method']
    try:
        fileList = [os.path.join(inps.workDir,
                                 'INPUTS/{}.h5'.format(inps.tropModel))]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        # no pre-computed tropospheric delay file found
        inps.tropFile = None

    if inps.tropMethod:
        # Check Conflict with base_trop_cor
        if template['pysar.deramp'] == 'base_trop_cor':
            msg = """
            Method Conflict: base_trop_cor is in conflict with {} option!
            base_trop_cor applies simultaneous ramp removal AND tropospheric correction.
            IGNORE base_trop_cor input and continue pysarApp.py.
            """
            # BUGFIX: the placeholder was never filled before warning
            warnings.warn(msg.format(inps.tropMethod))
            template['pysar.deramp'] = False

        fbase = os.path.splitext(inps.timeseriesFile)[0]
        # Call scripts
        if inps.tropMethod == 'height_correlation':
            # Phase/elevation-ratio approach
            outName = '{}_tropHgt.h5'.format(fbase)
            print('tropospheric delay correction with height-correlation approach')
            tropCmd = ('tropcor_phase_elevation.py {t} -d {d} -p {p}'
                       ' -m {m} -o {o}').format(t=inps.timeseriesFile,
                                                d=inps.geomFile,
                                                p=inps.tropPolyOrder,
                                                m=inps.maskFile,
                                                o=outName)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while correcting tropospheric delay.\n')
            inps.timeseriesFile = outName

        elif inps.tropMethod == 'pyaps':
            # Weather re-analysis approach (PyAPS)
            inps.weatherDir = template['pysar.troposphericDelay.weatherDir']
            outName = '{}_{}.h5'.format(fbase, inps.tropModel)
            print(('Atmospheric correction using Weather Re-analysis dataset'
                   ' (PyAPS, Jolivet et al., 2011)'))
            print('Weather Re-analysis dataset: ' + inps.tropModel)
            tropCmd = ('tropcor_pyaps.py -f {t} --model {m} --dem {d}'
                       ' -i {i} -w {w}').format(t=inps.timeseriesFile,
                                                m=inps.tropModel,
                                                d=inps.geomFile,
                                                i=inps.geomFile,
                                                w=inps.weatherDir)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                # reuse an existing delay file via simple differencing when available
                if inps.tropFile:
                    tropCmd = 'diff.py {} {} -o {}'.format(inps.timeseriesFile,
                                                           inps.tropFile,
                                                           outName)
                    print('--------------------------------------------')
                    print('Use existed tropospheric delay file: {}'.format(inps.tropFile))
                    print(tropCmd)
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    print('\nError while correcting tropospheric delay, try the following:')
                    print('1) Check the installation of PyAPS')
                    print('   http://earthdef.caltech.edu/projects/pyaps/wiki/Main')
                    print('   Try in command line: python -c "import pyaps"')
                    print('2) Use other tropospheric correction method, height-correlation, for example')
                    print('3) or turn off the option by setting pysar.troposphericDelay.method = no.\n')
                    raise RuntimeError()
            inps.timeseriesFile = outName
    else:
        print('No atmospheric delay correction.')

    # Grab tropospheric delay file (may have just been produced above)
    try:
        fileList = [os.path.join(inps.workDir,
                                 'INPUTS/{}.h5'.format(inps.tropModel))]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print('\n********** Topographic Residual (DEM error) Correction **********')
    outName = os.path.splitext(inps.timeseriesFile)[0] + '_demErr.h5'
    topoCmd = 'dem_error.py {} -g {} -t {} -o {}'.format(inps.timeseriesFile,
                                                         inps.geomFile,
                                                         inps.templateFile,
                                                         outName)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(topoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
    else:
        print('No correction for topographic residuals.')

    ##############################################
    # Timeseries Residual Standard Deviation
    ##############################################
    print('\n********** Timeseries Residual Root Mean Square **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile,
                                                     inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    ##############################################
    # Reference in Time
    ##############################################
    print('\n********** Select Reference Date **********')
    if template['pysar.reference.date']:
        outName = '{}_refDate.h5'.format(os.path.splitext(inps.timeseriesFile)[0])
        refCmd = 'reference_date.py {} -t {} -o {}'.format(inps.timeseriesFile,
                                                           inps.templateFile,
                                                           outName)
        print(refCmd)
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(refCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while changing reference date.\n')
        inps.timeseriesFile = outName
    else:
        print('No reference change in time.')

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n********** Remove Phase Ramp **********')
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        # baseline-based methods need radar coordinates
        if inps.geocoded and inps.derampMethod in ['baseline_cor',
                                                   'base_trop_cor']:
            warnings.warn(('dataset is in geo coordinates,'
                           ' can not apply {} method').format(inps.derampMethod))
            print('skip deramping and continue.')

        # Get executable command and output name
        derampCmd = None
        fbase = os.path.splitext(inps.timeseriesFile)[0]
        if inps.derampMethod in ['plane', 'quadratic', 'plane_range',
                                 'quadratic_range', 'plane_azimuth',
                                 'quadratic_azimuth']:
            outName = '{}_{}.h5'.format(fbase, inps.derampMethod)
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(
                inps.timeseriesFile, inps.derampMethod,
                inps.derampMaskFile, outName)

        elif inps.derampMethod == 'baseline_cor':
            outName = '{}_baselineCor.h5'.format(fbase)
            derampCmd = 'baseline_error.py {} {}'.format(inps.timeseriesFile,
                                                         inps.maskFile)

        elif inps.derampMethod in ['base_trop_cor', 'basetropcor',
                                   'baselinetropcor']:
            print('Joint estimation of Baseline error and tropospheric delay')
            print('\t[height-correlation approach]')
            outName = '{}_baseTropCor.h5'.format(fbase)
            derampCmd = ('baseline_trop.py {t} {d} {p}'
                         ' range_and_azimuth {m}').format(t=inps.timeseriesFile,
                                                          d=inps.geomFile,
                                                          p=inps.tropPolyOrder,
                                                          m=inps.maskFile)
        else:
            warnings.warn('Unrecognized phase ramp method: {}'.format(
                template['pysar.deramp']))

        # Execute command
        if derampCmd:
            print(derampCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
    else:
        print('No phase ramp removal.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n********** Estimate Velocity **********')
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(inps.timeseriesFile,
                                                            inps.templateFile,
                                                            inps.velFile)
    print(velCmd)
    if ut.update_file(inps.velFile, [inps.timeseriesFile, inps.templateFile]):
        status = subprocess.Popen(velCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(os.path.splitext(inps.velFile)[0],
                                            suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(inps.tropFile,
                                                                inps.templateFile,
                                                                inps.tropVelFile)
        print(velCmd)
        if ut.update_file(inps.tropVelFile, [inps.tropFile, inps.templateFile]):
            status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    #     Geocoding --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n********** Post-processing **********')
    # HDF-EOS5 output requires geocoded data, so force geocoding on
    if template['pysar.save.hdfEos5'] is True and template['pysar.geocode'] is False:
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                # switch the working files to their geocoded versions
                inps.velFile = os.path.join(geo_dir, 'geo_' + os.path.basename(inps.velFile))
                inps.tempCohFile = os.path.join(geo_dir, 'geo_' + os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(geo_dir, 'geo_' + os.path.basename(inps.timeseriesFile))
                inps.geomFile = os.path.join(geo_dir, 'geo_' + os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                              inps.minTempCoh,
                                                              outName)
            print(genCmd)
            if ut.update_file(outName, inps.tempCohFile):
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile,
                                                  inps.maskFile,
                                                  outName)
        print(maskCmd)
        if ut.update_file(outName, [inps.velFile, inps.maskFile]):
            status = subprocess.Popen(maskCmd, shell=True).wait()
        try:
            inps.velFile = glob.glob(outName)[0]
        except IndexError:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        if ut.update_file(outName, inps.velFile, check_readable=False):
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
    if template['pysar.save.hdfEos5'] is True:
        print('\n********** Save Time-series in HDF-EOS5 Format **********')
        if not inps.geocoded:
            warnings.warn('Dataset is in radar coordinates, skip saving to HDF-EOS5 format.')
        else:
            # Add attributes from custom template to timeseries file
            if templateCustom is not None:
                ut.add_attribute(inps.timeseriesFile, templateCustom)

            # Save to HDF-EOS5 format
            print('--------------------------------------------')
            hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                          ' -t {e}').format(t=inps.timeseriesFile,
                                            c=inps.tempCohFile,
                                            m=inps.maskFile,
                                            g=inps.geomFile,
                                            e=inps.templateFile)
            print(hdfeos5Cmd)
            SAT = hdfeos5.get_mission_name(atr)
            try:
                inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
            except IndexError:
                inps.hdfeos5File = None
            if ut.update_file(inps.hdfeos5File, [inps.timeseriesFile,
                                                 inps.tempCohFile,
                                                 inps.maskFile,
                                                 inps.geomFile]):
                status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while generating HDF-EOS5 time-series file.\n')

    #############################################
    # Plot Figures
    #############################################
    inps.plotShellFile = os.path.join(os.path.dirname(__file__),
                                      '../sh/plot_pysarApp.sh')
    plotCmd = './' + os.path.basename(inps.plotShellFile)
    inps.plot = template['pysar.plot']
    if inps.plot is True:
        print('\n********** Plot Results / Save to PIC **********')
        # Copy to working directory if not existed yet.
        if not os.path.isfile(plotCmd):
            print('copy {} to work directory: {}'.format(inps.plotShellFile,
                                                         inps.workDir))
            shutil.copy2(inps.plotShellFile, inps.workDir)

    if inps.plot and os.path.isfile(plotCmd):
        print(plotCmd)
        status = subprocess.Popen(plotCmd, shell=True).wait()
        print('\n' + '-' * 50)
        print('For better figures:')
        print('  1) Edit parameters in plot_pysarApp.sh and re-run this script.')
        print('  2) Play with view.py, tsview.py and save_kml.py for more advanced/customized figures.')
        if status != 0:
            raise Exception('Error while plotting data files using {}'.format(plotCmd))

    #############################################
    # Time                                      #
    #############################################
    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs'.format(m, s))
    print('\n###############################################')
    print('End of PySAR processing!')
    print('################################################\n')
def run_tropospheric_delay_correction(self, step_name):
    """Correct tropospheric delays in the time-series.

    Two correction methods are supported, selected through the
    pysar.troposphericDelay.method template option:
    1) height_correlation - phase/elevation ratio approach
    2) pyaps              - weather re-analysis data (PyAPS)

    Parameters: step_name : str, key into get_timeseries_filename() used to
                    look up the input/output time-series file for this step.
    Raises:     RuntimeError if the external tropo_pyaps.py call fails.
    """
    geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
    mask_file = 'maskTempCoh.h5'

    fnames = self.get_timeseries_filename(self.template)[step_name]
    in_file = fnames['input']
    out_file = fnames['output']
    # identical input/output name means this correction step is disabled
    if in_file == out_file:
        print('No tropospheric delay correction.')
        return

    poly_order = self.template['pysar.troposphericDelay.polyOrder']
    tropo_model = self.template['pysar.troposphericDelay.weatherModel']
    weather_dir = self.template['pysar.troposphericDelay.weatherDir']
    method = self.template['pysar.troposphericDelay.method']

    def get_dataset_size(fname):
        """Return (LENGTH, WIDTH) attribute strings of an HDF5 dataset file."""
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # Phase/Elevation Ratio (Doin et al., 2009)
    if method == 'height_correlation':
        tropo_look = self.template['pysar.troposphericDelay.looks']
        tropo_min_cor = self.template['pysar.troposphericDelay.minCorrelation']
        scp_args = '{f} -g {g} -p {p} -m {m} -o {o} -l {l} -t {t}'.format(
            f=in_file,
            g=geom_file,
            p=poly_order,
            m=mask_file,
            o=out_file,
            l=tropo_look,
            t=tropo_min_cor)
        print('tropospheric delay correction with height-correlation approach')
        print('tropo_phase_elevation.py', scp_args)
        if ut.run_or_skip(out_file=out_file, in_file=in_file) == 'run':
            pysar.tropo_phase_elevation.main(scp_args.split())

    # Weather Re-analysis Data (Jolivet et al., 2011;2014)
    elif method == 'pyaps':
        scp_args = '-f {f} --model {m} -g {g} -w {w}'.format(f=in_file,
                                                             m=tropo_model,
                                                             g=geom_file,
                                                             w=weather_dir)
        print('Atmospheric correction using Weather Re-analysis dataset (PyAPS, Jolivet et al., 2011)')
        print('Weather Re-analysis dataset:', tropo_model)
        tropo_file = './INPUTS/{}.h5'.format(tropo_model)
        if ut.run_or_skip(out_file=out_file, in_file=[in_file, tropo_file]) == 'run':
            # reuse an existing delay file if its size matches the time-series
            if os.path.isfile(tropo_file) and get_dataset_size(tropo_file) == get_dataset_size(in_file):
                scp_args = '{f} {t} -o {o} --force'.format(f=in_file,
                                                           t=tropo_file,
                                                           o=out_file)
                print('--------------------------------------------')
                print('Use existed tropospheric delay file: {}'.format(tropo_file))
                print('diff.py', scp_args)
                pysar.diff.main(scp_args.split())
            else:
                # opt 1 - using tropo_pyaps as python module and call its main function
                # prefered, disabled for now to make it compatible with python2-pyaps
                #print('tropo_pyaps.py', scp_args)
                #from pysar import tropo_pyaps
                #tropo_pyaps.main(scp_args.split())

                # opt 2 - using tropo_pyaps as executable script
                # will be deprecated after python3-pyaps is fully funcational
                cmd = 'tropo_pyaps.py ' + scp_args
                print(cmd)
                status = subprocess.Popen(cmd, shell=True).wait()
                # BUGFIX: status was captured but never checked, so a failed
                # PyAPS run was silently ignored
                if status != 0:
                    raise RuntimeError(
                        'Error while correcting tropospheric delay with: {}'.format(cmd))
    return