Example No. 1
def save_hdfeos5(inps, customTemplate=None):
    if not inps.geocoded:
        warnings.warn('Dataset is in radar coordinates, skip writing to HDF-EOS5 format.')
    else:
        # Add attributes from custom template to timeseries file
        if customTemplate is not None:
            ut.add_attribute(inps.timeseriesFile, customTemplate)

        # Save to HDF-EOS5 format
        print('--------------------------------------------')
        hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                      ' -t {e}').format(t=inps.timeseriesFile,
                                        c=inps.tempCohFile,
                                        m=inps.maskFile,
                                        g=inps.geomFile,
                                        e=inps.templateFile)
        print(hdfeos5Cmd)
        atr = readfile.read_attribute(inps.timeseriesFile)
        SAT = sensor.get_unavco_mission_name(atr)
        try:
            inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
        except IndexError:  # no existing HDF-EOS5 file found
            inps.hdfeos5File = None
        if ut.run_or_skip(out_file=inps.hdfeos5File, in_file=[inps.timeseriesFile,
                                                              inps.tempCohFile,
                                                              inps.maskFile,
                                                              inps.geomFile]) == 'run':
            status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while generating HDF-EOS5 time-series file.\n')
    return
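The build-the-command, subprocess.Popen(...).wait(), raise-on-non-zero pattern above can also be written with the standard-library subprocess.run; a minimal sketch (the command string is whatever was printed above):

import subprocess

def run_shell_cmd(cmd):
    """Run a shell command; raise subprocess.CalledProcessError on a non-zero exit."""
    # check=True replaces the manual `if status != 0: raise Exception(...)` check
    subprocess.run(cmd, shell=True, check=True)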
Example No. 2
def main(iargs=None):
    # check inputs
    inps = cmd_line_parse(iargs)

    # update mode
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.ifgram_file

    start_time = time.time()
    # run bridging
    run_unwrap_error_bridge(inps.ifgram_file,
                            water_mask_file=inps.waterMaskFile,
                            ramp_type=inps.ramp,
                            radius=inps.bridgePtsRadius,
                            dsNameIn=inps.datasetNameIn,
                            dsNameOut=inps.datasetNameOut)

    # config parameter
    if os.path.splitext(inps.ifgram_file)[1] in ['.h5', '.he5']:
        print('add/update the following configuration metadata to file:')
        config_metadata = dict()
        for key in configKeys:
            config_metadata[key_prefix + key] = str(vars(inps)[key])
        ut.add_attribute(inps.ifgram_file, config_metadata, print_msg=True)

    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.ifgram_file
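The configKeys/key_prefix loop above flattens the parsed options into string-valued file metadata, which run_or_skip(inps) can later compare against to decide whether a re-run is needed. A small stand-alone sketch of the same idea (the prefix and key names here are hypothetical, not PySAR's actual ones):

key_prefix = 'pysar.unwrapError.'                           # hypothetical prefix
configKeys = ['waterMaskFile', 'ramp', 'bridgePtsRadius']   # hypothetical keys

def config_metadata_from(inps):
    """Flatten selected namespace attributes into string-valued metadata."""
    return {key_prefix + key: str(vars(inps)[key]) for key in configKeys}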
Example No. 3
    def generate_temporal_coherence_mask(self):
        """Generate reliable pixel mask from temporal coherence"""
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'maskTempCoh.h5'
        tcoh_min = self.template['pysar.networkInversion.minTempCoh']

        scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min,
                                                       mask_file, geom_file)
        print('generate_mask.py', scp_args)

        # update mode: skip only if:
        # 1) output file exists and is newer than the input file, AND
        # 2) all config keys are unchanged; otherwise run
        config_keys = ['pysar.networkInversion.minTempCoh']
        print('update mode: ON')
        flag = 'skip'
        if ut.run_or_skip(out_file=mask_file,
                          in_file=tcoh_file,
                          print_msg=False) == 'run':
            flag = 'run'
        else:
            print(
                '1) output file: {} already exists and newer than input file: {}'
                .format(mask_file, tcoh_file))
            atr = readfile.read_attribute(mask_file)
            if any(
                    str(self.template[i]) != atr.get(i, 'False')
                    for i in config_keys):
                flag = 'run'
                print(
                    '2) NOT all key configuration parameters are the same: {}'.
                    format(config_keys))
            else:
                print('2) all key configuration parameters are the same: {}'.
                      format(config_keys))
        print('run or skip: {}'.format(flag))

        if flag == 'run':
            pysar.generate_mask.main(scp_args.split())
            # update configKeys
            atr = {}
            for key in config_keys:
                atr[key] = self.template[key]
            ut.add_attribute(mask_file, atr)

        # check number of pixels selected in mask file for following analysis
        num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
        print('number of reliable pixels: {}'.format(num_pixel))

        min_num_pixel = float(
            self.template['pysar.networkInversion.minNumPixel'])
        if num_pixel < min_num_pixel:
            msg = "Not enough reliable pixels (minimum of {}). ".format(
                int(min_num_pixel))
            msg += "Try the following:\n"
            msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
            msg += "2) Check the network and make sure it's fully connected without subsets"
            raise RuntimeError(msg)
        return
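The update-mode criteria above rely on ut.run_or_skip, which compares output and input file timestamps. A minimal stand-in based only on modification times (a sketch, not PySAR's actual implementation) could look like:

import os

def run_or_skip(out_file, in_file, print_msg=True):
    """Return 'run' if out_file is missing or older than any input file, else 'skip'."""
    in_files = [in_file] if isinstance(in_file, str) else list(in_file)
    if not out_file or not os.path.isfile(out_file):
        return 'run'
    out_mtime = os.path.getmtime(out_file)
    if any(os.path.getmtime(f) > out_mtime for f in in_files if f and os.path.isfile(f)):
        return 'run'
    return 'skip'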
Example No. 4
def get_temporal_coherence_mask(inps, template):
    """Generate mask from temporal coherence"""
    configKeys = ['pysar.networkInversion.minTempCoh']
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {} --shadow {}'.format(
        inps.tempCohFile, inps.minTempCoh, inps.maskFile, inps.geomFile)
    print(maskCmd)

    # update mode checking
    # skip if 1) output file exists, 2) is newer than the input file, AND 3) all config keys are the same; otherwise run
    run = False
    if ut.run_or_skip(out_file=inps.maskFile,
                      in_file=inps.tempCohFile,
                      print_msg=False) == 'run':
        run = True
    else:
        print(
            '  1) output file: {} already exists and newer than input file: {}'
            .format(inps.maskFile, inps.tempCohFile))
        meta_dict = readfile.read_attribute(inps.maskFile)
        if any(
                str(template[i]) != meta_dict.get(i, 'False')
                for i in configKeys):
            run = True
            print(
                '  2) NOT all key configuration parameters are the same --> run.\n\t{}'
                .format(configKeys))
        else:
            print('  2) all key configuration parameters are the same:\n\t{}'.
                  format(configKeys))
    # result
    print('run this step:', run)
    if run:
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while generating mask file from temporal coherence.')

        # update configKeys
        meta_dict = {}
        for key in configKeys:
            meta_dict[key] = template[key]
        ut.add_attribute(inps.maskFile, meta_dict)

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enought coherent pixels selected (minimum of {}). ".format(
            int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk
    return
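Counting the reliable pixels with np.sum(msk != 0.), as in the two mask functions above, is equivalent to np.count_nonzero, which reads a bit more directly:

import numpy as np

msk = np.array([[0., 1., 1.],
                [0., 0., 1.]], dtype=np.float32)
num_pixel = np.count_nonzero(msk)   # 3, same result as np.sum(msk != 0.)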
Example No. 5
    def run_load_data(self, step_name):
        """Load InSAR stacks into HDF5 files in ./INPUTS folder.
        It 1) copies auxiliary files into the work directory (for Univ. of Miami only)
           2) loads all interferogram stack files into the PYSAR/INPUTS directory
           3) checks the loading result
           4) adds custom metadata (optional, for HDF-EOS5 format only)
        """
        # 1) copy aux files (optional)
        self._copy_aux_file()

        # 2) loading data
        scp_args = '--template {}'.format(self.templateFile)
        if self.customTemplateFile:
            scp_args += ' {}'.format(self.customTemplateFile)
        if self.projectName:
            scp_args += ' --project {}'.format(self.projectName)
        # run
        print("load_data.py", scp_args)
        pysar.load_data.main(scp_args.split())
        os.chdir(self.workDir)

        # 3) check loading result
        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

        # 3.1) output waterMask.h5
        water_mask_file = 'waterMask.h5'
        if 'waterMask' in readfile.get_dataset_list(geom_file):
            print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
            if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
                water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
                atr['FILE_TYPE'] = 'waterMask'
                writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

        # 4) add custom metadata (optional)
        if self.customTemplateFile:
            print('updating {}, {} metadata based on custom template file: {}'.format(
                os.path.basename(stack_file),
                os.path.basename(geom_file),
                os.path.basename(self.customTemplateFile)))
            # use ut.add_attribute() instead of add_attribute.py because of
            # better control of special metadata, such as SUBSET_X/YMIN
            ut.add_attribute(stack_file, self.customTemplate)
            ut.add_attribute(geom_file, self.customTemplate)

        # 5) if not load_complete, plot and raise exception
        if not load_complete:
            # plot result if an error occurred
            self.plot_result(print_aux=False, plot=plot)

            # go back to original directory
            print('Go back to directory:', self.cwd)
            os.chdir(self.cwd)

            # raise error
            msg = 'step {}: NOT all required dataset found, exit.'.format(step_name)
            raise RuntimeError(msg)
        return
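readfile.get_dataset_list(geom_file) is used above to check whether a waterMask dataset was loaded; for a plain HDF5 geometry file the same check can be sketched with h5py (the file name here is only an assumption):

import h5py

geom_file = 'geometryRadar.h5'            # assumed file name
with h5py.File(geom_file, 'r') as f:
    dataset_names = list(f.keys())        # top-level dataset names
has_water_mask = 'waterMask' in dataset_names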
Example No. 6
def update_file_attribute(fname, atr_new):
    # Read Original Attributes
    atr = readfile.read_attribute(fname)
    print('update {} file attribute: {}'.format(atr['FILE_TYPE'], fname))

    ext = os.path.splitext(fname)[1]
    if ext in ['.h5', '.he5']:
        fname = ut.add_attribute(fname, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print('All attributes to update already exist with the same value '
                  '(and all attributes to remove are already absent), skip update.')
        else:
            for key, value in atr_new.items():
                if value == 'None':
                    # drop the attribute if it is present
                    atr.pop(key, None)
                else:
                    atr[key] = value

            rsc_file = '{}.rsc'.format(fname)
            print('writing >>> {}'.format(rsc_file))
            writefile.write_roipac_rsc(atr, out_file=rsc_file)
    return fname
Example No. 7
def main(iargs=None):
    # check inputs
    inps = cmd_line_parse(iargs)
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    # update mode
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.ifgram_file

    # check maskConnComp.h5
    if os.path.isfile(str(inps.waterMaskFile)) and not inps.maskFile:
        atr = readfile.read_attribute(inps.ifgram_file)
        ref_yx = (int(atr['REF_Y']), int(atr['REF_X']))
        inps.maskFile = water_mask2conn_comp_mask(inps.waterMaskFile,
                                                  ref_yx=ref_yx,
                                                  min_num_pixel=1e4)
    if not inps.maskFile:
        msg =  'No connected-components mask file found. The bridging method is NOT automatic; you need to:\n'
        msg += '  Prepare a connected-components mask file marking each area that shares the same unwrapping error\n'
        msg += 'Check the following Jupyter Notebook for an example:\n'
        msg += '  https://github.com/yunjunz/pysar/blob/master/examples/run_unwrap_error_bridging.ipynb'
        raise SystemExit(msg)

    # run bridging
    start_time = time.time()
    bridges = search_bridge(inps.maskFile,
                            radius=inps.bridgePtsRadius,
                            coh_mask_file=inps.cohMaskFile)

    run_unwrap_error_bridge(inps.ifgram_file,
                            inps.maskFile,
                            bridges,
                            dsNameIn=inps.datasetNameIn,
                            dsNameOut=inps.datasetNameOut,
                            ramp_type=inps.ramp)

    # config parameter
    print('add/update the following configuration metadata to file:')
    config_metadata = dict()
    for key in configKeys:
        config_metadata[key_prefix+key] = str(vars(inps)[key])
    ut.add_attribute(inps.ifgram_file, config_metadata, print_msg=True)

    m, s = divmod(time.time()-start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.ifgram_file
Example No. 8
def remove_reference_pixel(File):
    """Remove reference pixel info from input file"""
    print("remove REF_Y/X and/or REF_LAT/LON from file: " + File)
    atrDrop = {}
    for i in ['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        atrDrop[i] = 'None'
    File = ut.add_attribute(File, atrDrop)
    return File
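Since every dropped key gets the same 'None' marker, the same dictionary can be built in one call with dict.fromkeys:

atrDrop = dict.fromkeys(['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON'], 'None')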
Example No. 9
    def run_save2hdfeos5(self, step_name):
        """Save displacement time-series and its aux data in geo coordinate into HDF-EOS5 format"""
        if self.template['pysar.save.hdfEos5'] is True:
            # input
            ts_file = self.get_timeseries_filename(
                self.template)[step_name]['input']
            # Add attributes from custom template to timeseries file
            if self.customTemplate is not None:
                ut.add_attribute(ts_file, self.customTemplate)

            tcoh_file = 'temporalCoherence.h5'
            mask_file = 'geo_maskTempCoh.h5'
            geom_file = ut.check_loaded_dataset(self.workDir,
                                                print_msg=False)[2]
            if 'GEOCODE' in ts_file:
                tcoh_file = './GEOCODE/geo_temporalCoherence.h5'
                mask_file = './GEOCODE/geo_maskTempCoh.h5'
                geom_file = './GEOCODE/geo_{}'.format(
                    os.path.basename(geom_file))

            # cmd
            print('--------------------------------------------')
            scp_args = '{f} -c {c} -m {m} -g {g} -t {t}'.format(
                f=ts_file,
                c=tcoh_file,
                m=mask_file,
                g=geom_file,
                t=self.templateFile)
            print('save_hdfeos5.py', scp_args)

            # output (check existing file)
            atr = readfile.read_attribute(ts_file)
            SAT = sensor.get_unavco_mission_name(atr)
            try:
                hdfeos5_file = get_file_list('{}_*.he5'.format(SAT))[0]
            except IndexError:  # no existing HDF-EOS5 file found
                hdfeos5_file = None
            if ut.run_or_skip(
                    out_file=hdfeos5_file,
                    in_file=[ts_file, tcoh_file, mask_file,
                             geom_file]) == 'run':
                pysar.save_hdfeos5.main(scp_args.split())
        else:
            print('save time-series to HDF-EOS5 format is OFF.')
        return
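The try/except around get_file_list('{}_*.he5'.format(SAT))[0] looks for an already-generated HDF-EOS5 product so that run_or_skip can compare timestamps. A glob-based sketch of the same lookup that avoids the exception handling (the mission name is hypothetical):

import glob

SAT = 'SENTINEL-1'   # hypothetical mission name from sensor.get_unavco_mission_name()
candidates = sorted(glob.glob('{}_*.he5'.format(SAT)))
hdfeos5_file = candidates[0] if candidates else None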
Example No. 10
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # --update option
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.outfile

    out_file = ut.run_deramp(inps.file,
                             ramp_type=inps.surface_type,
                             mask_file=inps.mask_file,
                             out_file=inps.outfile,
                             datasetName=inps.dset)

    # config parameter
    print('add/update the following configuration metadata to file:\n{}'.format(configKeys))
    atr_new = {}
    atr_new['pysar.deramp'] = inps.surface_type
    atr_new['pysar.deramp.maskFile'] = inps.mask_file
    ut.add_attribute(out_file, atr_new)
    return
Example No. 11
def prepare_metadata4giant(fname, meta_files=None):
    """Extract metadata from xml files for GIAnT time-series file."""
    # check xml files
    if not meta_files:
        meta_files = auto_xml_file4giant(fname)
    if not meta_files:
        raise FileNotFoundError("no xml file found.")

    # extract metadata from xml files
    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
    xml_files = [i for i in meta_files if i.endswith('.xml')]
    xml_dict = {}
    for rsc_file in rsc_files:
        print('reading {}'.format(rsc_file))
        rsc_dict = readfile.read_roipac_rsc(rsc_file)
        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
            rsc_dict.pop(key, None)   # drop length/width keys if present
        xml_dict.update(rsc_dict)
    for xml_file in xml_files:
        print('reading {}'.format(xml_file))
        xml_dict.update(read_giant_xml(xml_file))

    if not xml_dict:
        raise ValueError('No metadata found in file: ' + xml_file)

    # standardize metadata names
    xml_dict = readfile.standardize_metadata(xml_dict)

    # project name
    sensor_name, project_name = sensor.project_name2sensor_name(
        os.path.abspath(fname))
    if sensor_name:
        xml_dict['PLATFORM'] = sensor_name
    if project_name:
        xml_dict['PROJECT_NAME'] = project_name
        if sensor_name in project_name:
            tmp = project_name.split(sensor_name)[1][0]
            if tmp == 'A':
                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
            else:
                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'

    # update GIAnT HDF5 file
    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
    return fname
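The orbit-direction logic above inspects the first character right after the sensor name inside the project name ('A' means ascending, anything else descending). A tiny illustration with a made-up project name:

sensor_name, project_name = 'Sen', 'GalapagosSenDT128'    # made-up example values
tmp = project_name.split(sensor_name)[1][0]               # 'D'
orbit_direction = 'ASCENDING' if tmp == 'A' else 'DESCENDING'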
Example No. 12
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if succeed; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)
    if (inps.ref_y and inps.ref_x and 'REF_Y' in atr.keys()
            and inps.ref_y == int(atr['REF_Y'])
            and inps.ref_x == int(atr['REF_X']) and not inps.force):
        print(
            'Same reference pixel is already selected/saved in file, skip updating.'
        )
        return inps.file

    # Get stack and mask
    stack = ut.temporal_average(inps.file,
                                datasetName='unwrapPhase',
                                updateMode=True,
                                outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError(
            'no pixel found with valid phase value in all datasets.')

    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x] == 0.:
        raise ValueError(
            'reference y/x has a zero or NaN value in some dataset. Please re-select.')

    # Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(
                coh_file=inps.coherenceFile,
                mask=mask,
                min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)
    if not inps.ref_y or not inps.ref_x:
        raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: ' + inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = '{}_seeded{}'.format(
                os.path.splitext(inps.file)[0],
                os.path.splitext(inps.file)[1])
        k = atr['FILE_TYPE']

        # For ifgramStack file, update data value directly, do not write to new file
        if k == 'ifgramStack':
            f = h5py.File(inps.file, 'r+')
            ds = f[k].get('unwrapPhase')
            for i in range(ds.shape[0]):
                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
            f[k].attrs.update(atrNew)
            f.close()
            inps.outfile = inps.file

        elif k == 'timeseries':
            data = timeseries(inps.file).read()
            for i in range(data.shape[0]):
                data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]
            obj = timeseries(inps.outfile)
            atr.update(atrNew)
            obj.write2hdf5(data=data, metadata=atr, refFile=inps.file)
            obj.close()
        else:
            print('writing >>> ' + inps.outfile)
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)
    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
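The per-date loops that subtract the reference-pixel value (ds[i, :, :] -= ds[i, ref_y, ref_x]) can be vectorized with NumPy broadcasting; a small sketch on a random stack (the indices are arbitrary):

import numpy as np

data = np.random.rand(5, 100, 100).astype(np.float32)   # (num_date, length, width)
ref_y, ref_x = 50, 60                                    # arbitrary reference pixel
data -= data[:, ref_y, ref_x][:, None, None]             # broadcast over every date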
Example No. 13
def main(iargs=None):
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)

    # Project Name
    inps.projectName = None
    if inps.customTemplateFile:
        inps.customTemplateFile = os.path.abspath(inps.customTemplateFile)
        inps.projectName = os.path.splitext(os.path.basename(inps.customTemplateFile))[0]
        print('Project name:', inps.projectName)

    # Work directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'), inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory:", inps.workDir)

    copy_aux_file(inps)

    inps, template, customTemplate = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n**********  Load Data  **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.customTemplateFile:
        loadCmd += ' {}'.format(inps.customTemplateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)

    print('-'*50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    if inps.customTemplateFile:
        #metaCmd = 'add_attribute.py {} {}'.format(inps.stackFile, inps.customTemplateFile)
        #print(metaCmd)
        #status = subprocess.Popen(metaCmd, shell=True).wait()
        # better control of special metadata, such as SUBSET_X/YMIN
        print('updating {} metadata based on custom template file: {}'.format(
            os.path.basename(inps.stackFile), inps.customTemplateFile))
        ut.add_attribute(inps.stackFile, customTemplate)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributes for a fresh re-run.\n'+'-'*50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n**********  Generate Auxiliary Files  **********')
    inps.waterMaskFile = 'waterMask.h5'
    if not os.path.isfile(inps.waterMaskFile):
        inps.waterMaskFile = None

    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    maskCmd = 'generate_mask.py {} --nonzero -o {} --update'.format(inps.stackFile, inps.maskFile)
    print(maskCmd)
    status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average phase velocity - Stacking
    inps.avgPhaseVelFile = 'avgPhaseVelocity.h5'
    avgCmd = 'temporal_average.py {i} --dataset unwrapPhase -o {o} --update'.format(i=inps.stackFile,
                                                                                    o=inps.avgPhaseVelFile)
    print(avgCmd)
    status = subprocess.Popen(avgCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    avgCmd = 'temporal_average.py {i} --dataset coherence -o {o} --update'.format(i=inps.stackFile,
                                                                                  o=inps.avgSpatialCohFile)
    print(avgCmd)
    status = subprocess.Popen(avgCmd, shell=True).wait()

    # mask based on average spatial coherence
    inps.maskSpatialCohFile = 'maskSpatialCoh.h5'
    if ut.run_or_skip(out_file=inps.maskSpatialCohFile, in_file=inps.avgSpatialCohFile) == 'run':
        maskCmd = 'generate_mask.py {i} -m 0.7 -o {o}'.format(i=inps.avgSpatialCohFile,
                                                              o=inps.maskSpatialCohFile)
        if inps.waterMaskFile:
            maskCmd += ' --base {}'.format(inps.waterMaskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()


    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n**********  Select Reference Point  **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(inps.stackFile,
                                                             inps.templateFile,
                                                             inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    correct_unwrap_error(inps, template)

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n**********  Modify Network  **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile,
                                                     inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(inps.stackFile,
                                                                    inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    try:
        outFile = [i for i in ['Network.pdf', 'PIC/Network.pdf'] if os.path.isfile(i)][0]
    except IndexError:
        outFile = None
    if ut.run_or_skip(out_file=outFile,
                      in_file=[inps.stackFile,
                               inps.cohSpatialAvgFile,
                               inps.templateFile],
                      check_readable=False) == 'run':
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print('\n**********  Invert Network of Interferograms into Time-series  **********')
    invCmd = 'ifgram_inversion.py {} --template {} --update '.format(inps.stackFile,
                                                                     inps.templateFile)
    if inps.fast:
        invCmd += ' --fast'
    if inps.waterMaskFile:
        invCmd += ' -m {}'.format(inps.waterMaskFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    inps.timeseriesFiles = ['timeseries.h5']       #all ts files
    status = subprocess.Popen(invCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    get_temporal_coherence_mask(inps, template)

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print('\n**********  Local Oscillator Drift Correction for Envisat  **********')
        outName = os.path.splitext(inps.timeseriesFile)[0]+'_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(inps.timeseriesFile,
                                                               inps.geomFile,
                                                               outName)
        print(lodCmd)
        if ut.run_or_skip(out_file=outName, in_file=[inps.timeseriesFile, inps.geomFile]) == 'run':
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName
        inps.timeseriesFiles.append(outName)

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n**********  Tropospheric Delay Correction  **********')
    correct_tropospheric_delay(inps, template)

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n**********  Remove Phase Ramp  **********')
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        ramp_list = ['linear', 'quadratic',
                     'linear_range', 'quadratic_range',
                     'linear_azimuth', 'quadratic_azimuth']
        if inps.derampMethod in ramp_list:
            outName = '{}_ramp.h5'.format(os.path.splitext(inps.timeseriesFile)[0])
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(inps.timeseriesFile,
                                                                     inps.derampMethod,
                                                                     inps.derampMaskFile,
                                                                     outName)
            print(derampCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.timeseriesFile) == 'run':
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
            inps.timeseriesFiles.append(outName)
        else:
            msg = 'unrecognized phase ramp method: {}'.format(inps.derampMethod)
            msg += '\navailable ramp types:\n{}'.format(ramp_list)
            raise ValueError(msg)
    else:
        print('No phase ramp removal.')

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print('\n**********  Topographic Residual (DEM error) Correction  **********')
    outName = os.path.splitext(inps.timeseriesFile)[0]+'_demErr.h5'
    topoCmd = 'dem_error.py {i} -t {t} -o {o} --update '.format(i=inps.timeseriesFile,
                                                                t=inps.templateFile,
                                                                o=outName)
    if not inps.fast:
        topoCmd += ' -g {}'.format(inps.geomFile)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        status = subprocess.Popen(topoCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
        inps.timeseriesFiles.append(outName)
    else:
        print('No correction for topographic residuals.')

    # Timeseries Residual Standard Deviation
    print('\n**********  Timeseries Residual Root Mean Square  **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile,
                                                     inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    # Reference in Time
    print('\n**********  Select Reference Date  **********')
    if template['pysar.reference.date']:
        refCmd = 'reference_date.py -t {} '.format(inps.templateFile)
        for fname in inps.timeseriesFiles:
            refCmd += ' {}'.format(fname)
        print(refCmd)
        status = subprocess.Popen(refCmd, shell=True).wait()
        if status != 0:
            raise Exception('Error while changing reference date.\n')
    else:
        print('No reference change in time.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n**********  Estimate Velocity  **********')
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {} --update'.format(inps.timeseriesFile,
                                                                     inps.templateFile,
                                                                     inps.velFile)
    print(velCmd)
    status = subprocess.Popen(velCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(os.path.splitext(inps.velFile)[0], suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {} --update'.format(inps.tropFile,
                                                                         inps.templateFile,
                                                                         inps.tropVelFile)
        print(velCmd)
        status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    # Geocoding --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n**********  Post-processing  **********')
    if template['pysar.save.hdfEos5'] is True and template['pysar.geocode'] is False:
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                inps.velFile        = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.velFile))
                inps.tempCohFile    = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.timeseriesFile))
                inps.geomFile       = os.path.join(geo_dir, 'geo_'+os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                              inps.minTempCoh,
                                                              outName)
            print(genCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.tempCohFile) == 'run':
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile,
                                                  inps.maskFile,
                                                  outName)
        print(maskCmd)
        if ut.run_or_skip(out_file=outName, in_file=[inps.velFile, inps.maskFile]) == 'run':
            status = subprocess.Popen(maskCmd, shell=True).wait()
        try:
            inps.velFile = glob.glob(outName)[0]
        except IndexError:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        try:
            outFile = [i for i in [outName, 'PIC/{}'.format(outName)] if os.path.isfile(i)][0]
        except IndexError:
            outFile = None
        if ut.run_or_skip(out_file=outFile, in_file=inps.velFile, check_readable=False) == 'run':
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
    if template['pysar.save.hdfEos5'] is True:
        print('\n**********  Save Time-series in HDF-EOS5 Format  **********')
        save_hdfeos5(inps, customTemplate)

    #############################################
    # Plot Figures
    #############################################
    if template['pysar.plot']:
        plot_pysarApp(inps)

    #############################################
    # Timing                                    #
    #############################################
    m, s = divmod(time.time()-start_time, 60)
    print('\n###############################################')
    print('End of PySAR Routine Processing Workflow!')
    print('###############################################\n')
    print('time used: {:02.0f} mins {:02.1f} secs'.format(m, s))
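The routine workflow above repeats the same print-the-command, run-it, check-the-exit-status sequence for every processing step; a small helper that factors out that pattern (a sketch, not part of pysarApp.py) would keep each step shorter:

import subprocess

def run_step(cmd, error_msg):
    """Print and run a shell command; raise if it exits with a non-zero status."""
    print(cmd)
    status = subprocess.Popen(cmd, shell=True).wait()
    if status != 0:
        raise Exception(error_msg)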
Example No. 14
def main(iargs=None):
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)

    # Project Name
    inps.projectName = None
    if inps.templateFileCustom:
        inps.templateFileCustom = os.path.abspath(inps.templateFileCustom)
        inps.projectName = os.path.splitext(
            os.path.basename(inps.templateFileCustom))[0]
        print('Project name: ' + inps.projectName)

    # Work directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'),
                                        inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory: " + inps.workDir)

    copy_aux_file(inps)

    inps, template, templateCustom = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n**********  Load Data  **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)

    print('-' * 50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    # if inps.templateFileCustom:
    #    atrCmd = 'add_attribute.py {} {}'.format(inps.stackFile, inps.templateFileCustom)
    #    print(atrCmd)
    #    status = subprocess.Popen(atrCmd, shell=True).wait()
    #ut.add_attribute(inps.stackFile, template)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributes for a fresh re-run.\n' + '-' * 50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n**********  Generate Auxiliary Files  **********')
    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    if ut.update_file(inps.maskFile, inps.stackFile):
        maskCmd = 'generate_mask.py {} --nonzero -o {}'.format(
            inps.stackFile, inps.maskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    if ut.update_file(inps.avgSpatialCohFile, inps.stackFile):
        avgCmd = 'temporal_average.py {} --dataset coherence -o {}'.format(
            inps.stackFile, inps.avgSpatialCohFile)
        print(avgCmd)
        status = subprocess.Popen(avgCmd, shell=True).wait()

    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n**********  Select Reference Point  **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(
        inps.stackFile, inps.templateFile, inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    if template['pysar.unwrapError.method']:
        print('\n**********  Unwrapping Error Correction  **********')
        outName = '{}_unwCor.h5'.format(os.path.splitext(inps.stackFile)[0])
        unwCmd = 'unwrap_error.py {} --mask {} --template {}'.format(
            inps.stackFile, inps.maskFile, inps.templateFile)
        print(unwCmd)
        if ut.update_file(outName, inps.stackFile):
            print(
                'This might take a while depending on the size of your data set!'
            )
            status = subprocess.Popen(unwCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting phase unwrapping errors.\n')
        inps.stackFile = outName

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n**********  Modify Network  **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile,
                                                     inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception(
            'Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(
        inps.stackFile, inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    if ut.update_file(
            'Network.pdf',
            check_readable=False,
            inFile=[inps.stackFile, inps.cohSpatialAvgFile,
                    inps.templateFile]):
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print(
        '\n**********  Invert Network of Interferograms into Time-series  **********'
    )
    invCmd = 'ifgram_inversion.py {} --template {}'.format(
        inps.stackFile, inps.templateFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    if ut.update_file(inps.timeseriesFile, inps.stackFile):
        status = subprocess.Popen(invCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                       inps.minTempCoh,
                                                       inps.maskFile)
    print(maskCmd)
    if ut.update_file(inps.maskFile, inps.tempCohFile):
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while generating mask file from temporal coherence.')

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enought coherent pixels selected (minimum of {}). ".format(
            int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print(
            '\n**********  Local Oscillator Drift Correction for Envisat  **********'
        )
        outName = os.path.splitext(inps.timeseriesFile)[0] + '_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(
            inps.timeseriesFile, inps.geomFile, outName)
        print(lodCmd)
        if ut.update_file(outName, [inps.timeseriesFile, inps.geomFile]):
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n**********  Tropospheric Delay Correction  **********')
    inps.tropPolyOrder = template['pysar.troposphericDelay.polyOrder']
    inps.tropModel = template['pysar.troposphericDelay.weatherModel']
    inps.tropMethod = template['pysar.troposphericDelay.method']
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    if inps.tropMethod:
        # Check Conflict with base_trop_cor
        if template['pysar.deramp'] == 'base_trop_cor':
            msg = """
            Method Conflict: base_trop_cor is in conflict with {} option!
            base_trop_cor applies simultaneous ramp removal AND tropospheric correction.
            IGNORE base_trop_cor input and continue pysarApp.py.
            """
            warnings.warn(msg)
            template['pysar.deramp'] = False

        fbase = os.path.splitext(inps.timeseriesFile)[0]
        # Call scripts
        if inps.tropMethod == 'height_correlation':
            outName = '{}_tropHgt.h5'.format(fbase)
            print(
                'tropospheric delay correction with height-correlation approach'
            )
            tropCmd = ('tropcor_phase_elevation.py {t} -d {d} -p {p}'
                       ' -m {m} -o {o}').format(t=inps.timeseriesFile,
                                                d=inps.geomFile,
                                                p=inps.tropPolyOrder,
                                                m=inps.maskFile,
                                                o=outName)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while correcting tropospheric delay.\n')
            inps.timeseriesFile = outName

        elif inps.tropMethod == 'pyaps':
            inps.weatherDir = template['pysar.troposphericDelay.weatherDir']
            outName = '{}_{}.h5'.format(fbase, inps.tropModel)
            print(('Atmospheric correction using Weather Re-analysis dataset'
                   ' (PyAPS, Jolivet et al., 2011)'))
            print('Weather Re-analysis dataset: ' + inps.tropModel)
            tropCmd = ('tropcor_pyaps.py -f {t} --model {m} --dem {d}'
                       ' -i {i} -w {w}').format(t=inps.timeseriesFile,
                                                m=inps.tropModel,
                                                d=inps.geomFile,
                                                i=inps.geomFile,
                                                w=inps.weatherDir)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                if inps.tropFile:
                    tropCmd = 'diff.py {} {} -o {}'.format(
                        inps.timeseriesFile, inps.tropFile, outName)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(
                        inps.tropFile))
                    print(tropCmd)
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    print(
                        '\nError while correcting tropospheric delay, try the following:'
                    )
                    print('1) Check the installation of PyAPS')
                    print(
                        '   http://earthdef.caltech.edu/projects/pyaps/wiki/Main'
                    )
                    print('   Try in command line: python -c "import pyaps"')
                    print(
                        '2) Use other tropospheric correction method, height-correlation, for example'
                    )
                    print(
                        '3) or turn off the option by setting pysar.troposphericDelay.method = no.\n'
                    )
                    raise RuntimeError()
            inps.timeseriesFile = outName
        else:
            print('No atmospheric delay correction.')

    # Grab tropospheric delay file
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print(
        '\n**********  Topographic Residual (DEM error) Correction  **********'
    )
    outName = os.path.splitext(inps.timeseriesFile)[0] + '_demErr.h5'
    topoCmd = 'dem_error.py {} -g {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.geomFile, inps.templateFile, outName)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(topoCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
    else:
        print('No correction for topographic residuals.')

    ##############################################
    # Timeseries Residual Standard Deviation
    ##############################################
    print('\n**********  Timeseries Residual Root Mean Square  **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile,
                                                     inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    ##############################################
    # Reference in Time
    ##############################################
    print('\n**********  Select Reference Date  **********')
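    # reference_date.py re-references the time-series in time to the date
    # specified in the template file.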
    if template['pysar.reference.date']:
        outName = '{}_refDate.h5'.format(
            os.path.splitext(inps.timeseriesFile)[0])
        refCmd = 'reference_date.py {} -t {} -o {}'.format(
            inps.timeseriesFile, inps.templateFile, outName)
        print(refCmd)
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(refCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while changing reference date.\n')
        inps.timeseriesFile = outName
    else:
        print('No reference change in time.')

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n**********  Remove Phase Ramp  **********')
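    # Supported methods (see the branches below): polynomial ramp removal
    # (remove_ramp.py), baseline error correction (baseline_error.py), and
    # joint baseline + tropospheric correction (baseline_trop.py).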
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        if inps.geocoded and inps.derampMethod in [
                'baseline_cor', 'base_trop_cor'
        ]:
            warnings.warn(
                ('dataset is in geo coordinates,'
                 ' cannot apply {} method').format(inps.derampMethod))
            print('skip deramping and continue.')

        # Get executable command and output name
        derampCmd = None
        fbase = os.path.splitext(inps.timeseriesFile)[0]
        if inps.derampMethod in [
                'plane', 'quadratic', 'plane_range', 'quadratic_range',
                'plane_azimuth', 'quadratic_azimuth'
        ]:
            outName = '{}_{}.h5'.format(fbase, inps.derampMethod)
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(
                inps.timeseriesFile, inps.derampMethod, inps.derampMaskFile,
                outName)

        elif inps.derampMethod == 'baseline_cor':
            outName = '{}_baselineCor.h5'.format(fbase)
            derampCmd = 'baseline_error.py {} {}'.format(
                inps.timeseriesFile, inps.maskFile)

        elif inps.derampMethod in [
                'base_trop_cor', 'basetropcor', 'baselinetropcor'
        ]:
            print('Joint estimation of Baseline error and tropospheric delay')
            print('\t[height-correlation approach]')
            outName = '{}_baseTropCor.h5'.format(fbase)
            derampCmd = ('baseline_trop.py {t} {d} {p}'
                         ' range_and_azimuth {m}').format(
                             t=inps.timeseriesFile,
                             d=inps.geomFile,
                             p=inps.tropPolyOrder,
                             m=inps.maskFile)
        else:
            warnings.warn('Unrecognized phase ramp method: {}'.format(
                template['pysar.deramp']))

        # Execute command
        if derampCmd:
            print(derampCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
    else:
        print('No phase ramp removal.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n**********  Estimate Velocity  **********')
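    # timeseries2velocity.py fits a linear model in time to every pixel to
    # estimate the average displacement velocity.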
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.templateFile, inps.velFile)
    print(velCmd)
    if ut.update_file(inps.velFile, [inps.timeseriesFile, inps.templateFile]):
        status = subprocess.Popen(velCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
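    # Also estimate the apparent velocity contained in the tropospheric delay
    # file, for comparison with the deformation velocity above.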
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(
            os.path.splitext(inps.velFile)[0], suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
            inps.tropFile, inps.templateFile, inps.tropVelFile)
        print(velCmd)
        if ut.update_file(inps.tropVelFile,
                          [inps.tropFile, inps.templateFile]):
            status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    # Geocoding --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n**********  Post-processing  **********')
    if (template['pysar.save.hdfEos5'] is True
            and template['pysar.geocode'] is False):
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
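    # Resample the radar-coordinate products into geo coordinates using the
    # lookup table; geocode.py runs in update mode (--update).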
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                inps.velFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.velFile))
                inps.tempCohFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.timeseriesFile))
                inps.geomFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(
                inps.tempCohFile, inps.minTempCoh, outName)
            print(genCmd)
            if ut.update_file(outName, inps.tempCohFile):
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
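    # Mask the velocity file so that only reliable pixels remain.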
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile, inps.maskFile,
                                                  outName)
        print(maskCmd)
        if ut.update_file(outName, [inps.velFile, inps.maskFile]):
            status = subprocess.Popen(maskCmd, shell=True).wait()
        try:
            inps.velFile = glob.glob(outName)[0]
        except:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(
            os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        if ut.update_file(outName, inps.velFile, check_readable=False):
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
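    # Package the time-series together with temporal coherence, mask and
    # geometry into a single HDF-EOS5 file.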
    if template['pysar.save.hdfEos5'] is True:
        print('\n**********  Save Time-series in HDF-EOS5 Format  **********')
        if not inps.geocoded:
            warnings.warn(
                'Dataset is in radar coordinates, skip saving to HDF-EOS5 format.'
            )
        else:
            # Add attributes from custom template to timeseries file
            if templateCustom is not None:
                ut.add_attribute(inps.timeseriesFile, templateCustom)

            # Save to HDF-EOS5 format
            print('--------------------------------------------')
            hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                          ' -t {e}').format(t=inps.timeseriesFile,
                                            c=inps.tempCohFile,
                                            m=inps.maskFile,
                                            g=inps.geomFile,
                                            e=inps.templateFile)
            print(hdfeos5Cmd)
            # read metadata to get the mission name used in the HDF-EOS5 filename
            atr = readfile.read_attribute(inps.timeseriesFile)
            SAT = hdfeos5.get_mission_name(atr)
            try:
                inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
            except:
                inps.hdfeos5File = None
            if ut.update_file(inps.hdfeos5File, [
                    inps.timeseriesFile, inps.tempCohFile, inps.maskFile,
                    inps.geomFile
            ]):
                status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while generating HDF-EOS5 time-series file.\n')

    #############################################
    # Plot Figures
    #############################################
    inps.plotShellFile = os.path.join(os.path.dirname(__file__),
                                      '../sh/plot_pysarApp.sh')
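    # Shell script that plots the main products; it is copied into the working
    # directory below so its parameters can be edited and the script re-run.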
    plotCmd = './' + os.path.basename(inps.plotShellFile)
    inps.plot = template['pysar.plot']
    if inps.plot is True:
        print('\n**********  Plot Results / Save to PIC  **********')
        # Copy to the working directory if it does not exist there yet.
        if not os.path.isfile(plotCmd):
            print('copy {} to work directory: {}'.format(
                inps.plotShellFile, inps.workDir))
            shutil.copy2(inps.plotShellFile, inps.workDir)

    if inps.plot and os.path.isfile(plotCmd):
        print(plotCmd)
        status = subprocess.Popen(plotCmd, shell=True).wait()
        print('\n' + '-' * 50)
        print('For better figures:')
        print('  1) Edit parameters in plot_pysarApp.sh and re-run this script.')
        print('  2) Play with view.py, tsview.py and save_kml.py for more'
              ' advanced/customized figures.')
        if status != 0:
            raise Exception(
                'Error while plotting data files using {}'.format(plotCmd))

    #############################################
    # Time                                      #
    #############################################
    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs'.format(m, s))
    print('\n###############################################')
    print('End of PySAR processing!')
    print('################################################\n')