Example 1
def plot_bar4date_rms(inps):
    inps.figName = os.path.splitext(inps.rmsFile)[0] + '.pdf'
    if ut.update_file(inps.figName,
                      [inps.exDateFile, inps.refDateFile, inps.template_file],
                      check_readable=False):
        if inps.fig_size:
            fig = plt.figure(figsize=inps.fig_size)
        else:
            fig = plt.figure()
        ax = fig.add_subplot(111)
        font_size = 12

        dates, datevector = ptime.date_list2vector(inps.dateList)
        try:
            bar_width = ut.most_common(np.diff(dates).tolist()) * 3 / 4
        except Exception:
            bar_width = np.min(np.diff(dates).tolist()) * 3 / 4
        x_list = [i - bar_width / 2 for i in dates]

        inps.rmsList = [i * 1000. for i in inps.rmsList]
        min_rms = inps.min_rms * 1000.
        # Plot all dates
        ax.bar(x_list, inps.rmsList, bar_width.days)

        # Plot reference date
        ax.bar(x_list[inps.refDateIndex],
               inps.rmsList[inps.refDateIndex],
               bar_width.days,
               label='Reference date')

        # Plot exclude dates
        if inps.exIdxList:
            ex_x_list = [x_list[i] for i in inps.exIdxList]
            inps.exRmsList = [inps.rmsList[i] for i in inps.exIdxList]
            ax.bar(ex_x_list,
                   inps.exRmsList,
                   bar_width.days,
                   color='darkgray',
                   label='Exclude date(s)')

        # Plot min_rms line
        ax, xmin, xmax = pp.auto_adjust_xaxis_date(
            ax, datevector, font_size, every_year=inps.tick_year_num)
        ax.plot(np.array([xmin, xmax]), np.array([min_rms, min_rms]), '--k')

        # axis format
        ax = pp.auto_adjust_yaxis(ax,
                                  inps.rmsList + [min_rms],
                                  font_size,
                                  ymin=0.0)
        ax.set_xlabel('Time [years]', fontsize=font_size)
        ax.set_ylabel('Root Mean Square [mm]', fontsize=font_size)
        ax.yaxis.set_ticks_position('both')
        ax.tick_params(labelsize=font_size)
        plt.legend(fontsize=font_size)

        # save figure
        fig.savefig(inps.figName, bbox_inches='tight', transparent=True)
        print('save figure to file: ' + inps.figName)
    return inps
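
Every example on this page keys its work off ut.update_file(). The helper below is only a minimal sketch of that contract as inferred from the call sites in these examples (output file, optional input file list, optional readability check); the real pysar.utils implementation may differ in its details.

import os

def update_file_sketch(out_file, in_files=None, check_readable=True):
    """Return True if out_file is missing, unreadable, or older than any input file."""
    if not out_file or not os.path.isfile(out_file):
        return True
    if check_readable and os.path.getsize(out_file) == 0:
        # crude readability proxy; the real helper inspects the file content
        return True
    if in_files:
        if isinstance(in_files, str):
            in_files = [in_files]
        in_files = [f for f in in_files if f and os.path.isfile(f)]
        if in_files and os.path.getmtime(out_file) < max(os.path.getmtime(f) for f in in_files):
            return True
    return False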
Example 2
def update_object(outFile, inObj, box, updateMode=True):
    """Do not write h5 file if: 1) h5 exists and readable,
                                2) it contains all date12 from ifgramStackDict,
                                            or all datasets from geometryDict"""
    updateFile = True
    if updateMode and not ut.update_file(outFile, check_readable=True):
        if inObj.name == 'ifgramStack':
            outObj = ifgramStack(outFile)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and sorted(outObj.get_date12_list(dropIfgram=False))
                    == sorted(inObj.get_date12_list())):
                print('All date12 exist in file {} with the same size as required,'
                      ' no need to re-load.'.format(outFile))
                updateFile = False

        elif inObj.name == 'geometry':
            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and all(i in outObj.datasetNames
                            for i in inObj.get_dataset_list())):
                print('All datasets exist in file {} with the same size as required,'
                      ' no need to re-load.'.format(outFile))
                updateFile = False
    return updateFile
Example 3
def save_date2txt_file(inps):
    inps.refDateFile = 'reference_date.txt'
    if ut.update_file(inps.refDateFile,
                      [inps.timeseries_file, inps.mask_file, inps.template_file],
                      check_readable=False):
        with open(inps.refDateFile, 'w') as f:
            f.write(inps.refDate + '\n')
        print('save date to file: ' + inps.refDateFile)

    inps.exDateFile = 'exclude_date.txt'
    if inps.exIdxList and ut.update_file(
            inps.exDateFile,
        [inps.timeseries_file, inps.mask_file, inps.template_file],
            check_readable=False):
        f = open(inps.exDateFile, 'w')
        for i in inps.exIdxList:
            f.write(inps.dateList[i] + '\n')
        f.close()
        print('save date(s) to file: ' + inps.exDateFile)
    else:
        print('None.')
    return inps
Example 4
def run_resample(inps):
    """resample all input files"""
    start_time = time.time()

    # Prepare geometry for geocoding
    res_obj = resample(lookupFile=inps.lookupFile, dataFile=inps.file[0],
                       SNWE=inps.SNWE, laloStep=inps.laloStep)
    res_obj.get_geometry_definition()

    inps.nprocs = multiprocessing.cpu_count()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50+'\nresampling file: {}'.format(infile))
        outfile = auto_output_filename(infile, inps)
        if inps.updateMode and not ut.update_file(outfile, [infile, inps.lookupFile]):
            print('update mode is ON, skip geocoding.')
            return outfile

        # read source data and resample
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        dsResDict = dict()
        for dsName in dsNames:
            print('resampling {d:<{w}} from {f} using {n} processor cores ...'.format(
                d=dsName, w=maxDigit, f=os.path.basename(infile), n=inps.nprocs))
            data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
            res_data = resample_data(data, inps, res_obj)
            dsResDict[dsName] = res_data

        # update metadata
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        if inps.radar2geo:
            atr = metadata_radar2geo(atr, res_obj)
        else:
            atr = metadata_geo2radar(atr, res_obj)
        if len(dsNames) == 1 and dsNames[0] not in ['timeseries']:
            atr['FILE_TYPE'] = dsNames[0]
            infile = None

        writefile.write(dsResDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time()-start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return outfile
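
A brief, hypothetical driver for run_resample(); the attribute names below are taken from how the function reads inps above, not from the real command-line parser, so treat them as an illustration only.

import argparse

inps = argparse.Namespace(
    file=['velocity.h5'],            # file(s) to resample
    lookupFile='geometryRadar.h5',   # lookup table between radar and geo coordinates
    dset=None,                       # None = resample every dataset in the file
    SNWE=None, laloStep=None,        # let the resample object choose the output grid
    radar2geo=True,                  # geocode from radar to geo coordinates
    updateMode=True)                 # skip files whose output is already up to date
# outfile = run_resample(inps)       # call commented out: it needs the surrounding module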
Example 5
def copy_aux_file(inps):
    # for Univ of Miami
    fileList = [
        'PROCESS/unavco_attributes.txt', 'PROCESS/bl_list.txt',
        'SLC/summary*slc.jpg'
    ]
    try:
        projectDir = os.path.join(os.getenv('SCRATCHDIR'), inps.projectName)
        fileList = ut.get_file_list(
            [os.path.join(projectDir, i) for i in fileList], abspath=True)
        for file in fileList:
            if ut.update_file(os.path.basename(file),
                              file,
                              check_readable=False):
                shutil.copy2(file, inps.workDir)
                print('copy {} to work directory'.format(
                    os.path.basename(file)))
    except Exception:
        pass
    return inps
Example 6
def ifgram_inversion(ifgram_file='ifgramStack.h5', inps=None):
    """Implementation of the SBAS algorithm.
    Parameters: ifgram_file : string,
                    HDF5 file name of the interferogram stack
                inps : namespace, including the following options:
    Returns:    timeseriesFile : string
                    HDF5 file name of the output timeseries
                tempCohFile : string
                    HDF5 file name of temporal coherence
    Example:
        inps = cmd_line_parse()
        ifgram_inversion('ifgramStack.h5', inps)
    """
    start_time = time.time()

    # Check Inputs
    if not inps:
        inps = cmd_line_parse()

    if inps.update_mode and not ut.update_file(inps.timeseriesFile,
                                               ifgram_file):
        return inps.timeseriesFile, inps.tempCohFile

    A = check_design_matrix(ifgram_file, weight_func=inps.weightFunc)
    num_date = A.shape[1] + 1

    # split ifgram_file into blocks to save memory
    box_list = split_into_boxes(ifgram_file, chunk_size=inps.chunk_size)
    num_box = len(box_list)

    if inps.split_file:
        # split ifgram_file into small files and write each of them
        print(
            '\n---------------------------- Splitting Input File -----------------------------'
        )
        ifgram_files = split_ifgram_file(ifgram_file,
                                         chunk_size=inps.chunk_size)
        num_file = len(ifgram_files)

        # Loop
        for fname in ifgram_files:
            if num_file > 1:
                print('\n------- Processing {} ({} in total) --------------'.
                      format(fname, num_file))
            ifgram_inversion_patch(fname,
                                   box=None,
                                   ref_phase=None,
                                   weight_func=inps.weightFunc,
                                   mask_dataset_name=inps.maskDataset,
                                   mask_threshold=inps.maskThreshold,
                                   water_mask_file=inps.waterMaskFile,
                                   skip_zero_phase=inps.skip_zero_phase)
    else:
        # read ifgram_file in small patches and write them together
        ref_phase = get_ifgram_reference_phase(ifgram_file,
                                               skip_reference=inps.skip_ref)

        # Initialization of output matrix
        stack_obj = ifgramStack(ifgram_file)
        stack_obj.open(print_msg=False)
        ts = np.zeros((num_date, stack_obj.length, stack_obj.width),
                      np.float32)
        temp_coh = np.zeros((stack_obj.length, stack_obj.width), np.float32)
        ts_std = np.zeros(ts.shape, np.float32)
        num_inv_ifgram = np.zeros(temp_coh.shape, np.int16)

        # Loop
        for i in range(num_box):
            box = box_list[i]
            if num_box > 1:
                print('\n------- Processing Patch {} out of {} --------------'.
                      format(i + 1, num_box))
            (tsi, temp_cohi, ts_stdi, ifg_numi) = ifgram_inversion_patch(
                ifgram_file,
                box=box,
                ref_phase=ref_phase,
                weight_func=inps.weightFunc,
                mask_dataset_name=inps.maskDataset,
                mask_threshold=inps.maskThreshold,
                water_mask_file=inps.waterMaskFile,
                skip_zero_phase=inps.skip_zero_phase)

            temp_coh[box[1]:box[3], box[0]:box[2]] = temp_cohi
            ts[:, box[1]:box[3], box[0]:box[2]] = tsi
            ts_std[:, box[1]:box[3], box[0]:box[2]] = ts_stdi
            num_inv_ifgram[box[1]:box[3], box[0]:box[2]] = ifg_numi

        # metadata
        metadata = dict(stack_obj.metadata)
        metadata[key_prefix + 'weightFunc'] = inps.weightFunc
        write2hdf5_file(ifgram_file,
                        metadata,
                        ts,
                        temp_coh,
                        ts_std,
                        num_inv_ifgram,
                        suffix='')

    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.timeseriesFile, inps.tempCohFile
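
The inversion above reads the stack in row blocks ("boxes") to bound memory use. The function below only illustrates that chunking idea, using the same (x0, y0, x1, y1) box convention as the patch loop; it is not the actual split_into_boxes() from pysar.

def split_into_row_boxes(length, width, num_ifgram, chunk_size=100e6, bytes_per_pixel=4):
    """Split a (num_ifgram, length, width) float32 stack into row blocks of about chunk_size bytes."""
    bytes_per_row = width * num_ifgram * bytes_per_pixel
    rows_per_box = max(1, int(chunk_size // bytes_per_row))
    boxes = []
    for y0 in range(0, length, rows_per_box):
        y1 = min(y0 + rows_per_box, length)
        boxes.append((0, y0, width, y1))   # (x0, y0, x1, y1)
    return boxes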
Example 7
def main(iargs=None):
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)
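    # Overall workflow, as laid out by the section banners below:
    #   load data -> auxiliary files -> reference in space -> (optional) unwrapping
    #   error correction -> modify network -> network inversion -> corrections
    #   (LOD, troposphere, DEM error, phase ramp) -> reference in time -> velocity
    #   -> post-processing (geocode, mask, KMZ, HDF-EOS5) -> plotting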

    # Project Name
    inps.projectName = None
    if inps.templateFileCustom:
        inps.templateFileCustom = os.path.abspath(inps.templateFileCustom)
        inps.projectName = os.path.splitext(
            os.path.basename(inps.templateFileCustom))[0]
        print('Project name: ' + inps.projectName)

    # Work directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'),
                                        inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory: " + inps.workDir)

    copy_aux_file(inps)

    inps, template, templateCustom = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n**********  Load Data  **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)

    print('-' * 50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    # if inps.templateFileCustom:
    #    atrCmd = 'add_attribute.py {} {}'.format(inps.stackFile, inps.templateFileCustom)
    #    print(atrCmd)
    #    status = subprocess.Popen(atrCmd, shell=True).wait()
    #ut.add_attribute(inps.stackFile, template)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributes for a fresh re-run.\n' + '-' * 50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n**********  Generate Auxiliary Files  **********')
    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    if ut.update_file(inps.maskFile, inps.stackFile):
        maskCmd = 'generate_mask.py {} --nonzero -o {}'.format(
            inps.stackFile, inps.maskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    if ut.update_file(inps.avgSpatialCohFile, inps.stackFile):
        avgCmd = 'temporal_average.py {} --dataset coherence -o {}'.format(
            inps.stackFile, inps.avgSpatialCohFile)
        print(avgCmd)
        status = subprocess.Popen(avgCmd, shell=True).wait()

    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n**********  Select Reference Point  **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(
        inps.stackFile, inps.templateFile, inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    if template['pysar.unwrapError.method']:
        print('\n**********  Unwrapping Error Correction  **********')
        outName = '{}_unwCor.h5'.format(os.path.splitext(inps.stackFile)[0])
        unwCmd = 'unwrap_error.py {} --mask {} --template {}'.format(
            inps.stackFile, inps.maskFile, inps.templateFile)
        print(unwCmd)
        if ut.update_file(outName, inps.stackFile):
            print(
                'This might take a while depending on the size of your data set!'
            )
            status = subprocess.Popen(unwCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting phase unwrapping errors.\n')
        inps.stackFile = outName

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n**********  Modify Network  **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile,
                                                     inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception(
            'Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(
        inps.stackFile, inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    if ut.update_file(
            'Network.pdf',
            check_readable=False,
            inFile=[inps.stackFile, inps.cohSpatialAvgFile,
                    inps.templateFile]):
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print(
        '\n**********  Invert Network of Interferograms into Time-series  **********'
    )
    invCmd = 'ifgram_inversion.py {} --template {}'.format(
        inps.stackFile, inps.templateFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    if ut.update_file(inps.timeseriesFile, inps.stackFile):
        status = subprocess.Popen(invCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                       inps.minTempCoh,
                                                       inps.maskFile)
    print(maskCmd)
    if ut.update_file(inps.maskFile, inps.tempCohFile):
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while generating mask file from temporal coherence.')

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enought coherent pixels selected (minimum of {}). ".format(
            int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print(
            '\n**********  Local Oscillator Drift Correction for Envisat  **********'
        )
        outName = os.path.splitext(inps.timeseriesFile)[0] + '_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(
            inps.timeseriesFile, inps.geomFile, outName)
        print(lodCmd)
        if ut.update_file(outName, [inps.timeseriesFile, inps.geomFile]):
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n**********  Tropospheric Delay Correction  **********')
    inps.tropPolyOrder = template['pysar.troposphericDelay.polyOrder']
    inps.tropModel = template['pysar.troposphericDelay.weatherModel']
    inps.tropMethod = template['pysar.troposphericDelay.method']
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except Exception:
        inps.tropFile = None

    if inps.tropMethod:
        # Check Conflict with base_trop_cor
        if template['pysar.deramp'] == 'base_trop_cor':
            msg = """
            Method Conflict: base_trop_cor is in conflict with {} option!
            base_trop_cor applies simultaneous ramp removal AND tropospheric correction.
            IGNORE base_trop_cor input and continue pysarApp.py.
            """
            warnings.warn(msg)
            template['pysar.deramp'] = False

        fbase = os.path.splitext(inps.timeseriesFile)[0]
        # Call scripts
        if inps.tropMethod == 'height_correlation':
            outName = '{}_tropHgt.h5'.format(fbase)
            print(
                'tropospheric delay correction with height-correlation approach'
            )
            tropCmd = ('tropcor_phase_elevation.py {t} -d {d} -p {p}'
                       ' -m {m} -o {o}').format(t=inps.timeseriesFile,
                                                d=inps.geomFile,
                                                p=inps.tropPolyOrder,
                                                m=inps.maskFile,
                                                o=outName)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while correcting tropospheric delay.\n')
            inps.timeseriesFile = outName

        elif inps.tropMethod == 'pyaps':
            inps.weatherDir = template['pysar.troposphericDelay.weatherDir']
            outName = '{}_{}.h5'.format(fbase, inps.tropModel)
            print(('Atmospheric correction using Weather Re-analysis dataset'
                   ' (PyAPS, Jolivet et al., 2011)'))
            print('Weather Re-analysis dataset: ' + inps.tropModel)
            tropCmd = ('tropcor_pyaps.py -f {t} --model {m} --dem {d}'
                       ' -i {i} -w {w}').format(t=inps.timeseriesFile,
                                                m=inps.tropModel,
                                                d=inps.geomFile,
                                                i=inps.geomFile,
                                                w=inps.weatherDir)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                if inps.tropFile:
                    tropCmd = 'diff.py {} {} -o {}'.format(
                        inps.timeseriesFile, inps.tropFile, outName)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(
                        inps.tropFile))
                    print(tropCmd)
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    print(
                        '\nError while correcting tropospheric delay, try the following:'
                    )
                    print('1) Check the installation of PyAPS')
                    print(
                        '   http://earthdef.caltech.edu/projects/pyaps/wiki/Main'
                    )
                    print('   Try in command line: python -c "import pyaps"')
                    print(
                        '2) Use another tropospheric correction method, e.g. height_correlation'
                    )
                    print(
                        '3) Turn off the correction by setting pysar.troposphericDelay.method = no\n'
                    )
                    raise RuntimeError()
            inps.timeseriesFile = outName
        else:
            print('No atmospheric delay correction.')

    # Grab tropospheric delay file
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except Exception:
        inps.tropFile = None

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print(
        '\n**********  Topographic Residual (DEM error) Correction  **********'
    )
    outName = os.path.splitext(inps.timeseriesFile)[0] + '_demErr.h5'
    topoCmd = 'dem_error.py {} -g {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.geomFile, inps.templateFile, outName)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(topoCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
    else:
        print('No correction for topographic residuals.')

    ##############################################
    # Timeseries Residual Standard Deviation
    ##############################################
    print('\n**********  Timeseries Residual Root Mean Square  **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile,
                                                     inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    ##############################################
    # Reference in Time
    ##############################################
    print('\n**********  Select Reference Date  **********')
    if template['pysar.reference.date']:
        outName = '{}_refDate.h5'.format(
            os.path.splitext(inps.timeseriesFile)[0])
        refCmd = 'reference_date.py {} -t {} -o {}'.format(
            inps.timeseriesFile, inps.templateFile, outName)
        print(refCmd)
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(refCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while changing reference date.\n')
        inps.timeseriesFile = outName
    else:
        print('No reference change in time.')

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n**********  Remove Phase Ramp  **********')
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        if inps.geocoded and inps.derampMethod in [
                'baseline_cor', 'base_trop_cor'
        ]:
            warnings.warn(
                ('dataset is in geo coordinates,'
                 ' can not apply {} method').format(inps.derampMethod))
            print('skip deramping and continue.')

        # Get executable command and output name
        derampCmd = None
        fbase = os.path.splitext(inps.timeseriesFile)[0]
        if inps.derampMethod in [
                'plane', 'quadratic', 'plane_range', 'quadratic_range',
                'plane_azimuth', 'quadratic_azimuth'
        ]:
            outName = '{}_{}.h5'.format(fbase, inps.derampMethod)
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(
                inps.timeseriesFile, inps.derampMethod, inps.derampMaskFile,
                outName)

        elif inps.derampMethod == 'baseline_cor':
            outName = '{}_baselineCor.h5'.format(fbase)
            derampCmd = 'baseline_error.py {} {}'.format(
                inps.timeseriesFile, inps.maskFile)

        elif inps.derampMethod in [
                'base_trop_cor', 'basetropcor', 'baselinetropcor'
        ]:
            print('Joint estimation of Baseline error and tropospheric delay')
            print('\t[height-correlation approach]')
            outName = '{}_baseTropCor.h5'.format(fbase)
            derampCmd = ('baseline_trop.py {t} {d} {p}'
                         ' range_and_azimuth {m}').format(
                             t=inps.timeseriesFile,
                             d=inps.geomFile,
                             p=inps.tropPolyOrder,
                             m=inps.maskFile)
        else:
            warnings.warn('Unrecognized phase ramp method: {}'.format(
                template['pysar.deramp']))

        # Execute command
        if derampCmd:
            print(derampCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
    else:
        print('No phase ramp removal.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n**********  Estimate Velocity  **********')
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.templateFile, inps.velFile)
    print(velCmd)
    if ut.update_file(inps.velFile, [inps.timeseriesFile, inps.templateFile]):
        status = subprocess.Popen(velCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(
            os.path.splitext(inps.velFile)[0], suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
            inps.tropFile, inps.templateFile, inps.tropVelFile)
        print(velCmd)
        if ut.update_file(inps.tropVelFile,
                          [inps.tropFile, inps.templateFile]):
            status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    # Geocodeing --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n**********  Post-processing  **********')
    if template['pysar.save.hdfEos5'] is True and template[
            'pysar.geocode'] is False:
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                inps.velFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.velFile))
                inps.tempCohFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.timeseriesFile))
                inps.geomFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(
                inps.tempCohFile, inps.minTempCoh, outName)
            print(genCmd)
            if ut.update_file(outName, inps.tempCohFile):
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile, inps.maskFile,
                                                  outName)
        print(maskCmd)
        if ut.update_file(outName, [inps.velFile, inps.maskFile]):
            status = subprocess.Popen(maskCmd, shell=True).wait()
        try:
            inps.velFile = glob.glob(outName)[0]
        except Exception:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(
            os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        if ut.update_file(outName, inps.velFile, check_readable=False):
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
    if template['pysar.save.hdfEos5'] is True:
        print('\n**********  Save Time-series in HDF-EOS5 Format  **********')
        if not inps.geocoded:
            warnings.warn(
                'Dataset is in radar coordinates, skip saving to HDF-EOS5 format.'
            )
        else:
            # Add attributes from custom template to timeseries file
            if templateCustom is not None:
                ut.add_attribute(inps.timeseriesFile, templateCustom)

            # Save to HDF-EOS5 format
            print('--------------------------------------------')
            hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                          ' -t {e}').format(t=inps.timeseriesFile,
                                            c=inps.tempCohFile,
                                            m=inps.maskFile,
                                            g=inps.geomFile,
                                            e=inps.templateFile)
            print(hdfeos5Cmd)
            SAT = hdfeos5.get_mission_name(atr)
            try:
                inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
            except Exception:
                inps.hdfeos5File = None
            if ut.update_file(inps.hdfeos5File, [
                    inps.timeseriesFile, inps.tempCohFile, inps.maskFile,
                    inps.geomFile
            ]):
                status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while generating HDF-EOS5 time-series file.\n')

    #############################################
    # Plot Figures
    #############################################
    inps.plotShellFile = os.path.join(os.path.dirname(__file__),
                                      '../sh/plot_pysarApp.sh')
    plotCmd = './' + os.path.basename(inps.plotShellFile)
    inps.plot = template['pysar.plot']
    if inps.plot is True:
        print('\n**********  Plot Results / Save to PIC  **********')
        # Copy to the working directory if it is not there yet.
        if not os.path.isfile(plotCmd):
            print('copy {} to work directory: {}'.format(
                inps.plotShellFile, inps.workDir))
            shutil.copy2(inps.plotShellFile, inps.workDir)

    if inps.plot and os.path.isfile(plotCmd):
        print(plotCmd)
        status = subprocess.Popen(plotCmd, shell=True).wait()
        print('\n' + '-' * 50)
        print('For better figures:')
        print(
            '  1) Edit parameters in plot_pysarApp.sh and re-run this script.')
        print(
            '  2) Play with view.py, tsview.py and save_kml.py for more advanced/customized figures.'
        )
        if status != 0:
            raise Exception(
                'Error while plotting data files using {}'.format(plotCmd))

    #############################################
    # Time                                      #
    #############################################
    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs'.format(m, s))
    print('\n###############################################')
    print('End of PySAR processing!')
    print('################################################\n')
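
Nearly every step in main() above repeats the same pattern: print the command, run it through a shell only when ut.update_file() reports the output as outdated, and raise on a non-zero exit status. The helper below is a minimal sketch of that pattern; it is not part of pysarApp.py, and the import path is an assumption.

import subprocess

from pysar.utils import utils as ut   # assumed import path for update_file()

def run_step(cmd, out_file, in_files=None, check_readable=True):
    """Run cmd in a shell only when out_file needs to be (re)generated."""
    print(cmd)
    if ut.update_file(out_file, in_files, check_readable=check_readable):
        status = subprocess.Popen(cmd, shell=True).wait()
        if status != 0:
            raise RuntimeError('Error while running: {}'.format(cmd))
    return out_file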
Example 8
def read_template(inps):
    print('\n**********  Read Template File  **********')
    # default template
    inps.templateFile = os.path.join(inps.workDir, 'pysarApp_template.txt')
    if not os.path.isfile(inps.templateFile):
        print('generate default template file: ' + inps.templateFile)
        with open(inps.templateFile, 'w') as f:
            f.write(TEMPLATE)
    else:
        inps.templateFile = check_obsolete_default_template(inps.templateFile)

    # custom template
    templateCustom = None
    if inps.templateFileCustom:
        # Copy custom template file to work directory
        if ut.update_file(os.path.basename(inps.templateFileCustom),
                          inps.templateFileCustom,
                          check_readable=False):
            shutil.copy2(inps.templateFileCustom, inps.workDir)
            print('copy {} to work directory'.format(
                os.path.basename(inps.templateFileCustom)))

        # Read custom template
        print('read custom template file: ' + inps.templateFileCustom)
        templateCustom = readfile.read_template(inps.templateFileCustom)
        # normalize some loosely typed values
        for key in templateCustom.keys():
            if templateCustom[key].lower() in ['default']:
                templateCustom[key] = 'auto'
            elif templateCustom[key].lower() in ['n', 'off', 'false']:
                templateCustom[key] = 'no'
            elif templateCustom[key].lower() in ['y', 'on', 'true']:
                templateCustom[key] = 'yes'
        for key in ['pysar.deramp', 'pysar.troposphericDelay.method']:
            if key in templateCustom.keys():
                templateCustom[key] = templateCustom[key].lower().replace(
                    '-', '_')
        if 'processor' in templateCustom.keys():
            templateCustom['pysar.load.processor'] = templateCustom[
                'processor']

        # Update default template with custom input template
        print('update default template based on input custom template')
        inps.templateFile = ut.update_template_file(inps.templateFile,
                                                    templateCustom)

    if inps.generate_template:
        raise SystemExit('Exit as planned after template file generation.')

    print('read default template file: ' + inps.templateFile)
    template = readfile.read_template(inps.templateFile)
    template = ut.check_template_auto_value(template)

    # Get existing file name: unavco_attributes.txt
    try:
        inps.unavcoMetadataFile = ut.get_file_list('unavco_attribute*txt',
                                                   abspath=True)[0]
    except Exception:
        inps.unavcoMetadataFile = None
        print('No UNAVCO attributes file found.')

    return inps, template, templateCustom