Example #1
def main(argv):
    inps = cmdLineParse()
    inps.outfile = os.path.abspath(inps.outfile)
    atr = readfile.read_attribute(inps.file[0])
    k = atr['FILE_TYPE']

    if not inps.sensor:
        inps.sensor = get_mission_name(atr)
    print 'Sensor name: %s' % (inps.sensor)

    m_date_list = []
    s_date_list = []
    bperp_list = []

    inps.file = ut.get_file_list(inps.file, abspath=True)
    if os.path.splitext(inps.file[0])[1] not in ['.h5', '.he5']:
        ifgramNum = len(inps.file)
        print 'Number of interferograms: %d' % (ifgramNum)
        for fname in inps.file:
            try:
                date12 = str(
                    re.findall(r'\d{8}[-_]\d{8}',
                               os.path.basename(fname))[0]).replace('_', '-')
            except IndexError:
                date12 = str(
                    re.findall(r'\d{6}[-_]\d{6}',
                               os.path.basename(fname))[0]).replace('_', '-')
            m_date, s_date = date12.split('-')
            bperp = readfile.read_attribute(fname)['P_BASELINE_TOP_HDR']
            m_date_list.append(m_date)
            s_date_list.append(s_date)
            bperp_list.append(bperp)

    else:
        h5 = h5py.File(inps.file[0], 'r')
        ifgram_list = ut.check_drop_ifgram(h5)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        m_date_list = [date12.split('-')[0] for date12 in date12_list]
        s_date_list = [date12.split('-')[1] for date12 in date12_list]
        for ifgram in ifgram_list:
            bperp = h5[k][ifgram].attrs['P_BASELINE_TOP_HDR']
            bperp_list.append(bperp)
        ifgramNum = len(ifgram_list)

    fout = '{0} {1}     {2:<15}   {3}\n'
    fl = open(inps.outfile, 'w')
    for i in range(ifgramNum):
        fl.write(
            fout.format(m_date_list[i], s_date_list[i], bperp_list[i],
                        inps.sensor))
    fl.close()
    print 'write to %s' % (inps.outfile)
    return
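
A minimal, self-contained sketch of the date-pair parsing that the try/except above performs: try the 8-digit YYYYMMDD_YYYYMMDD pattern first, then fall back to the 6-digit one. The helper name and file names below are made up for illustration.

import os
import re

def parse_date12(fname):
    # Hypothetical helper: extract 'YYYYMMDD-YYYYMMDD' (or 'YYMMDD-YYMMDD') from a file name.
    base = os.path.basename(fname)
    for pattern in (r'\d{8}[-_]\d{8}', r'\d{6}[-_]\d{6}'):
        found = re.findall(pattern, base)
        if found:
            return found[0].replace('_', '-')
    raise ValueError('no date pair found in %s' % fname)

print(parse_date12('filt_20150103_20150127_4rlks.unw'))  # 20150103-20150127
print(parse_date12('filt_150103-150127.cor'))            # 150103-150127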
Example #2
def temporal_coherence(timeseriesFile, ifgramFile):
    '''Calculate temporal coherence based on input timeseries file and interferograms file
    Inputs:
        timeseriesFile - string, path of time series file
        ifgramFile     - string, path of interferograms file
    Output:
        temp_coh - 2D np.array, temporal coherence in float32
    '''

    # Basic Info
    atr_ts = readfile.read_attribute(timeseriesFile)
    length = int(atr_ts['FILE_LENGTH'])
    width = int(atr_ts['WIDTH'])
    pixel_num = length * width

    # Read time series data
    h5timeseries = h5py.File(timeseriesFile, 'r')
    date_list = sorted(h5timeseries['timeseries'].keys())
    date_num = len(date_list)

    print "load time series: " + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    timeseries = np.zeros((date_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5timeseries['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5timeseries.close()

    # Convert displacement from meter to radian
    range2phase = -4 * np.pi / float(atr_ts['WAVELENGTH'])
    timeseries *= range2phase

    # interferograms data
    print "interferograms file: " + ifgramFile
    atr_ifgram = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    ifgram_list = ut.check_drop_ifgram(h5ifgram, atr_ifgram, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Design matrix
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    A1, B = ut.design_matrix(ifgramFile, date12_list)
    A0 = -1 * np.ones([ifgram_num, 1])
    A = np.hstack((A0, A1))

    # Get reference pixel
    try:
        ref_x = int(atr_ts['ref_x'])
        ref_y = int(atr_ts['ref_y'])
        print 'find reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except (KeyError, ValueError):
        print 'No ref_x/y found! Can not calculate temporal coherence without it.'
        raise  # reference pixel is required below

    print 'calculating temporal coherence interferogram by interferogram ...'
    print 'number of interferograms: ' + str(ifgram_num)
    temp_coh = np.zeros(pixel_num, dtype=np.float32) + 0j
    prog_bar = ptime.progress_bar(maxValue=ifgram_num, prefix='calculating: ')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # read interferogram
        data = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        data -= data[ref_y, ref_x]
        data = data.flatten()

        # calculate difference between observed and estimated data
        dataEst = np.dot(A[i, :], timeseries)
        dataDiff = data - dataEst
        temp_coh += np.exp(1j * dataDiff)
        prog_bar.update(i + 1, suffix=date12_list[i])
    prog_bar.close()
    del timeseries, data, dataEst, dataDiff
    h5ifgram.close()

    temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
        (length, width)),
                        dtype=np.float32)
    return temp_coh
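
The loop above accumulates one complex residual per interferogram and takes the magnitude of the sum, i.e. temp_coh = |sum_i exp(1j*(phi_obs_i - A_i*ts))| / N. A numpy-only sketch of the same computation on tiny random data (all values invented, purely illustrative):

import numpy as np

ifgram_num, pixel_num = 5, 4
A = np.random.randn(ifgram_num, 3)            # stand-in design matrix
ts = np.random.randn(3, pixel_num)            # stand-in time-series phase
phase_obs = A.dot(ts) + 0.1 * np.random.randn(ifgram_num, pixel_num)

temp_coh = np.zeros(pixel_num, dtype=np.complex64)
for i in range(ifgram_num):
    phase_est = np.dot(A[i, :], ts)           # predicted interferogram phase
    temp_coh += np.exp(1j * (phase_obs[i] - phase_est))
temp_coh = np.abs(temp_coh) / ifgram_num      # 1.0 means a perfectly consistent pixel
print(temp_coh)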
Example #3
def main(argv):
    inps = cmdLineParse()
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    ##### 1. Read Info
    # Read dateList and bperpList
    ext = os.path.splitext(inps.file)[1]
    if ext in ['.h5']:
        atr = readfile.read_attribute(inps.file)
        k = atr['FILE_TYPE']
        print 'reading date and perpendicular baseline from ' + k + ' file: ' + os.path.basename(
            inps.file)
        if k not in multi_group_hdf5_file:
            raise ValueError('only the following file types are supported:\n' +
                             str(multi_group_hdf5_file))
        if not inps.coherence_file and k == 'coherence':
            inps.coherence_file = inps.file
        pbase_list = ut.perp_baseline_ifgram2timeseries(inps.file)[0]
        date8_list = ptime.ifgram_date_list(inps.file)
    else:
        print 'reading date and perpendicular baseline from baseline list file: ' + inps.bl_list_file
        date8_list, pbase_list = pnet.read_baseline_file(
            inps.bl_list_file)[0:2]
    print 'number of acquisitions  : ' + str(len(date8_list))

    # Read Pairs Info
    print 'reading pairs info from file: ' + inps.file
    date12_list = pnet.get_date12_list(inps.file)
    print 'number of interferograms: ' + str(len(date12_list))

    # Read drop_ifgram
    date8_list_drop = []
    date12_list_drop = []
    if ext in ['.h5', '.he5']:
        h5 = h5py.File(inps.file, 'r')
        ifgram_list_all = sorted(h5[k].keys())
        ifgram_list_keep = ut.check_drop_ifgram(h5)
        date12_list_keep = ptime.list_ifgram2date12(ifgram_list_keep)
        # Get date12_list_drop
        date12_list_drop = sorted(
            list(set(date12_list) - set(date12_list_keep)))
        print 'number of interferograms marked as dropped: ' + str(
            len(date12_list_drop))
        print 'number of interferograms marked as kept   : ' + str(
            len(date12_list_keep))

        # Get date_list_drop
        m_dates = [i.split('-')[0] for i in date12_list_keep]
        s_dates = [i.split('-')[1] for i in date12_list_keep]
        date8_list_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        date8_list_drop = sorted(list(set(date8_list) - set(date8_list_keep)))
        print 'number of acquisitions marked as dropped: ' + str(
            len(date8_list_drop))

    # Read Coherence List
    inps.coherence_list = None
    if inps.coherence_file and os.path.isfile(inps.coherence_file):
        if inps.mask_file and not os.path.isfile(inps.mask_file):
            inps.mask_file = None
        inps.coherence_list, inps.coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file, \
                                                                       saveList=True, checkAoi=False)

        if all(np.isnan(inps.coherence_list)):
            print 'WARNING: all coherence values are nan! Ignore coherence and continue.'
            inps.coherence_list = None

        # Check subset of date12 info between input file and coherence file
        if not set(inps.coh_date12_list) >= set(date12_list):
            print 'WARNING: not every pair/date12 from input file is in coherence file'
            print 'turn off the color plotting of interferograms based on coherence'
            inps.coherence_list = None
        elif set(inps.coh_date12_list) > set(date12_list):
            print 'extract coherence value for all pair/date12 in input file'
            inps.coherence_list = [
                inps.coherence_list[inps.coh_date12_list.index(i)]
                for i in date12_list
            ]

    #inps.coh_thres = 0.7
    ##### 2. Plot
    inps.cbar_label = 'Average spatial coherence'

    # Fig 1 - Baseline History
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pnet.plot_perp_baseline_hist(ax, date8_list, pbase_list, vars(inps),
                                      date8_list_drop)

    figName = 'BperpHistory' + inps.fig_ext
    if inps.save_fig:
        fig.savefig(figName, bbox_inches='tight')
        print 'save figure to ' + figName

    # Fig 2 - Coherence Matrix
    if inps.coherence_list:
        figName = 'CoherenceMatrix' + inps.fig_ext
        if inps.fig_size:
            fig = plt.figure(figsize=inps.fig_size)
        else:
            fig = plt.figure()
        ax = fig.add_subplot(111)
        ax = pnet.plot_coherence_matrix(ax, date12_list, inps.coherence_list,\
                                        date12_list_drop, plot_dict=vars(inps))

        if inps.save_fig:
            fig.savefig(figName, bbox_inches='tight', dpi=150)
            print 'save figure to ' + figName

    # Fig 3 - Min/Max Coherence History
    if inps.coherence_list:
        figName = 'CoherenceHistory' + inps.fig_ext
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax = pnet.plot_coherence_history(ax, date12_list, inps.coherence_list)

        if inps.save_fig:
            fig.savefig(figName, bbox_inches='tight')
            print 'save figure to ' + figName

    # Fig 4 - Interferogram Network
    if inps.fig_size:
        fig = plt.figure(figsize=inps.fig_size)
    else:
        fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pnet.plot_network(ax, date12_list, date8_list, pbase_list, vars(inps),
                           date12_list_drop)

    figName = 'Network' + inps.fig_ext
    if inps.save_fig:
        fig.savefig(figName, bbox_inches='tight')
        print 'save figure to ' + figName

    if inps.save_list:
        txtFile = os.path.splitext(inps.file)[0] + '_date12_list.txt'
        np.savetxt(txtFile, date12_list, fmt='%s')
        print 'save pairs/date12 info to file: ' + txtFile

    if inps.disp_fig:
        plt.show()
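
The kept/dropped bookkeeping above is plain set arithmetic on the date12 pairs. A standalone sketch of the same step (the dates below are invented):

date12_list_keep = ['20150103-20150127', '20150127-20150220']
date8_list_all = ['20150103', '20150127', '20150220', '20150316']

m_dates = [d12.split('-')[0] for d12 in date12_list_keep]
s_dates = [d12.split('-')[1] for d12 in date12_list_keep]
date8_list_keep = sorted(set(m_dates + s_dates))
date8_list_drop = sorted(set(date8_list_all) - set(date8_list_keep))
print(date8_list_drop)   # ['20150316']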
Example #4
def main(argv):
    inps = cmdLineParse()
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    #print '\n******************** Plot Network **********************'

    ##### 1. Read Info
    # Read dateList and bperpList
    ext = os.path.splitext(inps.file)[1]
    if ext in ['.h5']:
        atr = readfile.read_attribute(inps.file)
        k = atr['FILE_TYPE']
        print 'reading date and perpendicular baseline from '+k+' file: '+os.path.basename(inps.file)
        if k not in multi_group_hdf5_file:
            raise ValueError('only the following file types are supported:\n'+str(multi_group_hdf5_file))
        pbase_list = ut.perp_baseline_ifgram2timeseries(inps.file)[0]
        date8_list = ptime.ifgram_date_list(inps.file)
    else:
        print 'reading date and perpendicular baseline from baseline list file: '+inps.bl_list_file
        date8_list, pbase_list = pnet.read_baseline_file(inps.bl_list_file)[0:2]
    print 'number of acquisitions  : '+str(len(date8_list))

    # Read Pairs Info
    print 'reading pairs info from file: '+inps.file
    date12_list = pnet.get_date12_list(inps.file)
    print 'number of interferograms: '+str(len(date12_list))

    # Read drop_ifgram 
    date8_list_drop = []
    date12_list_drop = []
    if ext in ['.h5','.he5']:
        h5 = h5py.File(inps.file, 'r')
        ifgram_list_all = sorted(h5[k].keys())
        ifgram_list_keep = ut.check_drop_ifgram(h5, atr, ifgram_list_all)
        date12_list_keep = ptime.list_ifgram2date12(ifgram_list_keep)
        # Get date12_list_drop
        date12_list_drop = sorted(list(set(date12_list) - set(date12_list_keep)))
        print 'number of interferograms marked as dropped: '+str(len(date12_list_drop))

        # Get date_list_drop
        m_dates = [i.split('-')[0] for i in date12_list_keep]
        s_dates = [i.split('-')[1] for i in date12_list_keep]
        date8_list_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        date8_list_drop = sorted(list(set(date8_list) - set(date8_list_keep)))
        print 'number of acquisitions marked as dropped: '+str(len(date8_list_drop))

    # Read Coherence List
    inps.coherence_list = None
    if inps.coherence_file and os.path.isfile(inps.coherence_file):
        ext = os.path.splitext(inps.coherence_file)[1]
        if ext in ['.h5']:
            listFile = os.path.splitext(inps.coherence_file)[0]+'_spatialAverage.txt'
            if os.path.isfile(listFile):
                print 'reading coherence value from existing '+listFile
                fcoh = np.loadtxt(listFile, dtype=str)
                inps.coherence_list  = [float(i) for i in fcoh[:,1]]
                inps.coh_date12_list = [i        for i in fcoh[:,0]]
            else:
                print 'calculating average coherence value from '+inps.coherence_file
                if inps.mask_file:
                    mask = readfile.read(inps.mask_file)[0]
                else:
                    mask = None
                inps.coherence_list  = ut.spatial_average(inps.coherence_file, mask, saveList=True)
                inps.coh_date12_list = pnet.get_date12_list(inps.coherence_file)
        else:
            print 'reading coherence value from '+inps.coherence_file
            fcoh = np.loadtxt(inps.coherence_file, dtype=str)
            inps.coherence_list  = [float(i) for i in fcoh[:,1]]
            inps.coh_date12_list = [i        for i in fcoh[:,0]]

        # Check length of coherence file and input file
        if not set(inps.coh_date12_list) == set(date12_list):
            print 'WARNING: input coherence list has different pairs/date12 from input file'
            print 'turn off the color plotting of interferograms based on coherence'
            inps.coherence_list = None

    #inps.coh_thres = 0.7
    ##### 2. Plot
    # Fig 1 - Baseline History
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pnet.plot_perp_baseline_hist(ax, date8_list, pbase_list, vars(inps), date8_list_drop)

    figName = 'BperpHistory'+inps.fig_ext
    if inps.save_fig:
        fig.savefig(figName,bbox_inches='tight')
        print 'save figure to '+figName

    # Fig 2 - Coherence Matrix
    if inps.coherence_list:
        figName = 'CoherenceMatrix'+inps.fig_ext
        if inps.fig_size:
            fig = plt.figure(figsize=inps.fig_size)
        else:
            fig = plt.figure()
        ax = fig.add_subplot(111)
        ax = pnet.plot_coherence_matrix(ax, date12_list, inps.coherence_list)

        if inps.save_fig:
            fig.savefig(figName, bbox_inches='tight')
            print 'save figure to '+figName

    # Fig 3 - Min/Max Coherence History
    if inps.coherence_list:
        figName = 'CoherenceHistory'+inps.fig_ext
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax = pnet.plot_coherence_history(ax, date12_list, inps.coherence_list)

        if inps.save_fig:
            fig.savefig(figName, bbox_inches='tight')
            print 'save figure to '+figName

    # Fig 4 - Interferogram Network
    if inps.fig_size:
        fig = plt.figure(figsize=inps.fig_size)
    else:
        fig = plt.figure()
    ax = fig.add_subplot(111)
    ax = pnet.plot_network(ax, date12_list, date8_list, pbase_list, vars(inps), date12_list_drop)

    figName = 'Network'+inps.fig_ext
    if inps.save_fig:
        fig.savefig(figName,bbox_inches='tight')
        print 'save figure to '+figName

    if inps.save_list:
        txtFile = os.path.splitext(inps.file)[0]+'_date12_list.txt'
        np.savetxt(txtFile, date12_list, fmt='%s')
        print 'save pairs/date12 info to file: '+txtFile


    if inps.disp_fig:
        plt.show() 
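
This variant caches spatial-average coherence in a two-column text file (date12, value), as implied by the np.loadtxt calls above. A minimal sketch of writing and re-reading that layout; the file name and numbers are placeholders:

import numpy as np

rows = [('20150103-20150127', 0.83), ('20150127-20150220', 0.41)]
with open('coherence_spatialAverage.txt', 'w') as f:
    for date12, value in rows:
        f.write('%s    %.4f\n' % (date12, value))

fcoh = np.loadtxt('coherence_spatialAverage.txt', dtype=str)
coh_date12_list = [str(i) for i in fcoh[:, 0]]
coherence_list = [float(i) for i in fcoh[:, 1]]
print(coh_date12_list, coherence_list)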
Example #5
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based,\
                           inps.start_date, inps.end_date, inps.reset]):
        # Display network for manually modification when there is no other modification input.
        print 'No input option found to remove interferogram'
        if inps.template_file:
            print 'Keep all interferograms by enabling the --reset option'
            inps.reset = True
        else:
            print 'To manually modify the network, please use the --manual option'
            return

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)
        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)
        return

    # Convert colon-separated index input to a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file,
                                              check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.lookup_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.lookup_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if abs(ifg_bperp_list[i]) > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5, print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing dropped list in the input file; skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms' and inps.update_aux:
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)

            elif k == 'coherence' and inps.update_aux:
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch the spatial average txt file of coherence if it exists
                coh_spatialAverage_file = os.path.splitext(
                    Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
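
The --exclude-ifg-index handling above accepts both single indices ('3') and colon ranges ('5:8') and expands them into one sorted list of 1-based indices. A standalone sketch with a hypothetical helper name:

def expand_index_list(index_args):
    # e.g. ['3', '5:8'] -> [3, 5, 6, 7, 8]
    expanded = []
    for index in index_args:
        parts = sorted(int(i) for i in index.split(':'))
        if len(parts) == 2:
            expanded.extend(range(parts[0], parts[1] + 1))
        elif len(parts) == 1:
            expanded.append(parts[0])
        else:
            raise ValueError('Unrecognized input: %s' % index)
    return sorted(expanded)

print(expand_index_list(['3', '5:8']))   # [3, 5, 6, 7, 8]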
Example #6
def ifgram_inversion(ifgramFile='unwrapIfgram.h5', coherenceFile='coherence.h5', meta=None):
    '''Implementation of the SBAS algorithm.
    modified from sbas.py written by scott baker, 2012 

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        meta          - dict, including the following options:
                        weight_function
                        chunk_size - float, max number of data (ifgram_num*row_num*col_num)
                                     to read per loop; to control the memory
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    Example:
        meta = dict()
        meta['weight_function'] = 'variance'
        meta['chunk_size'] = 0.5e9
        meta['timeseriesFile'] = 'timeseries_var.h5'
        meta['tempCohFile'] = 'temporalCoherence_var.h5'
        ifgram_inversion('unwrapIfgram.h5', 'coherence.h5', meta)
    '''
    total = time.time()

    if not meta:
        meta = vars(cmdLineParse())
    if 'tempCohFile' not in meta.keys():
        meta['tempCohFile'] = 'temporalCoherence.h5'
    meta['timeseriesStdFile'] = 'timeseriesDecorStd.h5'

    if meta['update_mode'] and not ut.update_file(meta['timeseriesFile'], ifgramFile):
        return meta['timeseriesFile'], meta['tempCohFile']

    ##### Basic Info
    # length/width
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])
    meta['length'] = length
    meta['width']  = width

    # ifgram_list
    h5ifgram = h5py.File(ifgramFile,'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    #if meta['weight_function'] in ['no','uniform']:
    #    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    meta['ifgram_list'] = ifgram_list
    ifgram_num = len(ifgram_list)

    # date12_list/date8_list/tbase_diff
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    meta['date8_list'] = date8_list
    meta['date12_list'] = date12_list

    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((-1,1))
    meta['tbase_diff'] = tbase_diff

    print 'number of interferograms: %d' % (ifgram_num)
    print 'number of acquisitions  : %d' % (date_num)
    print 'number of columns: %d' % (width)
    print 'number of lines  : %d' % (length)

    ##### ref_y/x/value
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
        ref_value = np.zeros((ifgram_num,1), np.float32)
        for j in range(ifgram_num):
            ifgram = ifgram_list[j]
            dset = h5ifgram['interferograms'][ifgram].get(ifgram)
            ref_value[j] = dset[ref_y,ref_x]
        meta['ref_y'] = ref_y
        meta['ref_x'] = ref_x
        meta['ref_value'] = ref_value
    except:
        if meta['skip_ref']:
            meta['ref_value'] = 0.0
            print 'skip checking reference pixel info - This is for SIMULATION ONLY.'
        else:
            print 'ERROR: No ref_x/y found! Can not invert interferograms without reference in space.'
            print 'run seed_data.py '+ifgramFile+' --mark-attribute for a quick referencing.'
            sys.exit(1)
    h5ifgram.close()

    ##### Rank of Design matrix for weighted inversion
    A, B = ut.design_matrix(ifgramFile, date12_list)
    print '-------------------------------------------------------------------------------'
    if meta['weight_function'] in ['no','uniform']:
        print 'generic least square inversion with min-norm phase velocity'
        print '    based on Berardino et al. (2002, IEEE-TGRS)'
        print '    OLS for pixels with fully     connected network'
        print '    SVD for pixels with partially connected network'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'WARNING: singular design matrix! Inversion result can be biased!'
            print 'continue using its SVD solution on all pixels'
    else:
        print 'weighted least square (WLS) inversion with min-norm phase, pixelwise'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Can not invert the weighted least square solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)
    print '-------------------------------------------------------------------------------'


    ##### Invert time-series phase
    ##Check parallel environment
    if meta['weight_function'] in ['no','uniform']:
        meta['parallel'] = False
    if meta['parallel']:
        num_cores, meta['parallel'], Parallel, delayed = ut.check_parallel(1000, print_msg=False)

    ##Split into chunks to reduce memory usage
    r_step = meta['chunk_size']/ifgram_num/width         #split in lines
    if meta['weight_function'] not in ['no','uniform']:  #more memory usage (coherence) for WLS
        r_step /= 2.0
        if meta['parallel']:
            r_step /= num_cores
    r_step = int(ceil_to_1(r_step))
    meta['row_step'] = r_step
    chunk_num = int((length-1)/r_step)+1

    if chunk_num > 1:
        print 'maximum chunk size: %.1E' % (meta['chunk_size'])
        print 'split %d lines into %d patches for processing' % (length, chunk_num)
        print '    with each patch up to %d lines' % (r_step)
        if meta['parallel']:
            print 'parallel processing using %d cores ...' % (min([num_cores,chunk_num]))

    ##Computing the inversion
    box_list = []
    for i in range(chunk_num):
        r0 = i*r_step
        r1 = min([length, r0+r_step])
        box = (0,r0,width,r1)
        box_list.append(box)
    box_num = len(box_list)

    if not meta['parallel']:
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        for i in range(box_num):
            if box_num > 1:
                print '\n------- Processing Patch %d out of %d --------------' % (i+1, box_num)
            box = box_list[i]
            ts, tcoh, tsStd = ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box)
            tempCoh[box[1]:box[3],box[0]:box[2]] = tcoh
            timeseries[:,box[1]:box[3],box[0]:box[2]] = ts
            timeseriesStd[:,box[1]:box[3],box[0]:box[2]] = tsStd

    else:
        ##Temp file list
        meta['ftemp_base'] = 'timeseries_temp_'
        temp_file_list = [meta['ftemp_base']+str(i)+'.h5' for i in range(chunk_num)]

        ##Computation
        Parallel(n_jobs=num_cores)(delayed(ifgram_inversion_patch)\
                                   (ifgramFile, coherenceFile, meta, box) for box in box_list)

        ##Concatenate temp files
        print 'concatenating temporary timeseries files ...'
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)  # Std is not stored in the temp files; keep zeros
        tempCoh = np.zeros((length, width), np.float32)
        rmCmd = 'rm'
        for i in range(chunk_num):
            fname = temp_file_list[i]
            box = box_list[i]
            print 'reading '+fname
            h5temp = h5py.File(fname, 'r')
            dset = h5temp['timeseries'].get('timeseries')
            timeseries[:,box[1]:box[3],box[0]:box[2]] = dset[0:-1,:,:]
            tempCoh[box[1]:box[3],box[0]:box[2]] = dset[-1,:,:]
            h5temp.close()
            rmCmd += ' '+fname
        print rmCmd
        os.system(rmCmd)

    print 'converting phase to range'
    phase2range = -1*float(atr['WAVELENGTH'])/(4.*np.pi)
    timeseries *= phase2range
    timeseriesStd *= abs(phase2range)

    ##### Calculate time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(None,'[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None,'[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None,'[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    ##### Output
    ## 1. Write time-series file
    meta['timeseriesFile'] = write_timeseries_hdf5_file(timeseries, date8_list, atr,\
                                                        timeseriesFile=meta['timeseriesFile'])
    if not np.all(timeseriesStd == 0.):
        meta['timeseriesStdFile'] = write_timeseries_hdf5_file(timeseriesStd, date8_list, atr,\
                                                               timeseriesFile=meta['timeseriesStdFile'])

    ## 2. Write Temporal Coherence File
    print 'writing >>> '+meta['tempCohFile']
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    meta['tempCohFile'] = writefile.write(tempCoh, atr, meta['tempCohFile'])

    print 'Time series inversion took ' + str(time.time()-total) +' secs\nDone.'
    return meta['timeseriesFile'], meta['tempCohFile']
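
For the unweighted case the patches follow the SBAS scheme: estimate the phase velocity of each time interval from the baseline design matrix B with a pseudo-inverse, then integrate over the intervals. A toy numpy-only sketch on a 3-acquisition, 3-interferogram network (all numbers invented):

import numpy as np

tbase = np.array([0., 12., 24.])            # acquisition times in days
tbase_diff = np.diff(tbase)                 # [12, 12]
# One row per interferogram (0-1, 1-2, 0-2); one column per time interval.
B = np.array([[12.,  0.],
              [ 0., 12.],
              [12., 12.]])
v_true = np.array([0.10, -0.05])            # phase velocity per interval (rad/day)
phase_obs = B.dot(v_true)                   # simulated interferogram phases

v_est = np.linalg.pinv(B).dot(phase_obs)    # min-norm velocity solution
ts = np.concatenate(([0.], np.cumsum(v_est * tbase_diff)))
print(ts)   # cumulative phase at each acquisition, first date as reference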
Example #7
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:

                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1)
                                       reference pixel coordinate in row/column number
                        ref_y/x      - int, reference pixel coordinate in row/column number

                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline

                        #Inversion
                        weight_function   - no, fim, var, coh
    Outputs:
        ts       - 3D np.array in size of (date_num, row_num, col_num)
        temp_coh - 2D np.array in size of (row_num, col_num)
        tsStd    - 3D np.array in size of (date_num, row_num, col_num)
    '''

    ##### Get patch size/index
    if not box:
        box = (0,0,meta['width'],meta['length'])
    c0,r0,c1,r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initialize output data matrices
    row_num = r1-r0
    col_num = c1-c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    tsStd = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Mask for pixels to invert
    mask = np.ones(pixel_num, np.bool_)
    ## 1 - Water Mask
    if meta['water_mask_file']:
        print 'skip pixels on water with mask from file: %s' % (os.path.basename(meta['water_mask_file']))
        try:    waterMask = readfile.read(meta['water_mask_file'], epoch='waterMask')[0][r0:r1,c0:c1].flatten()
        except: waterMask = readfile.read(meta['water_mask_file'], epoch='mask')[0][r0:r1,c0:c1].flatten()
        mask *= np.array(waterMask, np.bool_)

    ## 2 - Mask for Zero Phase in ALL ifgrams
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1,c0:c1].flatten()
    mask *= ~np.isnan(ifgram_stack)
    mask *= ifgram_stack != 0.

    ## Invert pixels on mask 1+2
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to invert: %s out of %s' % (pixel_num2inv, pixel_num)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        tsStd = tsStd.reshape(date_num, row_num, col_num)
        return ts, temp_coh, tsStd

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    date12_list = meta['date12_list']

    if meta['skip_zero_phase']:
        print 'skip zero phase value (masked out and filled during phase unwrapping)'
    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile,'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1,c0:c1].flatten()
        if meta['skip_zero_phase']:
            d[d != 0.] -= meta['ref_value'][j]
        else:
            d -= meta['ref_value'][j]
        ifgram_data[j] = d
        sys.stdout.write('\rreading interferograms %s/%s ...' % (j+1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
    #ifgram_data -= meta['ref_value']

    ## 3 - Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
    maskAllNet = np.all(ifgram_data, axis=0)
    maskAllNet *= mask
    maskPartNet = mask ^ maskAllNet

    ##### Design matrix
    A,B = ut.design_matrix(ifgramFile, date12_list)
    try:    ref_date = str(np.loadtxt('reference_date.txt', dtype=str))
    except: ref_date = meta['date8_list'][0]
    #print 'calculate decorrelation noise covariance with reference date = %s' % (ref_date)
    refIdx = meta['date8_list'].index(ref_date)
    timeIdx = [i for i in range(date_num)]
    timeIdx.remove(refIdx)
    Astd = ut.design_matrix(ifgramFile, date12_list, referenceDate=ref_date)[0]

    ##### Inversion
    if meta['weight_function'] in ['no','uniform']:
        if np.sum(maskAllNet) > 0:
            print 'inverting pixels with valid phase in all     ifgrams with OLS (%.0f pixels) ...' % (np.sum(maskAllNet))
            ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,maskAllNet], meta['tbase_diff'], skipZeroPhase=False)
            ts[1:,maskAllNet] = ts1
            temp_coh[maskAllNet] = tempCoh1

        if np.sum(maskPartNet) > 0:
            print 'inverting pixels with valid phase in part of ifgrams with SVD ...'
            pixel_num2inv = np.sum(maskPartNet)
            pixel_idx2inv = np.where(maskPartNet)[0]
            prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
            for i in range(pixel_num2inv):
                idx = pixel_idx2inv[i]
                ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,idx], meta['tbase_diff'], meta['skip_zero_phase'])
                ts[1:, idx] = ts1.flatten()
                temp_coh[idx] = tempCoh1
                prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
            prog_bar.close()

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        h5coh = h5py.File(coherenceFile,'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1,c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()
            sys.stdout.write('\rreading coherence %s/%s ...' % (j+1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = np.array(coh_data, np.float64)
        L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
        epsilon = 1e-4
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            weight = 1.0 / coherence2phase_variance_ds(weight, L, print_msg=True)

        elif meta['weight_function'].startswith(('lin','coh','cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            weight[weight < epsilon] = epsilon

        elif meta['weight_function'].startswith(('fim','fisher')):
            print 'convert coherence to weight using Fisher Information Index (Seymour & Cumming, 1994)'
            weight = coherence2fisher_info_index(weight, L)

        else:
            print 'Un-recognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            idx = pixel_idx2inv[i]
            ts1, tempCoh1, tsStd1 = network_inversion_wls(A, ifgram_data[:,idx], weight[:,idx], Astd=Astd,\
                                                          skipZeroPhase=meta['skip_zero_phase'])
            ts[1:, idx] = ts1.flatten()
            temp_coh[idx] = tempCoh1
            tsStd[timeIdx, idx] = tsStd1.flatten()
            prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
        prog_bar.close()

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)
    tsStd = tsStd.reshape(date_num, row_num, col_num)


    ##Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base']+str(int(r0/meta['row_step']))+'.h5'
        print 'writing >>> '+fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries', shape=(date_num+1, row_num, col_num), dtype=np.float32)
        dset[0:-1,:,:] = ts
        dset[-1,:,:] = temp_coh  # last slice holds temporal coherence, as read back in ifgram_inversion()
        h5temp.close()
        return
    else:
        return ts, temp_coh, tsStd
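
A small sketch of the mask split used above: pixels with valid (non-zero) phase in every interferogram are inverted together with one OLS call, while pixels valid in only part of the network go through the pixelwise SVD branch. The data below is invented:

import numpy as np

# rows: interferograms, columns: pixels; 0 marks missing phase
ifgram_data = np.array([[ 1.0, 0.0, 2.0, 0.0],
                        [-0.5, 1.0, 0.3, 0.0],
                        [ 0.2, 0.4, 0.0, 0.0]])
mask = np.any(ifgram_data != 0., axis=0)          # pixels with any valid phase
maskAllNet = np.all(ifgram_data != 0., axis=0)    # valid in every interferogram
maskAllNet &= mask
maskPartNet = mask ^ maskAllNet                   # valid in only part of the network

print(maskAllNet)    # [ True False False False]
print(maskPartNet)   # [False  True  True False]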
Example #8
def timeseries_inversion(ifgramFile='unwrapIfgram.h5',
                         coherenceFile='coherence.h5',
                         inps_dict=None):
    '''Implementation of the SBAS algorithm.
    modified from sbas.py written by scott baker, 2012 

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        inps_dict     - dict, including the following options:
                        weight_function
                        min_coherence
                        max_coherence
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    '''
    total = time.time()

    if not inps_dict:
        inps_dict = vars(cmdLineParse())
    weight_func = inps_dict['weight_function']
    min_coh = inps_dict['min_coherence']
    max_coh = inps_dict['max_coherence']

    # Basic Info
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    if inps_dict['weight_function'] == 'no':
        ifgram_list = ut.check_drop_ifgram(h5ifgram, atr, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Convert ifgram_list to date12/8_list
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((date_num - 1, 1))

    print 'number of interferograms: ' + str(ifgram_num)
    print 'number of acquisitions  : ' + str(date_num)
    print 'number of pixels: ' + str(pixel_num)

    # Reference pixel in space
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except:
        print 'ERROR: No ref_x/y found! Can not invert interferograms without reference in space.'
        print 'run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
        sys.exit(1)

    ##### Read Interferograms
    print 'reading interferograms ...'
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for j in range(ifgram_num):
        ifgram = ifgram_list[j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        #d[d != 0.] -= d[ref_y, ref_x]
        d -= d[ref_y, ref_x]
        ifgram_data[j] = d.flatten()
        prog_bar.update(j + 1, suffix=date12_list[j])
    h5ifgram.close()
    prog_bar.close()

    #####---------------------- Inversion ----------------------#####
    # Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)

    if weight_func == 'no':
        print 'generalized inversion using SVD (Berardino et al., 2002, IEEE-TGRS)'
        print 'inverting time series ...'
        B_inv = np.array(np.linalg.pinv(B), np.float32)
        ts_rate = np.dot(B_inv, ifgram_data)
        ts1 = ts_rate * np.tile(tbase_diff, (1, pixel_num))
        ts0 = np.array([0.] * pixel_num, np.float32)
        ts_data = np.vstack((ts0, np.cumsum(ts1, axis=0)))
        del ts_rate, ts0, ts1

        # Temporal coherence
        print 'calculating temporal coherence (Tizzani et al., 2007, RSE)'
        temp_coh = np.zeros((1, pixel_num), np.float32) + 0j
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for i in range(ifgram_num):
            ifgram_est = np.dot(A[i, :], ts_data[1:, :])
            ifgram_diff = ifgram_data[i, :] - ifgram_est
            temp_coh += np.exp(1j * ifgram_diff)
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        del ifgram_data, ifgram_est, ifgram_diff
        temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
            (length, width)),
                            dtype=np.float32)

    else:
        print 'weighted least square (WLS) inversion using coherence pixel by pixel'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Can not invert the weighted least square solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)

        pixel_mask = np.ones(pixel_num, np.bool_)
        print 'reading coherence: ' + os.path.basename(coherenceFile)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[:].flatten()
            d[np.isnan(d)] = 0.
            pixel_mask[d == 0.] = 0
            coh_data[j] = d
            prog_bar.update(j + 1, suffix=date12_list[j])
        h5coh.close()
        prog_bar.close()

        # Get mask of valid pixels to invert
        print 'skip pixels with zero coherence in at least one interferogram'
        print 'skip pixels with zero phase     in all          interferograms'
        ifgram_stack = ut.get_file_stack(ifgramFile).flatten()
        pixel_mask[ifgram_stack == 0.] = 0

        pixel_num2inv = np.sum(pixel_mask)
        pixel_idx2inv = np.where(pixel_mask)[0]
        ifgram_data = ifgram_data[:, pixel_mask]
        coh_data = coh_data[:, pixel_mask]
        print 'number of pixels to invert: %d' % (pixel_num2inv)

        ##### Calculate Weight matrix
        weight = coh_data
        if weight_func.startswith('var'):
            print 'convert coherence to weight using inverse of variance: x**2/(1-x**2) from Hanssen (2001, Eq. 4.2.32)'
            weight[weight > 0.999] = 0.999
            if weight_func == 'variance-max-coherence':
                print 'constrain the max coherence to %f' % max_coh
                weight[weight > max_coh] = max_coh
            weight = np.square(weight)
            weight *= 1. / (1. - weight)
            if weight_func == 'variance-log':
                print 'use log(1/variance)+1 as weight'
                weight = np.log(weight + 1)
        elif weight_func.startswith('lin'):
            print 'use coherence as weight directly (Tong et al., 2016, RSE)'
        elif weight_func.startswith('norm'):
            mu = (min_coh + max_coh) / 2.0
            std = (max_coh - min_coh) / 6.0
            print 'convert coherence to weight using CDF of normal distribution: N(%f, %f)' % (mu, std)
            chunk_size = 1000
            chunk_num = int(pixel_num2inv / chunk_size) + 1
            prog_bar = ptime.progress_bar(maxValue=chunk_num)
            for i in range(chunk_num):
                i0 = i * chunk_size          # chunk i covers pixels [i0, i1)
                i1 = min([pixel_num2inv, i0 + chunk_size])
                weight[:, i0:i1] = norm.cdf(weight[:, i0:i1], mu, std)
                prog_bar.update(i + 1, every=10)
            prog_bar.close()
            #weight = norm.cdf(weight, mu, std)
        else:
            print 'Un-recognized weight function: %s' % weight_func
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        ts_data = np.zeros((date_num, pixel_num), np.float32)
        temp_coh = np.zeros(pixel_num, np.float32)
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            # Inverse timeseries
            ifgram_pixel = ifgram_data[:, i]
            weight_pixel = weight[:, i]
            W = np.diag(weight_pixel)
            ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(
                A.T).dot(W).dot(ifgram_pixel)
            ts_data[1:, pixel_idx2inv[i]] = ts

            # Calculate weighted temporal coherence
            ifgram_diff = ifgram_pixel - np.dot(A, ts)
            temp_coh_pixel = np.abs(
                np.sum(np.multiply(weight_pixel, np.exp(1j * ifgram_diff)),
                       axis=0)) / np.sum(weight_pixel)
            temp_coh[pixel_idx2inv[i]] = temp_coh_pixel

            prog_bar.update(i + 1, every=2000, suffix=str(i + 1) + ' pixels')
        prog_bar.close()
        del ifgram_data, weight

    #####---------------------- Outputs ----------------------#####
    ## 1.1 Convert time-series phase to displacement
    print 'converting phase to range'
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    ts_data *= phase2range

    ## 1.2 Write time-series data matrix
    timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date,
                                    data=ts_data[i].reshape(length, width),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    ## 1.3 Write time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(
        ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(
        None, '[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()
    del ts_data

    ## 2. Write Temporal Coherence File
    tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coh.reshape(length, width), atr, tempCohFile)

    print 'Time series inversion took ' + str(time.time() -
                                              total) + ' secs\nDone.'
    return timeseriesFile, tempCohFile
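Note: the per-pixel loop above is a textbook weighted least-squares (WLS) estimator followed by a weighted temporal coherence check. Below is a minimal, self-contained sketch of the same math, assuming only NumPy; the names wls_invert_pixel, A, ifgram_phase, and weight are illustrative, not part of the example above.

import numpy as np

def wls_invert_pixel(A, ifgram_phase, weight):
    """Weighted least-squares inversion of one pixel (illustrative sketch).
    A            - 2D array (ifgram_num, date_num - 1), design matrix
    ifgram_phase - 1D array (ifgram_num,), unwrapped phase in radian
    weight       - 1D array (ifgram_num,), per-interferogram weight
    """
    W = np.diag(weight)
    # ts = (A.T W A)^-1 A.T W d, solved without forming the explicit inverse
    ts = np.linalg.solve(A.T.dot(W).dot(A), A.T.dot(W).dot(ifgram_phase))
    # weighted temporal coherence: |sum(w * exp(j * residual))| / sum(w)
    residual = ifgram_phase - A.dot(ts)
    temp_coh = np.abs(np.sum(weight * np.exp(1j * residual))) / np.sum(weight)
    return ts, temp_coh

Using np.linalg.solve here is numerically preferable to explicitly inverting A.T W A and gives the same result for well-conditioned networks.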
Example #9
0
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:

                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1)
                                       phase value of each interferogram at the reference pixel
                        ref_y/x      - int, reference pixel coordinate in row/column number

                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline

                        #Inversion
                        weight_function - string, weighting option used in the inversion:
                                          'no'/'uniform', 'variance', or 'linear'/'coherence'
    '''

    ##### Get patch size/index
    if not box:
        box = (0, 0, meta['width'], meta['length'])
    c0, r0, c1, r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initiate output data matrixs
    row_num = r1 - r0
    col_num = c1 - c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Get mask of non-zero pixels
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1, c0:c1].flatten()
    mask = ~np.isnan(ifgram_stack)
    mask[ifgram_stack == 0.] = 0
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to inverse: %d' % (pixel_num2inv)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        return ts, temp_coh

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num2inv), np.float32)
    date12_list = meta['date12_list']

    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile, 'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1, c0:c1]
        ifgram_data[j] = d.flatten()[mask]
        sys.stdout.write('\rreading interferograms %s/%s ...' %
                         (j + 1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
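    # subtract the reference-pixel phase so every interferogram is referenced to the same point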
    ifgram_data -= meta['ref_value']

    ##### Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)
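    # A relates interferogram phase to time-series phase (w.r.t. the first date);
    # B relates it to phase velocity between consecutive dates (hence B_inv + tbase_diff below)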
    B_inv = np.array(np.linalg.pinv(B), np.float32)

    ##### Inverse
    if meta['weight_function'] in ['no', 'uniform']:
        print 'inversing time-series ...'
        ts[1:, mask] = network_inversion_sbas(B,
                                              ifgram_data,
                                              meta['tbase_diff'],
                                              B_inv=B_inv)

        print 'calculating temporal coherence ...'
        temp_coh[mask] = temporal_coherence(A, ts[1:, mask], ifgram_data)

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num2inv), np.float32)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1, c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()[mask]
            sys.stdout.write('\rreading coherence %s/%s ...' %
                             (j + 1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = coh_data
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
            lineStr = '    number of multilooks L=%d' % L
            if L > 80:
                L = 80
                lineStr += ', use L=80 to avoid dividing by 0 in the calculation, with negligible effect'
            print lineStr
            weight = 1.0 / coherence2phase_variance_ds(weight, L)
        elif meta['weight_function'].startswith(('lin', 'coh', 'cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            epsilon = 1e-4
            weight[weight < epsilon] = epsilon
        else:
            print 'Un-recognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inversing time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            ts[1:, pixel_idx2inv[i]] = network_inversion_wls(
                A, ifgram_data[:, i], weight[:, i])[0].flatten()
            prog_bar.update(i + 1,
                            every=100,
                            suffix=str(i + 1) + '/' + str(pixel_num2inv) +
                            ' pixels')
        prog_bar.close()

        print 'calculating temporal coherence ...'
        #temp_coh[mask] = temporal_coherence(A, ts[1:,mask], ifgram_data, weight)
        temp_coh[mask] = temporal_coherence(A, ts[1:, mask], ifgram_data)

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)

    ##Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base'] + str(int(r0 / meta['row_step'])) + '.h5'
        print 'writing >>> ' + fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries',
                                    shape=(date_num + 1, row_num, col_num),
                                    dtype=np.float32)
        dset[0:-1, :, :] = ts
        # temporal coherence goes into the last layer of the (date_num + 1)-layer dataset
        dset[-1, :, :] = temp_coh
        h5temp.close()
        return
    else:
        return ts, temp_coh
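Note: network_inversion_sbas is not shown in this example. Under the usual SBAS convention that B maps phase velocities between consecutive dates to interferometric phase, a minimal sketch may look like the following; the function and variable names are illustrative assumptions, not taken from the example above.

import numpy as np

def sbas_invert(B, ifgram_data, tbase_diff, B_inv=None):
    """SBAS-style network inversion (illustrative sketch).
    B           - 2D array (ifgram_num, date_num - 1), velocity design matrix
    ifgram_data - 2D array (ifgram_num, pixel_num), unwrapped phase in radian
    tbase_diff  - 2D array (date_num - 1, 1), time between consecutive dates in years
    Returns the time-series phase relative to the first date, (date_num - 1, pixel_num).
    """
    if B_inv is None:
        B_inv = np.linalg.pinv(B)
    # phase velocity between consecutive acquisitions for every pixel
    velocity = np.dot(B_inv, ifgram_data)
    # integrate the velocities over the time intervals to get cumulative phase
    return np.cumsum(velocity * tbase_diff, axis=0)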