Example 1
def select_master_interferogram(date12_list, date_list, pbase_list, m_date=None):
    '''Select reference interferogram based on input temp/perp baseline info
    If m_date is specified, select its closest slave date; otherwise, choose the closest pair
    among all pairs as master interferogram.
    Example:
        master_date12 = select_master_interferogram(date12_list, date_list, pbase_list)
    '''
    pbase_array = np.array(pbase_list, dtype='float64')
    # Get temporal baseline
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    tbase_array = np.array(ptime.date_list2tbase(date8_list)[0], dtype='float64')
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_array)-min(pbase_array)) / (max(tbase_array)-min(tbase_array))
    tbase_array *= temp2perp_scale
    
    # Calculate the temp/perp baseline distance for input pairs
    idx1 = np.array([date6_list.index(date12.split('-')[0]) for date12 in date12_list])
    idx2 = np.array([date6_list.index(date12.split('-')[1]) for date12 in date12_list])
    base_distance = np.sqrt((tbase_array[idx2] - tbase_array[idx1])**2 + (pbase_array[idx2] - pbase_array[idx1])**2)
    
    # Get master interferogram index
    if not m_date:
        # Choose pair with shortest temp/perp baseline
        m_date12_idx = np.argmin(base_distance)        
    else:
        m_date = ptime.yymmdd(m_date)
        # Choose the pair containing m_date with the shortest temp/perp baseline
        m_date12_idx_array = np.array([date12_list.index(date12) for date12 in date12_list if m_date in date12])
        min_base_distance = np.min(base_distance[m_date12_idx_array])
        m_date12_idx = np.where(base_distance == min_base_distance)[0][0]
    
    m_date12 = date12_list[m_date12_idx]
    return m_date12
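
A minimal usage sketch (not part of the original source): the dates and baseline values below are invented, and the ptime/np helpers called inside the function are assumed to be importable as in the surrounding code.

# hypothetical inputs: three acquisitions and two candidate pairs
date_list   = ['20070106', '20070221', '20070408']
pbase_list  = [0.0, 120.5, -80.3]                  # perpendicular baselines [m]
date12_list = ['070106-070221', '070221-070408']
m_date12 = select_master_interferogram(date12_list, date_list, pbase_list)
# returns the pair with the shortest distance in the scaled temp/perp baseline plane
# optionally constrain the master pair to contain a given date:
m_date12 = select_master_interferogram(date12_list, date_list, pbase_list, m_date='070221')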
Example 2
def select_pairs_star(dateList,m_date):
    ## Select Star-like network/interferograms/pairs: a single-master network, similar to the PS approach.
    ##
    ## Usage:
    ##     m_date : master date, chosen based on the following criteria:
    ##              1) near the center in temporal and spatial baseline
    ##              2) prefer winter over summer season for less temporal decorrelation
    ##
    ## Reference:
    ##     Ferretti, A., C. Prati, and F. Rocca (2001), Permanent scatterers in SAR interferometry, IEEE TGRS, 39(1), 8-20.
    ##

    ## Pre-process Inputs
    dateList = ptime.yymmdd(sorted(dateList))
    m_date   = ptime.yymmdd(m_date)

    ## Get date index
    idxList = list(range(len(dateList)))
    m_idx = dateList.index(m_date)

    pairs = []
    for idx in idxList:
        if not idx == m_idx:
            pairs.append([m_idx,idx])
    pairs = pair_sort(pairs)

    return pairs
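
A short usage sketch (illustrative only): pair_sort and the ptime helpers referenced above are assumed available, and the dates are made up.

date_list = ['070106', '070221', '070408', '070524']
pairs = select_pairs_star(date_list, '070221')
# index pairs connecting every other date to the master date '070221'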
Example 3
def threshold_doppler_overlap(date12_list, date_list, dop_list, bandwidth_az, dop_overlap_min=0.15):
    '''Remove pairs/interferograms whose azimuth Doppler overlap is smaller than the minimum value
    Inputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYMMDD/YYYYMMDD format, optional
        dop_list    : list of list of 3 float, for centroid Doppler frequency
        bandwidth_az    : float, bandwidth in azimuth direction
        dop_overlap_min : float, minimum overlap of azimuth Doppler frequency
    Outputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
    '''
    if not date12_list:  return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if not len(date_list) == len(dop_list):
            print 'ERROR: number of existing dates is not equal to number of Doppler values!'
            print 'date list is needed for threshold filtering!'
            print 'skip filtering.'
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        dop_overlap = calculate_doppler_overlap(dop_list[idx1], dop_list[idx2], bandwidth_az)
        if dop_overlap >= dop_overlap_min:
            date12_list_out.append(date12)
    return date12_list_out
Example 4
def threshold_temporal_baseline(date12_list, btemp_max, keep_seasonal=True, btemp_min=0.0):
    '''Remove pairs/interferograms out of min/max/seasonal temporal baseline limits
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        btemp_max   : float, maximum temporal baseline
        btemp_min   : float, minimum temporal baseline
        keep_seasonal : keep interferograms with seasonal temporal baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_temporal_baseline(date12_list, 200)
        date12_list = threshold_temporal_baseline(date12_list, 200, False)
    '''
    if not date12_list:  return []
    # Get date list and tbase list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date8_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    date6_list = ptime.yymmdd(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        tbase = int(abs(tbase_list[idx1] - tbase_list[idx2]))
        if btemp_min <= tbase <= btemp_max:
            date12_list_out.append(date12)
        elif keep_seasonal and tbase/30 in [11,12]:
            date12_list_out.append(date12)    
    return date12_list_out
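
A brief usage sketch with invented pairs, assuming the ptime helpers used above are importable.

date12_list = ['070106-070221', '070221-070408', '070106-080110']
# keep pairs within 100 days, plus near-annual (seasonal) pairs such as 070106-080110
date12_keep = threshold_temporal_baseline(date12_list, 100, keep_seasonal=True)
# drop the seasonal pairs as well
date12_keep = threshold_temporal_baseline(date12_list, 100, keep_seasonal=False)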
Example 5
def coherence_matrix(date12_list, coh_list):
    '''Return coherence matrix based on input date12 list and its coherence
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferograms
    Output:
        coh_matrix  - 2D np.array with dimension length = date num
                      np.nan for date pairs without an interferogram
                      diagonal elements left as np.nan (uncomment the code below to set them to 1.0)
    '''
    # Get date list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))
    date_num = len(date6_list)

    coh_mat = np.zeros([date_num, date_num])
    coh_mat[:] = np.nan
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        coh = coh_list[date12_list.index(date12)]
        coh_mat[idx1, idx2] = coh    #symmetric
        coh_mat[idx2, idx1] = coh

    #for i in range(date_num):    # diagonal value
    #    coh_mat[i, i] = 1.0

    return coh_mat
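
A small illustrative sketch of the coherence matrix above; the coherence values are made up and np/ptime are assumed imported as in the surrounding code.

date12_list = ['070106-070221', '070221-070408']
coh_list    = [0.85, 0.42]
coh_mat = coherence_matrix(date12_list, coh_list)
# 3x3 matrix here: np.nan where no interferogram exists, symmetric values where one does
print(coh_mat)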
Example 6
def threshold_coherence_based_mst(date12_list, coh_list):
    '''Return a minimum spanning tree of network based on the coherence inverse.
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        mst_date12_list - list of string in YYMMDD-YYMMDD format, for MST network of interferograms 
    '''
    # coh_list --> coh_mat --> weight_mat
    coh_mat = coherence_matrix(date12_list, coh_list)
    mask = ~np.isnan(coh_mat)
    wei_mat = np.zeros(coh_mat.shape)
    wei_mat[:] = np.inf
    wei_mat[mask] = 1/coh_mat[mask]

    # MST path based on weight matrix
    wei_mat_csr = csr_matrix(wei_mat)
    mst_mat_csr = minimum_spanning_tree(wei_mat_csr)

    # Get date6_list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [date_idx_array.tolist() for date_idx_array in find(mst_mat_csr)[0:2]]
    mst_date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]]+'-'+date6_list[idx[1]]
        mst_date12_list.append(date12)
    return mst_date12_list
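
A usage sketch for the coherence-based MST above, assuming scipy's csr_matrix, minimum_spanning_tree and find are imported as in the function; the coherence values are invented.

date12_list = ['070106-070221', '070221-070408', '070106-070408']
coh_list    = [0.85, 0.42, 0.60]
mst_date12_list = threshold_coherence_based_mst(date12_list, coh_list)
# keeps the two highest-coherence pairs (0.85 and 0.60) that still connect all three dates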
Example 7
def read_pairs_list(date12ListFile, dateList=[]):
    '''Read Pairs List file like below:
    070311-070426
    070311-070611
    ...
    '''
    # Read date12 list file
    date12List = sorted(list(np.loadtxt(date12ListFile, dtype=str)))

    # Get dateList from date12List
    if not dateList:
        dateList = []
        for date12 in date12List:
            dates = date12.split('-')
            if not dates[0] in dateList: dateList.append(dates[0])
            if not dates[1] in dateList: dateList.append(dates[1])
        dateList.sort()
    date6List = ptime.yymmdd(dateList)
    
    # Get pair index 
    pairs_idx = []
    for date12 in date12List:
        dates = date12.split('-')
        pair_idx = [date6List.index(dates[0]), date6List.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
Example 8
def select_pairs_delaunay(date_list, pbase_list, norm=True):
    '''Select Pairs using Delaunay Triangulation based on temporal/perpendicular baselines
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
        norm       : normalize temporal baseline to perpendicular baseline
    Key points
        1. Define a ratio between perpendicular and temporal baseline axis units (Pepe and Lanari, 2006, TGRS).
        2. Pairs with too large perpendicular / temporal baseline or Doppler centroid difference should be removed
           after this, using a threshold, to avoid strong decorrelations (Zebker and Villasenor, 1992, TGRS).
    Reference:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Zebker, H. A., and J. Villasenor (1992), Decorrelation in interferometric radar echoes, IEEE TGRS, 30(5), 950-959.
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Normalization (Pepe and Lanari, 2006, TGRS)
    if norm:
        temp2perp_scale = (max(pbase_list)-min(pbase_list)) / (max(tbase_list)-min(tbase_list))
        tbase_list = [tbase*temp2perp_scale for tbase in tbase_list]
    
    # Generate Delaunay Triangulation
    date12_idx_list = Triangulation(tbase_list, pbase_list).edges.tolist()
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [date6_list[idx[0]]+'-'+date6_list[idx[1]] for idx in date12_idx_list]
    return date12_list
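
An illustrative call of the Delaunay selection above; it assumes matplotlib's Triangulation is imported as in the function, and the baseline values are made up.

date_list  = ['070106', '070221', '070408', '070524']
pbase_list = [0.0, 150.0, -60.0, 300.0]
date12_list = select_pairs_delaunay(date_list, pbase_list)
# each edge of the triangulation in the (scaled temporal, perpendicular) baseline plane becomes one pair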
Example 9
def select_pairs_star(date_list, m_date=None, pbase_list=[]):
    '''Select Star-like network/interferograms/pairs: a single-master network, similar to the PS approach.
    Usage:
        m_date : master date, chosen based on the following criteria:
                 1) near the center in temporal and spatial baseline
                 2) prefer winter over summer season for less temporal decorrelation
    Reference:
        Ferretti, A., C. Prati, and F. Rocca (2001), Permanent scatterers in SAR interferometry, IEEE TGRS, 39(1), 8-20.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    
    # Select master date if not given
    if not m_date:
        m_date = select_master_date(date8_list, pbase_list)
        print 'auto select master date: '+m_date
    
    # Check input master date
    m_date8 = ptime.yyyymmdd(m_date)
    if m_date8 not in date8_list:
        print 'Input master date does not exist in date list!'
        print 'Input master date: '+str(m_date8)
        print 'Input date list: '+str(date8_list)
        m_date8 = None
    
    # Generate star/ps network
    m_idx = date8_list.index(m_date8)
    date12_idx_list = [sorted([m_idx, s_idx]) for s_idx in range(len(date8_list)) if s_idx != m_idx]
    date12_list = [date6_list[idx[0]]+'-'+date6_list[idx[1]] for idx in date12_idx_list]
    
    return date12_list
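
A minimal usage sketch (invented dates; ptime assumed importable). Without m_date, the function falls back to select_master_date, which is defined elsewhere in the module.

date_list = ['20070106', '20070221', '20070408']
date12_list = select_pairs_star(date_list, m_date='070221')
# -> ['070106-070221', '070221-070408']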
Example 10
def select_pairs_sequential(date_list, increment_num=2):
    '''Select Pairs in a Sequential way:
        For each acquisition, find its increment_num nearest acquisitions in the past.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
    Reference:
        Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series, IEEE TGRS, 51(7), 4249-4259.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date_idx_list = list(range(len(date6_list)))

    # Get pairs index list
    date12_idx_list = []
    for date_idx in date_idx_list:
        for i in range(increment_num):
            if date_idx - i - 1 >= 0:
                date12_idx_list.append([date_idx - i - 1, date_idx])
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    return date12_list
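
A short sketch with made-up dates, assuming the ptime helpers are importable.

date_list = ['070106', '070221', '070408', '070524']
date12_list = select_pairs_sequential(date_list, increment_num=2)
# each date is paired with its two most recent predecessors:
# ['070106-070221', '070106-070408', '070221-070408', '070221-070524', '070408-070524']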
Example 11
def write_pairs_list(pairs,dateList,outName):
    dateList6 = ptime.yymmdd(dateList)
    fl = open(outName,'w')
    for idx in pairs:
        date12 = dateList6[idx[0]]+'-'+dateList6[idx[1]]+'\n'
        fl.write(date12)
    fl.close()
    return 1
Example 12
def write_pairs_list(pairs, dateList, outName):
    '''Write pairs list file.'''
    dateList6 = ptime.yymmdd(dateList)
    fl = open(outName,'w')
    for idx in pairs:
        date12 = dateList6[idx[0]]+'-'+dateList6[idx[1]]+'\n'
        fl.write(date12)
    fl.close()
    return 1
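
A minimal usage sketch of the writer above; the file name and index pairs are arbitrary.

date_list = ['070106', '070221', '070408']
pairs = [[0, 1], [1, 2]]
write_pairs_list(pairs, date_list, 'Pairs.list')
# Pairs.list now contains one 'YYMMDD-YYMMDD' pair per line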
Example 13
def manual_select_pairs_to_remove(File):
    '''Manually select interferograms to remove'''
    print '----------------------------------------------------------------------------'
    print 'Manually select interferograms to remove'
    print 'Click two dates - points - in the figure to select one interferogram pair'
    print 'repeat until you select all pairs you would like to remove'
    print 'then close the figure to continue the program ...'
    print '----------------------------------------------------------------------------'
    # Display the network
    fig = plt.figure()
    ax = fig.add_subplot(111)

    pairs_idx = pnet.read_igram_pairs(File)
    bperp_list = ut.Baseline_timeseries(File)
    date8_list = ptime.igram_date_list(File)
    ax = pnet.plot_network(ax, pairs_idx, date8_list, bperp_list)
    print 'display the interferogram network of file: ' + File

    date12_orig = pnet.get_date12_list(File)
    date6_list = ptime.yymmdd(date8_list)
    dates_array = np.array(ptime.date_list2vector(date8_list)[0])
    dateNum_array = mdates.date2num(dates_array)
    bperp_array = np.array(bperp_list)

    date_click = []
    date12_click = []

    def onclick(event):
        xClick = event.xdata
        yClick = event.ydata
        idx = nearest_neighbor(xClick, yClick, dateNum_array, bperp_array)
        date6 = date6_list[idx]
        print 'click at ' + date6
        date_click.append(date6)
        if len(date_click) % 2 == 0 and date_click[-2] != date_click[-1]:
            [m_date, s_date] = sorted(date_click[-2:])
            m_idx = date6_list.index(m_date)
            s_idx = date6_list.index(s_date)
            date12 = m_date + '-' + s_date
            if date12 in date12_orig:
                print 'select date12: ' + date12
                date12_click.append(date12)
                ax.plot([dateNum_array[m_idx], dateNum_array[s_idx]],
                        [bperp_array[m_idx], bperp_array[s_idx]],
                        'r',
                        lw=4)
            else:
                print date12 + ' does not exist in input file'
        plt.draw()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    plt.show()
    return date12_click
Example 14
def select_pairs_all(date_list):
    '''Select All Possible Pairs/Interferograms
    Input : date_list   - list of date in YYMMDD/YYYYMMDD format
    Output: date12_list - list of date12 in YYMMDD-YYMMDD format
    Reference:
        Berardino, P., G. Fornaro, R. Lanari, and E. Sansosti (2002), A new algorithm for surface deformation monitoring
        based on small baseline differential SAR interferograms, IEEE TGRS, 40(11), 2375-2383.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date12_list = list(itertools.combinations(date6_list, 2))
    date12_list = [date12[0]+'-'+date12[1] for date12 in date12_list]
    return date12_list
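
A short sketch (invented dates) of the all-pairs selection above.

date_list = ['070106', '070221', '070408']
date12_list = select_pairs_all(date_list)
# -> ['070106-070221', '070106-070408', '070221-070408']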
Example 15
def date12_list2index(date12_list, date_list=[]):
    '''Convert list of date12 string into list of index'''
    # Get dateList from date12List
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = list(set(m_dates + s_dates))
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(date_list)))
    
    # Get pair index 
    pairs_idx = []
    for date12 in date12_list:
        dates = date12.split('-')
        pair_idx = [date6_list.index(dates[0]), date6_list.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
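
An illustrative call of the index conversion above, with made-up pairs.

date12_list = ['070106-070221', '070221-070408']
pairs_idx = date12_list2index(date12_list)
# -> [[0, 1], [1, 2]]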
Example 16
def read_baseline_file(baselineFile, exDateList=[]):
    '''Read bl_list.txt without dates listed in exDateList
    # Date  Bperp    dop0/PRF  dop1/PRF   dop2/PRF      PRF    slcDir
    070106     0.0   0.03      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070106/
    070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/
    070824  2787.3   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070824/
    ...
    
    Examples:
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile)
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile,['080520','100726'])
        date8List, perpBaseList = read_baseline_file(baselineFile)[0:2]
    '''
    exDateList = ptime.yymmdd(exDateList)
    if not exDateList:  exDateList = []

    ## Read baseline file into lines
    fb = open(baselineFile)
    lines = []
    for line in fb:
        l = line.strip()
        lines.append(l)
    fb.close()

    ## Read each line and put the values into arrays
    date6List    = []
    perpBaseList = []
    dopplerList  = []
    slcDirList   = []
    for line in lines:
        c = line.split()    # splits on white space
        date = c[0]
        if not date in exDateList:
            date6List.append(date)
            perpBaseList.append(float(c[1]))
            dop = np.array([float(c[2]), float(c[3]), float(c[4])])
            prf = float(c[5])
            dop *= prf
            dopplerList.append(dop)
            slcDirList.append(c[6])

    date8List = ptime.yyyymmdd(date6List)
    return date8List, perpBaseList, dopplerList, slcDirList
Example 17
def select_pairs_mst(date_list, pbase_list):
    '''Select Pairs using Minimum Spanning Tree technique
        Connection Cost is calculated using the baseline distance in perp and scaled temporal baseline (Pepe and Lanari,
        2006, TGRS) plane.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
    References:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Perissin D., Wang T. (2012), Repeat-pass SAR interferometry with partially coherent targets. IEEE TGRS. 271-280
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (max(tbase_list) -
                                                             min(tbase_list))
    tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Get weight matrix
    ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
    ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
    ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
    ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix

    weightMat = np.sqrt(
        np.square(ttMat) +
        np.square(ppMat))  # 2D distance matrix in temp/perp domain
    weightMat = csr_matrix(weightMat)  # compress sparse row matrix

    # MST path based on weight matrix
    mstMat = minimum_spanning_tree(weightMat)

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list
     ] = [date_idx_array.tolist() for date_idx_array in find(mstMat)[0:2]]
    date12_list = [
        date6_list[m_idx_list[i]] + '-' + date6_list[s_idx_list[i]]
        for i in range(len(m_idx_list))
    ]
    return date12_list
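
A usage sketch of the MST pair selection above; the baselines are invented and numpy/scipy are assumed imported as in the function.

date_list  = ['070106', '070221', '070408', '070524']
pbase_list = [0.0, 150.0, -60.0, 300.0]
date12_list = select_pairs_mst(date_list, pbase_list)
# N acquisitions -> N-1 pairs forming the minimum spanning tree in the temp/perp baseline plane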
Example 18
def read_pairs_list(listFile,dateList):
    ## Read Pairs List file like below:
    ## 070311-070426
    ## 070311-070611
    ## ...

    dateList6 = ptime.yymmdd(dateList)
    pairs=[]
    fl = open(listFile,'r')
    lines = []
    lines = fl.read().splitlines()
    for line in lines:
        date12 = line.split('-')
        pairs.append([dateList6.index(date12[0]),dateList6.index(date12[1])])
    fl.close()

    pairs = pair_sort(pairs)

    return pairs
Example 19
def threshold_perp_baseline(date12_list,
                            date_list,
                            pbase_list,
                            pbase_max,
                            pbase_min=0.0):
    '''Remove pairs/interferograms out of [pbase_min, pbase_max]
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string for date in YYMMDD/YYYYMMDD format, optional
        pbase_list  : list of float for perpendicular spatial baseline
        pbase_max   : float, maximum perpendicular baseline
        pbase_min   : float, minimum perpendicular baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_perp_baseline(date12_list, date_list, pbase_list, 500)
    '''
    if not date12_list: return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if not len(date_list) == len(pbase_list):
            print 'ERROR: number of existing dates is not equal to number of perp baseline!'
            print 'date list is needed for threshold filtering!'
            print 'skip filtering.'
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        pbase = abs(pbase_list[idx1] - pbase_list[idx2])
        if pbase_min <= pbase <= pbase_max:
            date12_list_out.append(date12)
    return date12_list_out
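
A brief sketch of the perpendicular-baseline threshold above, with invented values.

date12_list = ['070106-070221', '070221-070408']
date_list   = ['070106', '070221', '070408']
pbase_list  = [0.0, 450.0, -80.0]
date12_keep = threshold_perp_baseline(date12_list, date_list, pbase_list, pbase_max=500)
# keeps '070106-070221' (450 m) and drops '070221-070408' (530 m)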
Example 20
def igram_pairs(igramFile):
    ## Read Igram file
    h5file = h5py.File(igramFile)
    k = h5file.keys()
    if "interferograms" in k:
        k[0] = "interferograms"
    elif "coherence" in k:
        k[0] = "coherence"
    if k[0] not in ["interferograms", "coherence", "wrapped"]:
        print "Only interferograms / coherence / wrapped are supported."
        sys.exit(1)

    dateList = ptime.date_list(igramFile)
    dateList6 = ptime.yymmdd(dateList)

    pairs = []
    igramList = h5file[k[0]].keys()
    for igram in igramList:
        date12 = h5file[k[0]][igram].attrs["DATE12"].split("-")
        pairs.append([dateList6.index(date12[0]), dateList6.index(date12[1])])
    h5file.close()

    return pairs
Example 21
def read_igram_pairs(igramFile):
    ## Read Igram file
    h5file = h5py.File(igramFile,'r')
    k = h5file.keys()
    if 'interferograms' in k: k[0] = 'interferograms'
    elif 'coherence'    in k: k[0] = 'coherence'
    if k[0] not in  ['interferograms','coherence','wrapped']:
        print 'Only interferograms / coherence / wrapped are supported.';  sys.exit(1)

    dateList  = ptime.date_list(igramFile)
    dateList6 = ptime.yymmdd(dateList)

    pairs = []
    igramList=h5file[k[0]].keys()
    for igram in igramList:
        date12 = h5file[k[0]][igram].attrs['DATE12'].split('-')
        pairs.append([dateList6.index(date12[0]),dateList6.index(date12[1])])
    h5file.close()

    pairs = pair_sort(pairs)

    return pairs
Example 22
def read_igram_pairs(igramFile):
    '''Read pairs index from hdf5 file'''
    ## Read Igram file
    h5file = h5py.File(igramFile,'r')
    k = h5file.keys()
    if 'interferograms' in k: k[0] = 'interferograms'
    elif 'coherence'    in k: k[0] = 'coherence'
    if k[0] not in  ['interferograms','coherence','wrapped']:
        print 'Only interferograms / coherence / wrapped are supported.';  sys.exit(1)

    dateList  = ptime.ifgram_date_list(igramFile)
    dateList6 = ptime.yymmdd(dateList)

    pairs = []
    igramList=h5file[k[0]].keys()
    for igram in igramList:
        date12 = h5file[k[0]][igram].attrs['DATE12'].split('-')
        pairs.append([dateList6.index(date12[0]),dateList6.index(date12[1])])
    h5file.close()

    pairs = pair_sort(pairs)

    return pairs
Example 23
def main(argv):

    ## Global Variables
    global fontSize, lineWidth, markerColor, markerSize

    ## Default Values
    fontSize = 12
    lineWidth = 2
    markerColor = "orange"
    markerSize = 16
    saveFig = "no"
    dispFig = "yes"
    saveList = "no"

    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(argv, "h:b:f:s:w:l:m:c:o:", ["save", "nodisplay", "list"])
        except getopt.GetoptError:
            Usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ("-h", "--help"):
                Usage()
                sys.exit()
            elif opt == "-b":
                baselineFile = arg
            elif opt == "-f":
                igramsFile = arg
            elif opt == "-l":
                listFile = arg
            elif opt == "-s":
                fontSize = int(arg)
            elif opt == "-w":
                lineWidth = int(arg)
            elif opt == "-m":
                markerSize = int(arg)
            elif opt == "-c":
                markerColor = arg
            # elif opt == '-o':        figName2  = arg;   saveFig = 'yes'
            elif opt == "--save":
                saveFig = "yes"
            elif opt == "--nodisplay":
                dispFig = "no"
                saveFig = "yes"
            elif opt == "--list":
                saveList = "yes"

        try:
            igramsFile
        except:
            try:
                baselineFile
            except:
                Usage()
                sys.exit(1)

    elif len(sys.argv) == 2:
        igramsFile = argv[0]
    else:
        Usage()
        sys.exit(1)

    ##### Output figure name
    figName1 = "BperpHist.pdf"
    figName2 = "Network.pdf"
    try:
        igramsFile
        if "Modified" in igramsFile:
            figName1 = "BperpHist_Modified.pdf"
            figName2 = "Network_Modified.pdf"
    except:
        pass

    ############# Read Time and Bperp ################
    print "\n******************** Plot Network **********************"
    try:
        igramsFile
        atr = readfile.read_attributes(igramsFile)
        k = atr["FILE_TYPE"]
        if k not in ["interferograms", "coherence", "wrapped"]:
            print "Only interferograms / coherence / wrapped are supported."
            sys.exit(1)

        print "reading date and perpendicular baseline from " + k
        dateList = ptime.date_list(igramsFile)
        dateList6 = ptime.yymmdd(dateList)
        print "number of acquisitions: " + str(len(dateList))
        Bp = ut.Baseline_timeseries(igramsFile)
    except:
        baselineFile
        print "reading date and perpendicular baseline from " + baselineFile
        dateList, Bp = pnet.read_baseline_file(baselineFile)[0:2]
        dateList6 = ptime.yymmdd(dateList)

    ############# Read Pairs Info ####################
    print "reading pairs info"
    try:
        listFile
        pairs = pnet.read_pairs_list(listFile, dateList)
    except:
        pairs = pnet.read_igram_pairs(igramsFile)
    print "number of pairs       : " + str(len(pairs))

    if saveList == "yes":
        pnet.write_pairs_list(pairs, dateList, "Pairs.list")
        print "save pairs info to Pairs.list"

    ############# Read Unwrapping Error Info #######################
    ## For simulated interferograms only
    ## To plot the interferograms with unwrapping errors with a different color
    # N_unw_err=0
    # try:
    #  for ifgram in  ifgramList:
    #    if h5file[k[0]][ifgram].attrs['unwrap_error']=='yes':
    #       N_unw_err=N_unw_err+1
    # except: pass
    #
    # if N_unw_err>0:
    #   pairs_ue=np.zeros([N_unw_err,2],np.int)
    #   i=0
    #   for ifgram in  ifgramList:
    #     if h5file[k[0]][ifgram].attrs['unwrap_error']=='yes':
    #       date1,date2 = h5file[k[0]][ifgram].attrs['DATE12'].split('-')
    #       pairs_ue[i][0]=dateList6.index(date1)
    #       pairs_ue[i][1]=dateList6.index(date2)
    #       i=i+1
    #

    ############### Fig 1 - Interferogram Network ##################
    fig1 = plt.figure(1)
    fig1 = plot_network(fig1, pairs, dateList, Bp)

    if saveFig == "yes":
        plt.savefig(figName2, bbox_inches="tight")
        print "save figure to " + figName2

    ############## Fig 2 - Baseline History ###################
    fig2 = plt.figure(2)
    fig2 = plot_bperp_hist(fig2, dateList, Bp)

    if saveFig == "yes":
        plt.savefig(figName1, bbox_inches="tight")
        print "save figure to " + figName1

    if dispFig == "yes":
        plt.show()
Example 24
def plot_network(ax, date12_list, date_list, pbase_list, plot_dict={}, date12_list_drop=[]):
    '''Plot Temporal-Perp baseline Network
    Inputs
        ax : matplotlib axes object
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYYYMMDD/YYMMDD format
        pbase_list  : list of float, perp baseline, len=number of acquisition
        plot_dict   : dictionary with the following items:
                      fontsize
                      linewidth
                      markercolor
                      markersize

                      coherence_list : list of float, coherence value of each interferogram, len = number of ifgrams
                      coh_date12_list: list of date, corresponding to coherence_list
                      disp_min/max :  float, min/max range of the color display based on coherence_list
                      colormap : string, colormap name
                      coh_thres : float, coherence of where to cut the colormap for display
                      disp_title : bool, show figure title or not, default: True
    Output
        ax : matplotlib axes object
    '''
    
    # Figure Setting
    keyList = plot_dict.keys()
    if not 'fontsize'    in keyList:   plot_dict['fontsize']    = 12
    if not 'linewidth'   in keyList:   plot_dict['linewidth']   = 2
    if not 'markercolor' in keyList:   plot_dict['markercolor'] = 'orange'
    if not 'markersize'  in keyList:   plot_dict['markersize']  = 16
    # For colorful display of coherence
    if not 'coherence_list' in keyList:  plot_dict['coherence_list'] = None
    if not 'disp_min'       in keyList:  plot_dict['disp_min']       = 0.2
    if not 'disp_max'       in keyList:  plot_dict['disp_max']       = 1.0
    if not 'colormap'       in keyList:  plot_dict['colormap']       = 'RdBu'
    if not 'disp_title'     in keyList:  plot_dict['disp_title']     = True
    transparency = 0.7
    
    # Date Convert
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    dates, datevector = ptime.date_list2vector(date8_list)

    # Index of date12 used and dropped
    idx_date12_keep = range(len(date12_list))
    idx_date12_drop = []
    for i in date12_list_drop:
        idx = date12_list.index(i)
        idx_date12_keep.remove(idx)
        idx_date12_drop.append(idx)

    # Index of date used and dropped
    date12_list_keep = sorted(list(set(date12_list) - set(date12_list_drop)))
    m_dates = [i.split('-')[0] for i in date12_list_keep]
    s_dates = [i.split('-')[1] for i in date12_list_keep]
    date8_list_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date8_list_drop = sorted(list(set(date8_list) - set(date8_list_keep)))

    idx_date_keep = range(len(date8_list))
    idx_date_drop = []
    for i in date8_list_drop:
        idx = date8_list.index(i)
        idx_date_keep.remove(idx)
        idx_date_drop.append(idx)

    # Plotting
    #ax=fig.add_subplot(111)
    ## Colorbar when coherence is colored
    if plot_dict['coherence_list']:
        data_min = min(plot_dict['coherence_list'])
        data_max = max(plot_dict['coherence_list'])
        # Normalize
        normalization = False
        if normalization:
            plot_dict['coherence_list'] = [(coh-data_min) / (data_max-data_min) for coh in plot_dict['coherence_list']]
            plot_dict['disp_min'] = data_min
            plot_dict['disp_max'] = data_max
        
        print 'showing coherence'
        print 'colormap: '+plot_dict['colormap']
        print 'display range: '+str([plot_dict['disp_min'], plot_dict['disp_max']])
        print 'data    range: '+str([data_min, data_max])

        # Use lower/upper part of colormap to emphasize dropped interferograms
        if 'coh_thres' not in plot_dict.keys() or not plot_dict['coh_thres']:
            # Find proper cut percentage so that all keep pairs are blue and drop pairs are red
            coh_list_keep = [plot_dict['coherence_list'][i] for i in idx_date12_keep]
            coh_list_drop = [plot_dict['coherence_list'][i] for i in idx_date12_drop]
            plot_dict['coh_thres'] = min(coh_list_keep)
            if coh_list_drop:
                plot_dict['coh_thres'] += max(coh_list_drop)
                plot_dict['coh_thres'] /= 2
            plot_dict['coh_thres'] = round(plot_dict['coh_thres'], -int(np.floor(np.log10(abs(plot_dict['coh_thres'])))))

        print 'color jump at '+str(plot_dict['coh_thres'])
        c1_num = (plot_dict['coh_thres'] - plot_dict['disp_min']) / (plot_dict['disp_max'] - plot_dict['disp_min'])
        c1_num = int(c1_num * 200)
        cmap = plt.get_cmap(plot_dict['colormap'])
        colors1 = cmap(np.linspace(0.0, 0.3, c1_num))
        colors2 = cmap(np.linspace(0.6, 1.0, 200 - c1_num))
        cmap = colors.LinearSegmentedColormap.from_list('truncate_RdBu', np.vstack((colors1, colors2)))

        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "5%", pad="3%")
        norm = mpl.colors.Normalize(vmin=plot_dict['disp_min'], vmax=plot_dict['disp_max'])
        cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
        cbar.set_label('Spatial Coherence', fontsize=plot_dict['fontsize'])

    ## Dot - SAR Acquisition
    if idx_date_keep:
        x_list = [dates[i] for i in idx_date_keep]
        y_list = [pbase_list[i] for i in idx_date_keep]
        ax.plot(x_list, y_list, 'ko', alpha=0.7, ms=plot_dict['markersize'], mfc=plot_dict['markercolor'])
    if idx_date_drop:
        x_list = [dates[i] for i in idx_date_drop]
        y_list = [pbase_list[i] for i in idx_date_drop]
        ax.plot(x_list, y_list, 'ko', alpha=0.7, ms=plot_dict['markersize'], mfc='gray')

    ## Line - Pair/Interferogram        
    # interferograms kept
    for date12 in date12_list_keep:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        x = np.array([dates[idx1], dates[idx2]])
        y = np.array([pbase_list[idx1], pbase_list[idx2]])
        if plot_dict['coherence_list']:
            coh = plot_dict['coherence_list'][plot_dict['coh_date12_list'].index(date12)]
            coh_idx = (coh - plot_dict['disp_min']) / (plot_dict['disp_max'] - plot_dict['disp_min'])
            ax.plot(x, y, '-', lw=plot_dict['linewidth'], alpha=transparency, c=cmap(coh_idx)) 
        else:
            ax.plot(x, y, '-', lw=plot_dict['linewidth'], alpha=transparency, c='k')

    # interferograms dropped
    for date12 in date12_list_drop:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        x = np.array([dates[idx1], dates[idx2]])
        y = np.array([pbase_list[idx1], pbase_list[idx2]])
        if plot_dict['coherence_list']:
            coh_idx = (plot_dict['coherence_list'][date12_list.index(date12)] - plot_dict['disp_min']) /\
                      (plot_dict['disp_max'] - plot_dict['disp_min'])
            ax.plot(x, y, '--', lw=plot_dict['linewidth'], alpha=transparency, c=cmap(coh_idx)) 
        else:
            ax.plot(x, y, '--', lw=plot_dict['linewidth'], alpha=transparency, c='k')

    if plot_dict['disp_title']:
        ax.set_title('Interferogram Network', fontsize=plot_dict['fontsize'])
    # axis format
    ax = ptime.auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'])[0]
    ax = auto_adjust_yaxis(ax, pbase_list, plot_dict['fontsize'])
    ax.set_xlabel('Time [years]',fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Perp Baseline [m]',fontsize=plot_dict['fontsize'])

    return ax
Example 25
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    #print '\n****************** Network Modification ********************'

    if (not inps.reference_file and not inps.template_file and\
        not inps.max_temp_baseline and not inps.max_perp_baseline and\
        not inps.drop_ifg_index and not inps.drop_date and \
        not inps.coherence_file):
        # Display network for manually modification when there is no other modification input.
        print 'No input found to remove interferograms, continue by displaying the network to select pairs manually ...'
        inps.disp_network = True

    # Update inps if template is input
    if inps.template_file:
        inps = update_inps_with_template(inps, inps.template_file)

    # Convert index input into a continuous index list
    if inps.drop_ifg_index:
        ifg_index = list(inps.drop_ifg_index)
        inps.drop_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.drop_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.drop_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.drop_ifg_index = sorted(inps.drop_ifg_index)
        if max(inps.drop_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.drop_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print date12

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_file:
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # Calculate spatial average coherence
        if inps.mask_file:
            mask = readfile.read(inps.mask_file)[0]
            print 'mask coherence with file: ' + inps.mask_file
        else:
            mask = None
        cohTextFile = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.list'
        if os.path.isfile(cohTextFile):
            print 'average coherence in space has been calculated before and stored in file: ' + cohTextFile
            print 'read it directly, or delete it and re-run the script to re-calculate the list'
            cohTxt = np.loadtxt(cohTextFile, dtype=str)
            mean_coherence_list = [float(i) for i in cohTxt[:, 1]]
            coh_date12_list = [i for i in cohTxt[:, 0]]
        else:
            print 'calculating average coherence of each interferogram ...'
            mean_coherence_list = ut.spatial_average(inps.coherence_file,
                                                     mask,
                                                     saveList=True)
            coh_date12_list = pnet.get_date12_list(inps.coherence_file)
        print 'date12 with average coherence < ' + str(
            inps.min_coherence) + ': '
        for i in range(len(coh_date12_list)):
            if mean_coherence_list[i] < inps.min_coherence:
                date12 = coh_date12_list[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.igram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.4 Update date12_to_rmv from drop_ifg_index
    if inps.drop_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.drop_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from drop_date
    if inps.drop_date:
        inps.drop_date = ptime.yymmdd(inps.drop_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.drop_date)
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.drop_date) or (date2 in inps.drop_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'list   of interferograms to remove:'
    print date12_to_rmv

    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                outFile = 'Modified_Mask.h5'
                print 'writing >>> ' + outFile
                ut.nonzero_mask(Modified_File, outFile)
            elif k == 'coherence':
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'Modified_average_spatial_coherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print '\nplot modified network and save to file.'
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            print plotCmd
            os.system(plotCmd)

        print 'Done.'
        return
    else:
        print 'No interferogram dropped, skip update.'
        return
Example 26
def main(argv):

    ##### Default
    fontSize    = 12
    lineWidth   = 2
    markerColor = 'crimson'
    markerSize  = 16

    disp_fig  = 'no'
    save_fig  = 'yes'
    save_list = 'yes'

    ref_file  = 'reference_date.txt'
    drop_file = 'drop_date.txt'

    ##### Check Inputs
    if len(sys.argv)>3:
        try:
            opts, args = getopt.getopt(argv,'h:f:m:o:x:y:',['help','circle='])
        except getopt.GetoptError:
            print 'Error in reading input options!';  Usage() ; sys.exit(1)

        for opt,arg in opts:
            if opt in ("-h","--help"):    Usage() ; sys.exit()
            elif opt == '-f':  File      = arg
            elif opt == '-m':  maskFile  = arg
            elif opt == '-x':  xsub = [int(i) for i in arg.split(':')];  xsub.sort()
            elif opt == '-y':  ysub = [int(i) for i in arg.split(':')];  ysub.sort()
            elif opt == '--circle'   :  cir_par   = [i for i in arg.split(';')]
            #elif opt == '-o':  outName   = arg
            
    else:
        try:  File = argv[0]
        except: Usage(); sys.exit(1)
        try:  maskFile = argv[1]
        except: pass

    try:  atr  = readfile.read_attributes(File)
    except: Usage(); sys.exit(1)
    ext      = os.path.splitext(File)[1].lower()
    FileBase = os.path.basename(File).split(ext)[0]
    outNameBase = 'spatialMean_'+FileBase
    print '\n*************** Spatial Average ******************'

    ##### Input File Info
    k = atr['FILE_TYPE']
    print 'Input file is '+k
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    h5file = h5py.File(File)
    epochList = h5file[k].keys();
    epochList = sorted(epochList)
    epochNum  = len(epochList)
    print 'number of epoch: '+str(epochNum)
    dates,datevector = ptime.date_list2vector(epochList)

    ##### Mask Info
    try:
        Mask_orig,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile
        Masking = 'yes'
    except:
        print 'No mask. Use the whole area.'
        Masking = 'no'
        Mask_orig=np.ones((length,width))
    Mask = np.zeros((length,width))
    Mask[:] = Mask_orig[:]

    ## Bounding Subset
    try:
        xsub
        ysub
        ysub,xsub = subset.check_subset_range(ysub,xsub,atr)
        Mask[ysub[0]:ysub[1],xsub[0]:xsub[1]] = Mask_orig[ysub[0]:ysub[1],xsub[0]:xsub[1]]*2
        #Mask[0:ysub[0],:]      = 0
        #Mask[ysub[1]:length,:] = 0
        #Mask[:,0:xsub[0]]      = 0
        #Mask[:,xsub[1]:width]  = 0
    except:
        Mask = Mask_orig*2
        print 'No subset input.'

    ## Circle Inputs
    try:
        cir_par
        for i in range(len(cir_par)):
            cir_idx = circle_index(atr,cir_par[i])
            Mask[cir_idx] = Mask_orig[cir_idx]
            print 'Circle '+str(i)+': '+cir_par[i]
    except: print 'No circle of interest input.'
    
    ## Mask output
    idx = Mask == 2
    idxNum = float(sum(sum(idx)))
    
    fig = plt.figure()
    plt.imshow(Mask,cmap='gray')
    plt.savefig(outNameBase+'_mask.png',bbox_inches='tight')
    print 'save mask to '+outNameBase+'_mask.png'
    #fig.clf()

    ##### Calculation
    meanList   = np.zeros(epochNum)
    pixPercent = np.zeros(epochNum)
    pixT = 0.7
    print 'calculating ...'
    print '  Date       Mean   Percentage'
    for i in range(epochNum):
        epoch = epochList[i]
        d      = h5file[k].get(epoch)[:]
        #d[Mask==0]  = np.nan
        
        meanList[i]   = np.nanmean(d[idx])
        pixPercent[i] = np.sum(d[idx] >= pixT)/idxNum
        
        print epoch+' :   %.2f    %.1f%%'%(meanList[i],pixPercent[i]*100)
    del d
    h5file.close()

    ##### Reference date - Max Value
    top3 = sorted(zip(meanList,epochList), reverse=True)[:3]
    print '------------ Top 3 Mean ------------------'
    print top3
    ## Write to txt file
    fref = open(ref_file,'w')
    fref.write(str(top3[0][1])+'\n')
    fref.close()
    print 'write optimal reference date to '+ref_file
    idxMean = meanList == np.nanmax(meanList)

    ##### Drop dates - mean threshold
    #meanT = 0.7
    #idxMean  = meanList < meanT
    #print '------------ Mean Value < '+str(meanT)+' --------'
    #print np.array(epochList)[idxMean]
    #print meanList[idxMean]

    ##### Drop dates - good pixel percentage
    pixNumT = 0.7
    print '------------ Good Pixel Percentage < %.0f%% -------'%(pixNumT*100)
    idxPix = pixPercent < pixNumT
    dropEpochList = np.array(epochList)[idxPix]
    print dropEpochList
    print pixPercent[idxPix]
    ## Write to txt file
    fdrop = open(drop_file,'w')
    for i in range(len(dropEpochList)):
        fdrop.write(str(dropEpochList[i])+'\n')
    fdrop.close()
    print 'write drop dates to '+drop_file
    print '-------------------------------------------'

    ##### Display
    fig = plt.figure(figsize=(12,12))
    ax  = fig.add_subplot(211)
    ax.plot(dates, meanList, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    #ax.plot([dates[0],dates[-1]],[meanT,meanT], '--b', lw=lineWidth)
    #sc = ax.scatter(dates, np.tile(0.5,epochNum), c=meanList, s=22**2, alpha=0.3, vmin=0.0, vmax=1.0)
    #ax.scatter(np.array(dates)[idxMean], 0.5, c=meanList[idxMean], s=22**2, alpha=1.0, vmin=0.0, vmax=1.0)
    ax = ptime.adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Spatial Average Value', fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    #cbar = plt.colorbar(sc)
    #cbar.set_label('Spatial Mean of Normalized Sum Epochs')

    ax  = fig.add_subplot(212)
    ax.plot(dates, pixPercent, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    ax.plot([dates[0],dates[-1]],[pixNumT,pixNumT], '--b', lw=lineWidth)
    ax = ptime.adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Percentage of Pixels with Value > '+str(pixNumT), fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    vals = ax.get_yticks()
    ax.set_yticklabels(['{:3.0f}%'.format(i*100) for i in vals])

    if save_fig == 'yes':
        plt.savefig(outNameBase+'.png',bbox_inches='tight')
        print 'save figure to '+outNameBase+'.png'

    if disp_fig == 'yes':
        plt.show()

    ##### Output
    if save_list == 'yes':
        epochList6 = ptime.yymmdd(epochList)
        fl = open(outNameBase+'.txt','w')
        for i in range(epochNum):
            str_line = epochList6[i]+'    %.2f    %.2f\n'%(meanList[i],pixPercent[i])
            fl.write(str_line)
        fl.close()
        print 'write data to '+outNameBase+'.txt\n'
Example 27
def plot_network(ax,
                 date12_list,
                 date_list,
                 pbase_list,
                 plot_dict={},
                 date12_list_drop=[]):
    '''Plot Temporal-Perp baseline Network
    Inputs
        ax : matplotlib axes object
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYYYMMDD/YYMMDD format
        pbase_list  : list of float, perp baseline, len=number of acquisition
        plot_dict   : dictionary with the following items:
                      fontsize
                      linewidth
                      markercolor
                      markersize

                      coherence_list : list of float, coherence value of each interferogram, len = number of ifgrams
                      disp_min/max :  float, min/max range of the color display based on coherence_list
                      colormap : string, colormap name
                      coh_thres : float, coherence of where to cut the colormap for display
                      disp_title : bool, show figure title or not, default: True
                      disp_drop: bool, show dropped interferograms or not, default: True
    Output
        ax : matplotlib axes object
    '''

    # Figure Setting
    keyList = plot_dict.keys()
    if not 'fontsize' in keyList: plot_dict['fontsize'] = 12
    if not 'linewidth' in keyList: plot_dict['linewidth'] = 2
    if not 'markercolor' in keyList: plot_dict['markercolor'] = 'orange'
    if not 'markersize' in keyList: plot_dict['markersize'] = 16
    # For colorful display of coherence
    if not 'coherence_list' in keyList: plot_dict['coherence_list'] = None
    if not 'disp_min' in keyList: plot_dict['disp_min'] = 0.2
    if not 'disp_max' in keyList: plot_dict['disp_max'] = 1.0
    if not 'colormap' in keyList: plot_dict['colormap'] = 'RdBu'
    if not 'disp_title' in keyList: plot_dict['disp_title'] = True
    if not 'coh_thres' in keyList: plot_dict['coh_thres'] = None
    if not 'disp_drop' in keyList: plot_dict['disp_drop'] = True
    coh_list = plot_dict['coherence_list']
    disp_min = plot_dict['disp_min']
    disp_max = plot_dict['disp_max']
    coh_thres = plot_dict['coh_thres']
    transparency = 0.7

    # Date Convert
    date8_list = ptime.yyyymmdd(sorted(date_list))
    date6_list = ptime.yymmdd(date8_list)
    dates, datevector = ptime.date_list2vector(date8_list)

    ## Keep/Drop - date12
    date12_list_keep = sorted(list(set(date12_list) - set(date12_list_drop)))
    idx_date12_keep = [date12_list.index(i) for i in date12_list_keep]
    idx_date12_drop = [date12_list.index(i) for i in date12_list_drop]
    if not date12_list_drop:
        plot_dict['disp_drop'] = False

    ## Keep/Drop - date
    m_dates = [i.split('-')[0] for i in date12_list_keep]
    s_dates = [i.split('-')[1] for i in date12_list_keep]
    date8_list_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date8_list_drop = sorted(list(set(date8_list) - set(date8_list_keep)))
    idx_date_keep = [date8_list.index(i) for i in date8_list_keep]
    idx_date_drop = [date8_list.index(i) for i in date8_list_drop]

    # Plotting
    #ax=fig.add_subplot(111)
    ## Colorbar when coherence is colored
    if coh_list:
        data_min = min(coh_list)
        data_max = max(coh_list)
        # Normalize
        normalization = False
        if normalization:
            coh_list = [(coh - data_min) / (data_max - data_min)
                        for coh in coh_list]
            disp_min = data_min
            disp_max = data_max

        print 'showing coherence'
        print 'colormap: ' + plot_dict['colormap']
        print 'display range: ' + str([disp_min, disp_max])
        print 'data    range: ' + str([data_min, data_max])

        # Use lower/upper part of colormap to emphasize dropped interferograms
        if not coh_thres:
            # Find proper cut percentage so that all kept pairs are blue and dropped pairs are red
            coh_list_keep = [coh_list[i] for i in idx_date12_keep]
            coh_list_drop = [coh_list[i] for i in idx_date12_drop]
            if coh_list_drop:
                coh_thres = max(coh_list_drop)
            else:
                coh_thres = min(coh_list_keep)

        if coh_thres < disp_min:
            print 'data range exceeds original display range, set new display range to: [0.0, %f]' % (
                disp_max)
            disp_min = 0.0
        c1_num = np.ceil(200.0 * (coh_thres - disp_min) /
                         (disp_max - disp_min)).astype('int')
        coh_thres = c1_num / 200.0 * (disp_max - disp_min) + disp_min
        cmap = plt.get_cmap(plot_dict['colormap'])
        colors1 = cmap(np.linspace(0.0, 0.3, c1_num))
        colors2 = cmap(np.linspace(0.6, 1.0, 200 - c1_num))
        cmap = colors.LinearSegmentedColormap.from_list(
            'truncate_RdBu', np.vstack((colors1, colors2)))
        print 'color jump at ' + str(coh_thres)

        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "5%", pad="3%")
        norm = mpl.colors.Normalize(vmin=disp_min, vmax=disp_max)
        cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
        cbar.set_label('Average Spatial Coherence',
                       fontsize=plot_dict['fontsize'])

    ## Dot - SAR Acquisition
    if idx_date_keep:
        x_list = [dates[i] for i in idx_date_keep]
        y_list = [pbase_list[i] for i in idx_date_keep]
        ax.plot(x_list,
                y_list,
                'ko',
                alpha=0.7,
                ms=plot_dict['markersize'],
                mfc=plot_dict['markercolor'])
    if idx_date_drop:
        x_list = [dates[i] for i in idx_date_drop]
        y_list = [pbase_list[i] for i in idx_date_drop]
        ax.plot(x_list,
                y_list,
                'ko',
                alpha=0.7,
                ms=plot_dict['markersize'],
                mfc='gray')

    ## Line - Pair/Interferogram
    # interferograms dropped
    if plot_dict['disp_drop']:
        for date12 in date12_list_drop:
            date1, date2 = date12.split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            x = np.array([dates[idx1], dates[idx2]])
            y = np.array([pbase_list[idx1], pbase_list[idx2]])
            if coh_list:
                coh = coh_list[date12_list.index(date12)]
                coh_idx = (coh - disp_min) / (disp_max - disp_min)
                ax.plot(x,
                        y,
                        '--',
                        lw=plot_dict['linewidth'],
                        alpha=transparency,
                        c=cmap(coh_idx))
            else:
                ax.plot(x,
                        y,
                        '--',
                        lw=plot_dict['linewidth'],
                        alpha=transparency,
                        c='k')

    # interferograms kept
    for date12 in date12_list_keep:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        x = np.array([dates[idx1], dates[idx2]])
        y = np.array([pbase_list[idx1], pbase_list[idx2]])
        if coh_list:
            coh = coh_list[date12_list.index(date12)]
            coh_idx = (coh - disp_min) / (disp_max - disp_min)
            ax.plot(x,
                    y,
                    '-',
                    lw=plot_dict['linewidth'],
                    alpha=transparency,
                    c=cmap(coh_idx))
        else:
            ax.plot(x,
                    y,
                    '-',
                    lw=plot_dict['linewidth'],
                    alpha=transparency,
                    c='k')

    if plot_dict['disp_title']:
        ax.set_title('Interferogram Network', fontsize=plot_dict['fontsize'])

    # axis format
    ax = ptime.auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'])[0]
    ax = auto_adjust_yaxis(ax, pbase_list, plot_dict['fontsize'])
    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Perp Baseline [m]', fontsize=plot_dict['fontsize'])

    # Legend
    if plot_dict['disp_drop']:
        solid_line = mlines.Line2D([], [],
                                   color='k',
                                   ls='solid',
                                   label='Interferograms')
        dash_line = mlines.Line2D([], [],
                                  color='k',
                                  ls='dashed',
                                  label='Interferograms dropped')
        ax.legend(handles=[solid_line, dash_line])

    return ax
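
# --- Illustrative sketch (not part of the original function) ---
# The colormap truncation above maps coherence below a threshold to the lower
# 30% of 'RdBu' and coherence above it to the upper 40%, so dropped pairs stand
# out while kept pairs stay at the other end of the scale. A minimal,
# self-contained version of that idea, with assumed values for
# disp_min / disp_max / coh_thres:
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors

disp_min, disp_max, coh_thres = 0.2, 1.0, 0.5
c1_num = int(np.ceil(200.0 * (coh_thres - disp_min) / (disp_max - disp_min)))
base_cmap = plt.get_cmap('RdBu')
trunc_cmap = colors.LinearSegmentedColormap.from_list(
    'truncate_RdBu',
    np.vstack((base_cmap(np.linspace(0.0, 0.3, c1_num)),
               base_cmap(np.linspace(0.6, 1.0, 200 - c1_num)))))
# Color of a pair with coherence 0.8, scaled into [0, 1] of the new colormap:
print(trunc_cmap((0.8 - disp_min) / (disp_max - disp_min)))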
Ejemplo n.º 28
0
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            coh_file_idx = k_list.index('coherence')
            inps.coherence_file = inps.file[coh_file_idx]
        except ValueError:
            print 'No coherence file found! Cannot use coherence-based method without it.'

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
            # Check lookup file
            if not inps.lookup_file:
                print 'Warning: no lookup table file found! Can not use ' + key + ' option without it.'
                print 'skip this option.'
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
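
# --- Illustrative template snippet (hypothetical values) ---
# The function above only reacts to keys with the 'pysar.network.' prefix; a
# template file driving it could contain lines like the following, where 'auto'
# falls back to the defaults hard-coded above:
#
#   pysar.network.coherenceBased   = yes
#   pysar.network.keepMinSpanTree  = yes
#   pysar.network.coherenceFile    = auto          (-> coherence.h5)
#   pysar.network.minCoherence     = 0.65
#   pysar.network.maskFile         = auto          (-> maskLand.h5 or mask.h5)
#   pysar.network.tempBaseMax      = 365
#   pysar.network.perpBaseMax      = 400
#   pysar.network.excludeDate      = 20100101, 20101225
#   pysar.network.startDate        = 20091001
#   pysar.network.endDate          = 20120501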
Ejemplo n.º 29
0
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based,\
                           inps.start_date, inps.end_date, inps.reset]):
        # Display network for manual modification when there is no other modification input.
        print 'No input option found to remove interferograms'
        if inps.template_file:
            print 'Keep all interferograms by enabling the --reset option'
            inps.reset = True
        else:
            print 'To manually modify the network, please use the --manual option'
            return

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)
        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)
        return

    # Convert index input (e.g. '3:5') to a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file,
                                              check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.lookup_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.lookup_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[
                    i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if abs(ifg_bperp_list[i]) > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5, print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 list to drop is the same as the existing one marked in the input file, skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms' and inps.update_aux:
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)

            elif k == 'coherence' and inps.update_aux:
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch spatial average txt file of coherence if it exists
                coh_spatialAverage_file = os.path.splitext(
                    Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
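
# --- Illustrative sketch (not part of the original function) ---
# The exclude-index handling in main() above expands entries like '3:5' into a
# continuous list of 1-based interferogram indices. A minimal, self-contained
# version of that expansion, with an assumed input:
exclude_ifg_index = ['1', '3:5', '8']      # hypothetical user input
expanded = []
for index in exclude_ifg_index:
    bounds = sorted([int(i) for i in index.split(':')])
    if len(bounds) == 2:
        expanded += range(bounds[0], bounds[1] + 1)
    else:
        expanded.append(bounds[0])
print(sorted(expanded))                    # [1, 3, 4, 5, 8]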
Ejemplo n.º 30
0
def extract_attribute_interferogram(fname):
    '''Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Inputs:
        fname : str, Gamma interferogram filename or path, i.e. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Output:
        atr : dict, Attributes dictionary
    '''
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    rsc_file = fname + '.rsc'
    #if os.path.isfile(rsc_file):
    #    return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['INSAR_PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    ## Get info: date12, num of looks
    try:
        date12 = str(re.findall('\d{8}[-_]\d{8}', file_basename)[0])
    except:
        date12 = str(re.findall('\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('_', '-').split('-')
    atr['DATE12'] = ptime.yymmdd(m_date) + '-' + ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    ## Read .off and .par file
    off_file = file_dir + '/*' + date12 + lks + '.off'
    m_par_file = [
        file_dir + '/*' + m_date + lks + i for i in ['.amp.par', '.ramp.par']
    ]
    s_par_file = [
        file_dir + '/*' + s_date + lks + i for i in ['.amp.par', '.ramp.par']
    ]

    try:
        off_file = ut.get_file_list(off_file)[0]
    except:
        print '\nERROR: Cannot find .off file, it is supposed to look like: ' + off_file
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except:
        print '\nERROR: Cannot find master date .par file, it is supposed to look like: ' + str(m_par_file)
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except:
        print '\nERROR: Cannot find slave date .par file, it is supposed to look like: ' + str(s_par_file)

    #print 'read '+m_par_file
    #print 'read '+off_file
    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)

    #print 'convert Gamma attribute to ROI_PAC style'
    atr.update(par_dict)
    atr.update(off_dict)
    atr = readfile.attribute_gamma2roipac(atr)

    ## Perp Baseline Info
    #print 'extract baseline info from %s, %s and %s file' % (m_par_file, s_par_file, off_file)
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    ## LAT/LON_REF1/2/3/4
    #print 'extract LAT/LON_REF1/2/3/4 from '+m_par_file
    atr = get_lalo_ref(m_par_file, atr)

    ## Write to .rsc file
    #print 'writing >>> '+rsc_file
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except:
        atr_orig = None
    if atr_orig != atr:
        print 'merge %s, %s and %s into %s' % (os.path.basename(m_par_file), os.path.basename(s_par_file),\
                                               os.path.basename(off_file), os.path.basename(rsc_file))
        writefile.write_roipac_rsc(atr, rsc_file)

    return rsc_file
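
# --- Illustrative sketch (not part of the original function) ---
# The date12 extraction above tries an 8-digit date pair first and falls back
# to a 6-digit one, then normalizes the separator. The filename below follows
# the Gamma naming style quoted in the docstring:
import os
import re

fname = '/PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw'
base = os.path.basename(fname)
try:
    date12 = re.findall(r'\d{8}[-_]\d{8}', base)[0]
except IndexError:
    date12 = re.findall(r'\d{6}[-_]\d{6}', base)[0]
m_date, s_date = date12.replace('_', '-').split('-')
print(m_date + '-' + s_date)               # 130118-130129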
Ejemplo n.º 31
0
def plot_coherence_matrix(ax,
                          date12_list,
                          coherence_list,
                          date12_list_drop=[],
                          plot_dict={}):
    '''Plot Coherence Matrix of input network
    
    if date12_list_drop is not empty, plot KEPT pairs in the upper triangle and
                                           ALL  pairs in the lower triangle.
    '''
    # Figure Setting
    keyList = plot_dict.keys()
    if not 'fontsize' in keyList: plot_dict['fontsize'] = 12
    if not 'linewidth' in keyList: plot_dict['linewidth'] = 2
    if not 'markercolor' in keyList: plot_dict['markercolor'] = 'orange'
    if not 'markersize' in keyList: plot_dict['markersize'] = 16
    if not 'disp_title' in keyList: plot_dict['disp_title'] = True

    coh_mat = coherence_matrix(date12_list, coherence_list)

    if date12_list_drop:
        # Date Convert
        m_dates = [i.split('-')[0] for i in date12_list]
        s_dates = [i.split('-')[1] for i in date12_list]
        date6_list = ptime.yymmdd(sorted(list(set(m_dates + s_dates))))
        # Set dropped pairs' value to nan, in upper triangle only.
        for date12 in date12_list_drop:
            idx1, idx2 = [date6_list.index(i) for i in date12.split('-')]
            coh_mat[idx1, idx2] = np.nan

    im = ax.imshow(coh_mat,
                   cmap='jet',
                   vmin=0.0,
                   vmax=1.0,
                   interpolation='nearest')
    date_num = coh_mat.shape[0]
    if date_num < 30:
        tick_list = range(0, date_num, 5)
    else:
        tick_list = range(0, date_num, 10)
    ax.get_xaxis().set_ticks(tick_list)
    ax.get_yaxis().set_ticks(tick_list)
    ax.set_xlabel('Image Number', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Image Number', fontsize=plot_dict['fontsize'])

    if plot_dict['disp_title']:
        ax.set_title('Coherence Matrix')

    # Colorbar
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", "3%", pad="3%")
    cbar = plt.colorbar(im, cax=cax)
    cbar.set_label('Spatial Coherence', fontsize=plot_dict['fontsize'])

    # Legend
    if date12_list_drop:
        ax.plot([], [], label='Upper: used ifgrams')
        ax.plot([], [], label='Lower: all ifgrams')
        ax.legend(handlelength=0)

    return ax
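
# --- Illustrative sketch (not part of the original function) ---
# The function above relies on a coherence_matrix() helper (defined elsewhere)
# that arranges pairwise coherence into a symmetric date-by-date matrix; dropped
# pairs are then blanked (NaN) in the upper triangle only, so the lower triangle
# still shows all pairs. A stand-in construction with made-up values:
import numpy as np

date6_list = ['100101', '100212', '100326']            # hypothetical dates
coh = {('100101', '100212'): 0.9,
       ('100101', '100326'): 0.4,
       ('100212', '100326'): 0.8}                      # hypothetical coherence
n = len(date6_list)
coh_mat = np.full((n, n), np.nan)
for (d1, d2), value in coh.items():
    i, j = date6_list.index(d1), date6_list.index(d2)
    coh_mat[i, j] = value                              # upper triangle
    coh_mat[j, i] = value                              # lower triangle
coh_mat[0, 2] = np.nan                                 # drop 100101-100326 in upper triangle only
print(coh_mat)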
Ejemplo n.º 32
0
def read_template2inps(templateFile, inps=None):
    '''Read network options from template file into Namespace variable inps'''
    template_dict = readfile.read_template(templateFile)
    if not template_dict:
        print 'Empty template: '+templateFile
        return None
    keyList = template_dict.keys()

    if not inps:
        inps = cmdLineParse([''])

    # Read network option regardless of prefix
    for key in keyList:
        if 'selectPairs.'    in key:   template_dict[key.split('selectPairs.')[1]]    = template_dict[key]
        if 'pysar.network.'  in key:   template_dict[key.split('pysar.network.')[1]]  = template_dict[key]
        if 'select.network.' in key:   template_dict[key.split('select.network.')[1]] = template_dict[key]
    keyList = template_dict.keys()
    for key, value in template_dict.iteritems():
        if value.lower() in ['off','false','n']:  template_dict[key] = 'no'
        if value.lower() in ['on', 'true', 'y']:  template_dict[key] = 'yes'

    # Update inps value if not existed
    if not inps.method:
        if   'selectMethod' in keyList:  inps.method = template_dict['selectMethod']
        elif 'method'       in keyList:  inps.method = template_dict['method']
        else: inps.method = 'all'

    if not inps.perp_base_max:
        if 'perpBaseMax'  in keyList:  inps.perp_base_max = float(template_dict['perpBaseMax'])
        else: inps.perp_base_max = 500.0

    if not inps.temp_base_max:
        if 'lengthDayMax'   in keyList:  inps.temp_base_max = float(template_dict['lengthDayMax'])
        elif 'tempBaseMax'  in keyList:  inps.temp_base_max = float(template_dict['tempBaseMax'])
        else: inps.temp_base_max = 1800.0

    if not inps.temp_base_min:
        if 'lengthDayMin'   in keyList:  inps.temp_base_min = float(template_dict['lengthDayMin'])
        elif 'tempBaseMin'  in keyList:  inps.temp_base_min = float(template_dict['tempBaseMin'])
        else: inps.temp_base_min = 0.0

    if 'seasonal'     in keyList and template_dict['seasonal'].lower()     == 'no': inps.keep_seasonal = False
    if 'keepSeasonal' in keyList and template_dict['keepSeasonal'].lower() == 'no': inps.keep_seasonal = False

    if not inps.dop_overlap_min:
        if 'DopOverlapThresh'   in keyList:  inps.dop_overlap_min = float(template_dict['DopOverlapThresh'])
        elif 'dopOverlapThresh' in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapThresh'])
        elif 'dopOverlapMin'    in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapMin'])
        else: inps.dop_overlap_min = 15.0

    if not inps.reference_file and 'referenceFile' in keyList:  inps.reference_file = template_dict['referenceFile']
    if not inps.increment_num:
        if 'incrementNum'  in keyList:  inps.increment_num  = int(template_dict['incrementNum'])
        else: inps.increment_num = 3

    if not inps.temp_perp_list:
        if 'dayPerpList'    in keyList:  inps.temp_perp_list = template_dict['dayPerpList']
        elif 'tempPerpList' in keyList:  inps.temp_perp_list = template_dict['tempPerpList']
        else: inps.temp_perp_list = '16,1600;32,800;48,600;64,200'
        inps.temp_perp_list = [[float(j) for j in i.split(',')] for i in inps.temp_perp_list.split(';')]

    if not inps.exclude_date and 'excludeDate' in keyList:
        ex_date_list = [i for i in template_dict['excludeDate'].split(',')]
        inps.exclude_date = ptime.yymmdd(ex_date_list)

    if not inps.start_date and 'startDate' in keyList:
        inps.start_date = ptime.yyyymmdd(template_dict['startDate'])
    if not inps.end_date and 'endDate' in keyList:
        inps.end_date = ptime.yyyymmdd(template_dict['endDate'])

    if not inps.m_date and 'masterDate' in keyList:
        inps.m_date = ptime.yymmdd(template_dict['masterDate'])

    return inps
Ejemplo n.º 33
0
def read_template2inps(templateFile, inps=None):
    '''Read network options from template file into Namespace variable inps'''
    if not inps:
        inps = cmdLineParse()

    ##Read template file
    template = readfile.read_template(templateFile)
    key_list = template.keys()
    if not template:
        print 'Empty template: ' + templateFile
        return None
    prefix = 'select.network.'

    ##Extra keys
    #extra_key_list = ['masterDate','startDate','endDate']
    #for extra_key in extra_key_list:
    #    if extra_key in key_list:
    #        template[prefix+extra_key] = template[extra_key]

    #Check option prefix
    for i in ['selectPairs.']:
        if any(i in key for key in key_list):
            print '\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
            print 'WARNING: unsupported option prefix detected: selectPairs.'
            print "         Use select.network. instead"
            print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n'

    if all(prefix not in key for key in key_list):
        print '\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
        print 'ERROR: no valid input option detected in template file!'
        print 'Check the template below for supported options:'
        print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n'
        print TEMPLATE
        sys.exit(-1)

    ##Read template dict into inps namespace
    key = prefix + 'method'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.method = 'all'
        else:
            inps.method = value

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.perp_base_max = 500.0
        elif value == 'no':
            inps.perp_base_max = 1e5
        else:
            inps.perp_base_max = float(value)

    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.temp_base_max = 1800.0
        elif value == 'no':
            inps.temp_base_max = 3.65e5
        else:
            inps.temp_base_max = float(value)

    key = prefix + 'tempBaseMin'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.temp_base_min = 0.0
        else:
            inps.temp_base_min = float(value)

    key = prefix + 'keepSeasonal'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.keep_seasonal = False
        else:
            inps.keep_seasonal = True

    key = prefix + 'dopOverlapMin'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.dop_overlap_min = 15.0
        elif value == 'no':
            inps.dop_overlap_min = 0.0
        else:
            inps.dop_overlap_min = float(value)

    key = 'PLATFORM'
    if key in key_list and not inps.sensor:
        inps.sensor = template[key]

    key = 'COH_COLOR_JUMP'
    if key in key_list:
        inps.coh_thres = float(template[key])

    key = prefix + 'masterDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.m_date = None
        else:
            inps.m_date = ptime.yymmdd(value)

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.start_date = None
        else:
            inps.start_date = ptime.yyyymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.end_date = None
        else:
            inps.end_date = ptime.yyyymmdd(value)

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.exclude_date = []
        else:
            inps.exclude_date = ptime.yyyymmdd([i for i in value.split(',')])

    key = prefix + 'incrementNum'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.increment_num = 3
        else:
            inps.increment_num = int(value)

    key = prefix + 'tempPerpList'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.temp_perp_list = '16,1600;32,800;48,600;64,200'
        else:
            inps.temp_perp_list = value
    if isinstance(inps.temp_perp_list, basestring):
        inps.temp_perp_list = [[float(j) for j in i.split(',')]
                               for i in inps.temp_perp_list.split(';')]

    return inps
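
# --- Illustrative sketch (not part of the original function) ---
# The tempPerpList option above encodes threshold pairs of temporal baseline
# (days) and perpendicular baseline (meters), separated by ';'. The default
# string used in the function parses as follows:
temp_perp_str = '16,1600;32,800;48,600;64,200'
temp_perp_list = [[float(j) for j in i.split(',')]
                  for i in temp_perp_str.split(';')]
print(temp_perp_list)   # [[16.0, 1600.0], [32.0, 800.0], [48.0, 600.0], [64.0, 200.0]]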
Ejemplo n.º 34
0
def main(argv):

    # Read inputs
    inps = cmdLineParse()
    inps = read_template2inps(inps.template_file, inps)
    log(os.path.basename(sys.argv[0]) + ' ' + inps.template_file)

    project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    print 'project name: ' + project_name
    if not inps.sensor:
        inps.sensor = project_name2sensor(project_name)

    # Auto path setting for Miami user
    if not inps.baseline_file and pysar.miami_path and 'SCRATCHDIR' in os.environ:
        try:
            inps.baseline_file = glob.glob(
                os.getenv('SCRATCHDIR') + '/' + project_name +
                '/SLC/bl_list.txt')[0]
        except:
            inps.baseline_file = None

    # Pair selection from reference
    if inps.reference_file:
        print 'Use pairs info from reference file: ' + inps.reference_file
        date12_list = pnet.get_date12_list(inps.reference_file)
        date12_list = [i.replace('_', '-') for i in date12_list]

        if inps.baseline_file:
            date8_list, pbase_list, dop_list = pnet.read_baseline_file(
                inps.baseline_file)[0:3]
            date6_list = ptime.yymmdd(date8_list)
            tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Pair selection from temp/perp/dop baseline info
    else:
        if not inps.baseline_file:
            raise Exception('ERROR: No baseline file found!')

        # Check start/end/exclude date
        date8_list = pnet.read_baseline_file(inps.baseline_file)[0]
        inps.exclude_date = ptime.yyyymmdd(inps.exclude_date)
        if not inps.exclude_date:
            inps.exclude_date = []
        else:
            print 'input exclude dates: ' + str(inps.exclude_date)
        if inps.start_date:
            print 'input start date: ' + inps.start_date
            inps.exclude_date += [
                i for i in date8_list
                if float(i) < float(ptime.yyyymmdd(inps.start_date))
            ]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.end_date:
            print 'input end   date: ' + inps.end_date
            inps.exclude_date += [
                i for i in date8_list
                if float(i) > float(ptime.yyyymmdd(inps.end_date))
            ]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.exclude_date:
            print 'exclude    dates: '
            print inps.exclude_date

        # Read baseline list file: bl_list.txt
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        date8_list, pbase_list, dop_list = pnet.read_baseline_file(
            inps.baseline_file, inps.exclude_date)[0:3]
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]

        # Initial network using input methods
        inps.method = inps.method.lower().replace('-', '_')
        if inps.method in ['star', 'ps']: inps.method = 'star'
        elif inps.method.startswith('seq'): inps.method = 'sequential'
        elif inps.method.startswith('hierar'): inps.method = 'hierarchical'
        elif inps.method in [
                'mst', 'min_spanning_tree', 'minimum_spanning_tree'
        ]:
            inps.method = 'mst'
        print 'select method: ' + inps.method

        if inps.method == 'all':
            date12_list = pnet.select_pairs_all(date6_list)
        elif inps.method == 'delaunay':
            date12_list = pnet.select_pairs_delaunay(date6_list, pbase_list,
                                                     inps.norm)
        elif inps.method == 'star':
            date12_list = pnet.select_pairs_star(date6_list)
        elif inps.method == 'sequential':
            date12_list = pnet.select_pairs_sequential(date6_list,
                                                       inps.increment_num)
        elif inps.method == 'hierarchical':
            date12_list = pnet.select_pairs_hierarchical(
                date6_list, pbase_list, inps.temp_perp_list)
        elif inps.method == 'mst':
            date12_list = pnet.select_pairs_mst(date6_list, pbase_list)
        else:
            raise Exception('Unrecognized select method: ' + inps.method)
        print 'initial number of interferograms: ' + str(len(date12_list))

        # Filter pairs (optional) using temp/perp/doppler baseline threshold
        if inps.method in ['star', 'hierarchical', 'mst']:
            inps.threshold = False
        if inps.threshold:
            # Temporal baseline
            date12_list = pnet.threshold_temporal_baseline(date12_list, inps.temp_base_max,\
                                                           inps.keep_seasonal, inps.temp_base_min)
            print 'number of interferograms after filtering of <%d, %d> days in temporal baseline: %d'\
                  % (inps.temp_base_min, inps.temp_base_max, len(date12_list))
            if inps.keep_seasonal:
                print '\tkeep seasonal pairs, i.e. pairs with temporal baseline == N*years +/- one month'

            # Perpendicular spatial baseline
            date12_list = pnet.threshold_perp_baseline(date12_list, date6_list,
                                                       pbase_list,
                                                       inps.perp_base_max)
            print 'number of interferograms after filtering of max %d meters in perpendicular baseline: %d'\
                  % (inps.perp_base_max, len(date12_list))

            # Doppler Overlap Percentage
            if inps.sensor:
                bandwidth_az = pnet.azimuth_bandwidth(inps.sensor)
                date12_list = pnet.threshold_doppler_overlap(date12_list, date6_list, dop_list,\
                                                             bandwidth_az, inps.dop_overlap_min/100.0)
                print 'number of interferograms after filtering of min '+str(inps.dop_overlap_min)+'%'+\
                      ' overlap in azimuth Doppler frequency: '+str(len(date12_list))

    # Write ifgram_list.txt
    if not date12_list:
        print 'WARNING: No interferogram selected!'
        return None

    # date12_list to date_list
    m_dates = [
        date12.replace('_', '-').split('-')[0] for date12 in date12_list
    ]
    s_dates = [
        date12.replace('_', '-').split('-')[1] for date12 in date12_list
    ]
    try:
        print 'number of acquisitions   input   : ' + str(len(date6_list))
    except:
        pass
    print 'number of acquisitions   selected: ' + str(
        len(list(set(m_dates + s_dates))))
    print 'number of interferograms selected: ' + str(len(date12_list))

    # Output directory/filename
    if not inps.outfile:
        if pysar.miami_path and 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv(
                'SCRATCHDIR') + '/' + project_name + '/PROCESS'
        else:
            try:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.reference_file))
            except:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir + '/ifgram_list.txt'
    inps.outfile = os.path.abspath(inps.outfile)
    inps.out_dir = os.path.dirname(inps.outfile)
    if not os.path.isdir(inps.out_dir):
        os.makedirs(inps.out_dir)

    print 'writing >>> ' + inps.outfile
    if not inps.baseline_file:
        np.savetxt(inps.outfile, date12_list, fmt='%s')
        return inps.outfile

    ## Calculate Bperp, Btemp and predicted coherence
    ifgram_num = len(date12_list)
    ifgram_pbase_list = []
    ifgram_tbase_list = []

    for i in range(ifgram_num):
        m_date, s_date = date12_list[i].split('-')
        m_idx = date6_list.index(m_date)
        s_idx = date6_list.index(s_date)
        pbase = pbase_list[s_idx] - pbase_list[m_idx]
        tbase = tbase_list[s_idx] - tbase_list[m_idx]
        ifgram_pbase_list.append(pbase)
        ifgram_tbase_list.append(tbase)

    try:
        inps.coherence_list = pnet.simulate_coherence(
            date12_list, inps.baseline_file,
            sensor=inps.sensor).flatten().tolist()
        inps.cbar_label = 'Simulated coherence'
    except:
        inps.coherence_list = None

    ##### Write txt file
    fl = open(inps.outfile, 'w')
    fl.write('#Interferograms configuration generated by select_network.py\n')
    fl.write('#   Date12      Btemp(days)    Bperp(m)    sim_coherence\n')
    for i in range(len(date12_list)):
        line = '%s   %6.0f         %6.1f' % (
            date12_list[i], ifgram_tbase_list[i], ifgram_pbase_list[i])
        if inps.coherence_list:
            line += '       %1.4f' % (inps.coherence_list[i])
        fl.write(line + '\n')
    fl.close()

    ##### Plot network info
    if not inps.disp_fig:
        plt.switch_backend('Agg')

    out_fig_name = 'BperpHistory.pdf'
    print 'plotting baseline history in temp/perp baseline domain to file: ' + out_fig_name
    fig2, ax2 = plt.subplots()
    ax2 = pnet.plot_perp_baseline_hist(ax2, date8_list, pbase_list)
    plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    out_fig_name = 'Network.pdf'
    print 'plotting network / pairs  in temp/perp baseline domain to file: ' + out_fig_name
    fig1, ax1 = plt.subplots()
    ax1 = pnet.plot_network(ax1,
                            date12_list,
                            date8_list,
                            pbase_list,
                            plot_dict=vars(inps),
                            print_msg=False)
    plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    out_fig_name = 'CoherenceMatrix.pdf'
    if inps.coherence_list:
        print 'plotting predicted coherence matrix to file: ' + out_fig_name
        fig3, ax3 = plt.subplots()
        ax3 = pnet.plot_coherence_matrix(ax3,
                                         date12_list,
                                         inps.coherence_list,
                                         plot_dict=vars(inps))
        plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    if inps.disp_fig:
        plt.show()

    return inps.outfile
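
# --- Illustrative sketch (not part of the original function) ---
# The per-pair Btemp/Bperp values written to ifgram_list.txt above are simple
# differences of the per-acquisition values, slave minus master. With made-up
# numbers:
date6_list = ['100102', '100213', '100327']    # hypothetical acquisition dates
pbase_list = [0.0, 55.3, -120.8]               # perp baseline per date [m]
tbase_list = [0, 42, 84]                       # days since the first date
m_date, s_date = '100102-100327'.split('-')
m_idx, s_idx = date6_list.index(m_date), date6_list.index(s_date)
print('Bperp = %.1f m, Btemp = %d days' % (pbase_list[s_idx] - pbase_list[m_idx],
                                           tbase_list[s_idx] - tbase_list[m_idx]))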
Ejemplo n.º 35
0
def main(argv):

    ##### Default
    fontSize    = 12
    lineWidth   = 2
    markerColor = 'crimson'
    markerSize  = 16

    disp_fig  = 'no'
    save_fig  = 'yes'
    save_list = 'yes'

    ref_file  = 'reference_date.txt'
    drop_file = 'drop_date.txt'

    ##### Check Inputs
    if len(sys.argv)>3:
        try:
            opts, args = getopt.getopt(argv,'h:f:m:o:x:y:',['help','circle='])
        except getopt.GetoptError:
            print 'Error in reading input options!';  usage() ; sys.exit(1)

        for opt,arg in opts:
            if opt in ("-h","--help"):    usage() ; sys.exit()
            elif opt == '-f':  File      = arg
            elif opt == '-m':  maskFile  = arg
            elif opt == '-x':  xsub = [int(i) for i in arg.split(':')];  xsub.sort()
            elif opt == '-y':  ysub = [int(i) for i in arg.split(':')];  ysub.sort()
            elif opt == '--circle'   :  cir_par   = [i for i in arg.split(';')]
            #elif opt == '-o':  outName   = arg
            
    else:
        try:  File = argv[0]
        except: usage(); sys.exit(1)
        try:  maskFile = argv[1]
        except: pass

    try:  atr  = readfile.read_attribute(File)
    except: usage(); sys.exit(1)
    ext      = os.path.splitext(File)[1].lower()
    FileBase = os.path.basename(File).split(ext)[0]
    outNameBase = 'spatialMean_'+FileBase
    print '\n*************** Spatial Average ******************'

    ##### Input File Info
    k = atr['FILE_TYPE']
    print 'Input file is '+k
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    h5file = h5py.File(File)
    epochList = h5file[k].keys()
    epochList = sorted(epochList)
    epochNum  = len(epochList)
    print 'number of epoch: '+str(epochNum)
    dates,datevector = ptime.date_list2vector(epochList)

    ##### Mask Info
    try:
        Mask_orig,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile
        Masking = 'yes'
    except:
        print 'No mask. Use the whole area for spatial average.'
        Masking = 'no'
        Mask_orig=np.ones((length,width))
    Mask = np.zeros((length,width))
    Mask[:] = Mask_orig[:]

    ## Bounding Subset
    try:
        xsub
        ysub
        ysub,xsub = subset.check_subset_range(ysub,xsub,atr)
        Mask[ysub[0]:ysub[1],xsub[0]:xsub[1]] = Mask_orig[ysub[0]:ysub[1],xsub[0]:xsub[1]]*2
        #Mask[0:ysub[0],:]      = 0
        #Mask[ysub[1]:length,:] = 0
        #Mask[:,0:xsub[0]]      = 0
        #Mask[:,xsub[1]:width]  = 0
    except:
        Mask = Mask_orig*2
        print 'No subset input.'

    ## Circle Inputs
    try:
        cir_par
        for i in range(len(cir_par)):
            cir_idx = circle_index(atr,cir_par[i])
            Mask[cir_idx] = Mask_orig[cir_idx]
            print 'Circle '+str(i)+': '+cir_par[i]
    except: print 'No circle of interest input.'
    
    ## Mask output
    idx = Mask == 2
    idxNum = float(sum(sum(idx)))
    
    fig = plt.figure()
    plt.imshow(Mask,cmap='gray')
    plt.savefig(outNameBase+'_mask.png',bbox_inches='tight')
    print 'save mask to '+outNameBase+'_mask.png'
    #fig.clf()

    ##### Calculation
    meanList   = np.zeros(epochNum)
    pixPercent = np.zeros(epochNum)
    pixT = 0.7
    print 'calculating ...'
    print '  Date       Mean   Percentage'
    for i in range(epochNum):
        epoch = epochList[i]
        d      = h5file[k].get(epoch)[:]
        #d[Mask==0]  = np.nan
        
        meanList[i]   = np.nanmean(d[idx])
        pixPercent[i] = np.sum(d[idx] >= pixT)/idxNum
        
        print epoch+' :   %.2f    %.1f%%'%(meanList[i],pixPercent[i]*100)
    del d
    h5file.close()

    ##### Reference date - Max Value
    top3 = sorted(zip(meanList,epochList), reverse=True)[:3]
    print '------------ Top 3 Mean ------------------'
    print top3
    ## Write to txt file
    fref = open(ref_file,'w')
    fref.write(str(top3[0][1])+'\n')
    fref.close()
    print 'write optimal reference date to '+ref_file
    idxMean = meanList == np.nanmax(meanList)

    ##### Drop dates - mean threshold
    #meanT = 0.7
    #idxMean  = meanList < meanT
    #print '------------ Mean Value < '+str(meanT)+' --------'
    #print np.array(epochList)[idxMean]
    #print meanList[idxMean]

    ##### Drop dates - good pixel percentage
    pixNumT = 0.7
    print '------------ Good Pixel Percentage < %.0f%% -------'%(pixNumT*100)
    idxPix = pixPercent < pixNumT
    dropEpochList = np.array(epochList)[idxPix]
    print dropEpochList
    print pixPercent[idxPix]
    ## Write to txt file
    fdrop = open(drop_file,'w')
    for i in range(len(dropEpochList)):
        fdrop.write(str(dropEpochList[i])+'\n')
    fdrop.close()
    print 'write drop dates to '+drop_file
    print '-------------------------------------------'

    ##### Display
    fig = plt.figure(figsize=(12,12))
    ax  = fig.add_subplot(211)
    ax.plot(dates, meanList, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    #ax.plot([dates[0],dates[-1]],[meanT,meanT], '--b', lw=lineWidth)
    #sc = ax.scatter(dates, np.tile(0.5,epochNum), c=meanList, s=22**2, alpha=0.3, vmin=0.0, vmax=1.0)
    #ax.scatter(np.array(dates)[idxMean], 0.5, c=meanList[idxMean], s=22**2, alpha=1.0, vmin=0.0, vmax=1.0)
    ax = ptime.auto_adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Spatial Average Value', fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    #cbar = plt.colorbar(sc)
    #cbar.set_label('Spatial Mean of Normalized Sum Epochs')

    ax  = fig.add_subplot(212)
    ax.plot(dates, pixPercent, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    ax.plot([dates[0],dates[-1]],[pixNumT,pixNumT], '--b', lw=lineWidth)
    ax = ptime.auto_adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Percentage of Pixels with Value > '+str(pixNumT), fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    vals = ax.get_yticks()
    ax.set_yticklabels(['{:3.0f}%'.format(i*100) for i in vals])

    if save_fig == 'yes':
        plt.savefig(outNameBase+'.png',bbox_inches='tight')
        print 'save figure to '+outNameBase+'.png'

    if disp_fig == 'yes':
        plt.show()

    ##### Output
    if save_list == 'yes':
        epochList6 = ptime.yymmdd(epochList)
        fl = open(outNameBase+'.txt','w')
        for i in range(epochNum):
            str_line = epochList6[i]+'    %.2f    %.2f\n'%(meanList[i],pixPercent[i])
            fl.write(str_line)
        fl.close()
        print 'write data to '+outNameBase+'.txt\n'
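
# --- Illustrative sketch (not part of the original function) ---
# Per epoch, the function above reports the mean over the selected pixels and
# the fraction of those pixels at or above the threshold pixT. A self-contained
# version with random stand-in data:
import numpy as np

np.random.seed(0)
d = np.random.rand(100, 100)                 # stand-in for one epoch of data
idx = np.ones(d.shape, dtype=bool)           # stand-in for the Mask == 2 pixels
idxNum = float(idx.sum())
pixT = 0.7
mean_val = np.nanmean(d[idx])
pix_percent = np.sum(d[idx] >= pixT) / idxNum
print('%.2f    %.1f%%' % (mean_val, pix_percent * 100))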
Ejemplo n.º 36
0
def main(argv):
    inps = cmdLineParse()
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    print '\n******************** Plot Network **********************'

    # Output figure name
    figName1 = 'BperpHist' + inps.fig_ext
    figName2 = 'Network' + inps.fig_ext
    if 'Modified' in inps.file:
        figName1 = 'BperpHist_Modified' + inps.fig_ext
        figName2 = 'Network_Modified' + inps.fig_ext

    ##### 1. Read Info
    # Read dateList and bperpList
    ext = os.path.splitext(inps.file)[1]
    if ext in ['.h5']:
        k = readfile.read_attribute(inps.file)['FILE_TYPE']
        print 'reading date and perpendicular baseline from ' + k + ' file: ' + os.path.basename(
            inps.file)
        if not k in multi_group_hdf5_file:
            print 'ERROR: only the following file types are supported:\n' + str(
                multi_group_hdf5_file)
            sys.exit(1)
        Bp = ut.Baseline_timeseries(inps.file)
        date8List = ptime.igram_date_list(inps.file)
        date6List = ptime.yymmdd(date8List)
    else:
        print 'reading date and perpendicular baseline from baseline list file: ' + inps.bl_list_file
        date8List, Bp = pnet.read_baseline_file(inps.bl_list_file)[0:2]
        date6List = ptime.yymmdd(date8List)
    print 'number of acquisitions: ' + str(len(date8List))

    # Read Pairs Info
    print 'reading pairs info from file: ' + inps.file
    date12_list = pnet.get_date12_list(inps.file)
    pairs_idx = pnet.date12_list2index(date12_list, date6List)
    print 'number of pairs       : ' + str(len(pairs_idx))

    # Read Coherence List
    inps.coherence_list = None
    if inps.coherence_file and os.path.isfile(inps.coherence_file):
        ext = os.path.splitext(inps.coherence_file)[1]
        if ext in ['.h5']:
            listFile = os.path.splitext(
                inps.coherence_file)[0] + '_spatialAverage.list'
            if os.path.isfile(listFile):
                print 'reading coherence value from existing ' + listFile
                fcoh = np.loadtxt(listFile, dtype=str)
                inps.coherence_list = [float(i) for i in fcoh[:, 1]]
                coh_date12_list = [i for i in fcoh[:, 0]]
            else:
                print 'calculating average coherence value from ' + inps.coherence_file
                inps.coherence_list = ut.spatial_average(inps.coherence_file,
                                                         saveList=True)
                coh_date12_list = pnet.get_date12_list(inps.coherence_file)
        else:
            print 'reading coherence value from ' + inps.coherence_file
            fcoh = np.loadtxt(inps.coherence_file, dtype=str)
            inps.coherence_list = [float(i) for i in fcoh[:, 1]]
            coh_date12_list = [i for i in fcoh[:, 0]]
        # Check length of coherence file and input file
        if not set(coh_date12_list) == set(date12_list):
            print 'WARNING: input coherence list has different pairs/date12 from input file'
            print 'turn off the color plotting of interferograms based on coherence'
            inps.coherence_list = None

    ##### 2. Plot
    # Fig 1 - Baseline History
    fig1 = plt.figure(1)
    ax1 = fig1.add_subplot(111)
    ax1 = pnet.plot_perp_baseline_hist(ax1, date8List, Bp, vars(inps))

    if inps.save_fig:
        fig1.savefig(figName1, bbox_inches='tight')
        print 'save figure to ' + figName1

    # Fig 2 - Interferogram Network
    fig2 = plt.figure(2)
    ax2 = fig2.add_subplot(111)
    ax2 = pnet.plot_network(ax2, pairs_idx, date8List, Bp, vars(inps))

    if inps.save_fig:
        fig2.savefig(figName2, bbox_inches='tight')
        print 'save figure to ' + figName2

    if inps.save_list:
        txtFile = os.path.splitext(inps.file)[0] + '_date12.list'
        np.savetxt(txtFile, date12_list, fmt='%s')
        print 'save pairs/date12 info to file: ' + txtFile

    if inps.disp_fig:
        plt.show()