Example No. 1
def read_pairs_list(date12ListFile, dateList=[]):
    '''Read Pairs List file like below:
    070311-070426
    070311-070611
    ...
    '''
    # Read date12 list file
    date12List = sorted(list(np.loadtxt(date12ListFile, dtype=str)))

    # Get dateList from date12List
    if not dateList:
        dateList = []
        for date12 in date12List:
            dates = date12.split('-')
            if not dates[0] in dateList: dateList.append(dates[0])
            if not dates[1] in dateList: dateList.append(dates[1])
        dateList.sort()
    date6List = ptime.yymmdd(dateList)

    # Get pair index
    pairs_idx = []
    for date12 in date12List:
        dates = date12.split('-')
        pair_idx = [date6List.index(dates[0]), date6List.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
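A minimal sketch of the same index-pairing logic on a hand-written list, assuming the pair file has already been read into plain 'YYMMDD-YYMMDD' strings (no numpy or ptime needed; values are made up):

date12_list = ['070311-070426', '070311-070611', '070426-070611']   # toy input
date_list = sorted({d for date12 in date12_list for d in date12.split('-')})
pairs_idx = [[date_list.index(a), date_list.index(b)]
             for a, b in (date12.split('-') for date12 in date12_list)]
print(pairs_idx)   # [[0, 1], [0, 2], [1, 2]]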
Example No. 2
def coherence_matrix(date12_list, coh_list):
    '''Return coherence matrix based on input date12 list and its coherence
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        coh_matrix  - 2D np.array with dimension length = date num
                      np.nan for interferograms that do not exist
                      (diagonal elements also stay np.nan; the 1.0 fill below is commented out)
    '''
    # Get date list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(
        sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))
    date_num = len(date6_list)

    coh_mat = np.zeros([date_num, date_num])
    coh_mat[:] = np.nan
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        coh = coh_list[date12_list.index(date12)]
        coh_mat[idx1, idx2] = coh  #symmetric
        coh_mat[idx2, idx1] = coh

    #for i in range(date_num):    # diagonal value
    #    coh_mat[i, i] = 1.0

    return coh_mat
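A toy run of the same construction for three dates and two interferograms, assuming numpy is imported as np; missing pairs (and the diagonal) stay NaN and every existing pair is filled symmetrically. All values below are made up:

import numpy as np

dates = ['070106', '070709', '070824']                      # hypothetical dates
ifgram_coh = {'070106-070709': 0.8, '070709-070824': 0.6}   # hypothetical coherences

coh_mat = np.full((len(dates), len(dates)), np.nan)
for date12, coh in ifgram_coh.items():
    i, j = (dates.index(d) for d in date12.split('-'))
    coh_mat[i, j] = coh_mat[j, i] = coh                     # symmetric fill
print(coh_mat)
# [[nan 0.8 nan]
#  [0.8 nan 0.6]
#  [nan 0.6 nan]]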
Example No. 3
def threshold_coherence_based_mst(date12_list, coh_list):
    '''Return a minimum spanning tree of network based on the coherence inverse.
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        mst_date12_list - list of string in YYMMDD-YYMMDD format, for MST network of interferograms 
    '''
    # coh_list --> coh_mat --> weight_mat
    coh_mat = coherence_matrix(date12_list, coh_list)
    mask = ~np.isnan(coh_mat)
    wei_mat = np.zeros(coh_mat.shape)
    wei_mat[:] = np.inf
    wei_mat[mask] = 1 / coh_mat[mask]

    # MST path based on weight matrix
    wei_mat_csr = csr_matrix(wei_mat)
    mst_mat_csr = minimum_spanning_tree(wei_mat_csr)

    # Get date6_list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(
        sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [
        date_idx_array.tolist() for date_idx_array in find(mst_mat_csr)[0:2]
    ]
    mst_date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]] + '-' + date6_list[idx[1]]
        mst_date12_list.append(date12)
    return mst_date12_list
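A sketch of the scipy calls at the core of this function, on a tiny hand-built inverse-coherence weight matrix. For simplicity the sketch leaves non-existing pairs as zero/absent edges instead of the np.inf fill used above; the coherence values are made up:

import numpy as np
from scipy.sparse import csr_matrix, find
from scipy.sparse.csgraph import minimum_spanning_tree

coh = {(0, 1): 0.8, (1, 2): 0.6, (0, 2): 0.3}        # toy pair coherences
wei_mat = np.zeros((3, 3))
for (i, j), c in coh.items():
    wei_mat[i, j] = 1.0 / c                          # weight = inverse coherence

mst = minimum_spanning_tree(csr_matrix(wei_mat))
idx1, idx2 = find(mst)[0:2]
print(list(zip(idx1.tolist(), idx2.tolist())))       # index pairs kept by the MST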
Example No. 4
def select_pairs_sequential(date_list, increment_num=2):
    '''Select Pairs in a Sequential way:
        For each acquisition, find its increment_num nearest acquisitions back in time.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
    Reference:
        Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series, IEEE TGRS, 51(7), 4249-4259.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date_idx_list = list(range(len(date6_list)))

    # Get pairs index list
    date12_idx_list = []
    for date_idx in date_idx_list:
        for i in range(increment_num):
            if date_idx - i - 1 >= 0:
                date12_idx_list.append([date_idx - i - 1, date_idx])
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    return date12_list
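The index selection on its own, for five acquisitions and increment_num=2 (date conversion and formatting omitted):

num_date, increment_num = 5, 2
date12_idx_list = [[i - k - 1, i] for i in range(num_date)
                   for k in range(increment_num) if i - k - 1 >= 0]
print(sorted(date12_idx_list))
# [[0, 1], [0, 2], [1, 2], [1, 3], [2, 3], [2, 4], [3, 4]]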
Example No. 5
def manual_select_pairs_to_remove(File):
    '''Manually select interferograms to remove'''
    print(
        '----------------------------------------------------------------------------'
    )
    print('Manually select interferograms to remove')
    print(
        'Click two dates - points - in the figure to select one interferogram pair'
    )
    print('repeat until you have selected all pairs you would like to remove')
    print('then close the figure to continue the program ...')
    print(
        '----------------------------------------------------------------------------'
    )
    # Display the network
    fig = plt.figure()
    ax = fig.add_subplot(111)

    date12_orig = pnet.get_date12_list(File)
    bperp_list = ut.perp_baseline_ifgram2timeseries(File)[0].tolist()
    date8_list = ptime.ifgram_date_list(File)
    ax = pnet.plot_network(ax, date12_orig, date8_list, bperp_list)
    print('display the network of interferogram of file: ' + File)

    date6_list = ptime.yymmdd(date8_list)
    dates_array = np.array(ptime.date_list2vector(date8_list)[0])
    dateNum_array = mdates.date2num(dates_array)
    bperp_array = np.array(bperp_list)

    date_click = []
    date12_click = []

    def onclick(event):
        xClick = event.xdata
        yClick = event.ydata
        idx = nearest_neighbor(xClick, yClick, dateNum_array, bperp_array)
        date6 = date6_list[idx]
        print('click at ' + date6)
        date_click.append(date6)
        if len(date_click) % 2 == 0 and date_click[-2] != date_click[-1]:
            [m_date, s_date] = sorted(date_click[-2:])
            m_idx = date6_list.index(m_date)
            s_idx = date6_list.index(s_date)
            date12 = m_date + '-' + s_date
            if date12 in date12_orig:
                print('select date12: ' + date12)
                date12_click.append(date12)
                ax.plot([dateNum_array[m_idx], dateNum_array[s_idx]],
                        [bperp_array[m_idx], bperp_array[s_idx]],
                        'r',
                        lw=4)
            else:
                print(date12 + ' does not exist in the input file')
        plt.draw()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    plt.show()
    return date12_click
Example No. 6
def select_master_interferogram(date12_list,
                                date_list,
                                pbase_list,
                                m_date=None):
    '''Select reference interferogram based on input temp/perp baseline info
    If master_date is specified, select its closest slave_date; otherwise, choose the closest pair
    among all pairs as master interferogram.
    Example:
        master_date12 = pnet.select_master_interferogram(date12_list, date_list, pbase_list)
    '''
    pbase_array = np.array(pbase_list, dtype='float64')
    # Get temporal baseline
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    tbase_array = np.array(ptime.date_list2tbase(date8_list)[0],
                           dtype='float64')
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_array) - min(pbase_array)) / (
        max(tbase_array) - min(tbase_array))
    tbase_array *= temp2perp_scale

    # Calculate temporal/perpendicular baseline distance for input pairs
    idx1 = np.array(
        [date6_list.index(date12.split('-')[0]) for date12 in date12_list])
    idx2 = np.array(
        [date6_list.index(date12.split('-')[1]) for date12 in date12_list])
    base_distance = np.sqrt((tbase_array[idx2] - tbase_array[idx1])**2 +
                            (pbase_array[idx2] - pbase_array[idx1])**2)

    # Get master interferogram index
    if not m_date:
        # Choose pair with shortest temp/perp baseline
        m_date12_idx = np.argmin(base_distance)
    else:
        m_date = ptime.yymmdd(m_date)
        # Choose the pair containing m_date with the shortest temp/perp baseline
        m_date12_idx_array = np.array([
            date12_list.index(date12) for date12 in date12_list
            if m_date in date12
        ])
        min_base_distance = np.min(base_distance[m_date12_idx_array])
        m_date12_idx = np.where(base_distance == min_base_distance)[0][0]

    m_date12 = date12_list[m_date12_idx]
    return m_date12
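The Pepe and Lanari (2006) normalization and pair distance in isolation, with made-up baselines and numpy only: temporal baselines are rescaled so that the temporal and perpendicular axes span the same range, then a pair's "length" is its Euclidean distance in that plane.

import numpy as np

tbase = np.array([0., 46., 92., 138.])      # days since first date (hypothetical)
pbase = np.array([0., 260., -80., 150.])    # perpendicular baselines in m (hypothetical)

temp2perp_scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())
tbase_scaled = tbase * temp2perp_scale

# distance of the pair formed by dates 0 and 1 in the scaled plane
dist_01 = np.hypot(tbase_scaled[1] - tbase_scaled[0], pbase[1] - pbase[0])
print(temp2perp_scale, dist_01)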
Example No. 7
def write_pairs_list(pairs, dateList, outName):
    '''Write pairs list file.'''
    dateList6 = ptime.yymmdd(dateList)
    with open(outName, 'w') as fl:
        for idx in pairs:
            date12 = dateList6[idx[0]] + '-' + dateList6[idx[1]] + '\n'
            fl.write(date12)
    return 1
Example No. 8
def select_pairs_all(date_list):
    '''Select All Possible Pairs/Interferograms
    Input : date_list   - list of date in YYMMDD/YYYYMMDD format
    Output: date12_list - list date12 in YYMMDD-YYMMDD format
    Reference:
        Berardino, P., G. Fornaro, R. Lanari, and E. Sansosti (2002), A new algorithm for surface deformation monitoring
        based on small baseline differential SAR interferograms, IEEE TGRS, 40(11), 2375-2383.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date12_list = list(itertools.combinations(date6_list, 2))
    date12_list = [date12[0] + '-' + date12[1] for date12 in date12_list]
    return date12_list
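The combinatorial core without the date-format helpers:

import itertools

date6_list = ['070106', '070709', '070824']
date12_list = ['-'.join(pair) for pair in itertools.combinations(date6_list, 2)]
print(date12_list)   # ['070106-070709', '070106-070824', '070709-070824']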
Example No. 9
def date12_list2index(date12_list, date_list=[]):
    '''Convert a list of date12 strings into a list of index pairs'''
    # Get dateList from date12List
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = list(set(m_dates + s_dates))
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(date_list)))

    # Get pair index
    pairs_idx = []
    for date12 in date12_list:
        dates = date12.split('-')
        pair_idx = [date6_list.index(dates[0]), date6_list.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
Example No. 10
def read_baseline_file(baselineFile, exDateList=[]):
    '''Read bl_list.txt without dates listed in exDateList
    # Date  Bperp    dop0/PRF  dop1/PRF   dop2/PRF      PRF    slcDir
    070106     0.0   0.03      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070106/
    070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/
    070824  2787.3   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070824/
    ...
    
    Examples:
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile)
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile, ['080520','100726'])
        date8List, perpBaseList = read_baseline_file(baselineFile)[0:2]
    '''
    exDateList = ptime.yymmdd(exDateList)
    if not exDateList: exDateList = []

    ## Read baseline file into lines
    fb = open(baselineFile)
    lines = []
    for line in fb:
        l = str.replace(line, '\n', '').strip()
        lines.append(l)
    fb.close()

    ## Read each line and put the values into arrays
    date6List = []
    perpBaseList = []
    dopplerList = []
    slcDirList = []
    for line in lines:
        c = line.split()  # splits on white space
        date = c[0]
        if not date in exDateList:
            date6List.append(date)
            perpBaseList.append(float(c[1]))
            dop = np.array([float(c[2]), float(c[3]), float(c[4])])
            prf = float(c[5])
            dop *= prf
            dopplerList.append(dop)
            slcDirList.append(c[6])

    date8List = ptime.yyyymmdd(date6List)
    return date8List, perpBaseList, dopplerList, slcDirList
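Parsing a single bl_list.txt record in isolation (the sample line is taken from the docstring above; numpy assumed as np):

import numpy as np

line = '070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/'
c = line.split()
date, pbase = c[0], float(c[1])
dop = np.array([float(c[2]), float(c[3]), float(c[4])]) * float(c[5])   # Doppler/PRF terms scaled by PRF
slc_dir = c[6]
print(date, pbase, dop, slc_dir)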
Example No. 11
def select_pairs_mst(date_list, pbase_list):
    '''Select Pairs using Minimum Spanning Tree technique
        Connection Cost is calculated using the baseline distance in perp and scaled temporal baseline (Pepe and Lanari,
        2006, TGRS) plane.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
    References:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Perissin D., Wang T. (2012), Repeat-pass SAR interferometry with partially coherent targets. IEEE TGRS. 271-280
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (max(tbase_list) -
                                                             min(tbase_list))
    tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Get weight matrix
    ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
    ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
    ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
    ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix

    weightMat = np.sqrt(
        np.square(ttMat) +
        np.square(ppMat))  # 2D distance matrix in temp/perp domain
    weightMat = csr_matrix(weightMat)  # compress sparse row matrix

    # MST path based on weight matrix
    mstMat = minimum_spanning_tree(weightMat)

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list
     ] = [date_idx_array.tolist() for date_idx_array in find(mstMat)[0:2]]
    date12_list = [
        date6_list[m_idx_list[i]] + '-' + date6_list[s_idx_list[i]]
        for i in range(len(m_idx_list))
    ]
    return date12_list
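How the pairwise weight matrix is assembled with meshgrid, sketched on toy baselines (the MST step afterwards is the same as in the coherence-based example earlier):

import numpy as np

tbase = np.array([0., 46., 92.])     # scaled temporal baselines (hypothetical)
pbase = np.array([0., 120., -60.])   # perpendicular baselines in m (hypothetical)

tt1, tt2 = np.meshgrid(tbase, tbase)
pp1, pp2 = np.meshgrid(pbase, pbase)
weight_mat = np.sqrt((tt1 - tt2) ** 2 + (pp1 - pp2) ** 2)   # symmetric, zero diagonal
print(np.round(weight_mat, 1))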
Example No. 12
def threshold_perp_baseline(date12_list,
                            date_list,
                            pbase_list,
                            pbase_max,
                            pbase_min=0.0):
    '''Remove pairs/interferograms outside of [pbase_min, pbase_max]
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string for date in YYMMDD/YYYYMMDD format, optional
        pbase_list  : list of float for perpendicular spatial baseline
        pbase_max   : float, maximum perpendicular baseline
        pbase_min   : float, minimum perpendicular baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_perp_baseline(date12_list, date_list, pbase_list, 500)
    '''
    if not date12_list: return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if not len(date_list) == len(pbase_list):
            print(
                'ERROR: number of existing dates does not equal the number of perp baselines!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        pbase = abs(pbase_list[idx1] - pbase_list[idx2])
        if pbase_min <= pbase <= pbase_max:
            date12_list_out.append(date12)
    return date12_list_out
Example No. 13
def threshold_doppler_overlap(date12_list,
                              date_list,
                              dop_list,
                              bandwidth_az,
                              dop_overlap_min=0.15):
    '''Remove pairs/interferograms with azimuth Doppler overlap smaller than the given minimum
    Inputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYMMDD/YYYYMMDD format, optional
        dop_list    : list of list of 3 float, for centroid Doppler frequency
        bandwidth_az    : float, bandwidth in azimuth direction
        dop_overlap_min : float, minimum overlap of azimuth Doppler frequency
    Outputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
    '''
    if not date12_list: return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if not len(date_list) == len(dop_list):
            print(
                'ERROR: number of existing dates does not equal the number of Doppler values!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        dop_overlap = calculate_doppler_overlap(dop_list[idx1], dop_list[idx2],
                                                bandwidth_az)
        if dop_overlap >= dop_overlap_min:
            date12_list_out.append(date12)
    return date12_list_out
Example No. 14
def select_pairs_star(date_list, m_date=None, pbase_list=[]):
    '''Select star-like network of interferograms/pairs; it is a single-master network, similar to the PS approach.
    Usage:
        m_date : master date, choose it based on the following criteria:
                 1) near the center in temporal and spatial baseline
                 2) prefer winter over summer for less temporal decorrelation
    Reference:
        Ferretti, A., C. Prati, and F. Rocca (2001), Permanent scatterers in SAR interferometry, IEEE TGRS, 39(1), 8-20.
    '''
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)

    # Select master date if not given
    if not m_date:
        m_date = select_master_date(date8_list, pbase_list)
        print('auto select master date: ' + m_date)

    # Check input master date
    m_date8 = ptime.yyyymmdd(m_date)
    if m_date8 not in date8_list:
        print('Input master date does not exist in the date list!')
        print('Input master date: ' + str(m_date8))
        print('Input date list: ' + str(date8_list))
        m_date8 = None

    # Generate star/ps network
    m_idx = date8_list.index(m_date8)
    date12_idx_list = [
        sorted([m_idx, s_idx]) for s_idx in range(len(date8_list))
        if s_idx != m_idx
    ]
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]

    return date12_list
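The star-network index pattern by itself, for five dates with the master at index 2:

num_date, m_idx = 5, 2
date12_idx_list = [sorted([m_idx, s_idx]) for s_idx in range(num_date) if s_idx != m_idx]
print(date12_idx_list)   # [[0, 2], [1, 2], [2, 3], [2, 4]]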
Example No. 15
def read_igram_pairs(igramFile):
    '''Read pairs index from hdf5 file'''
    ## Read Igram file
    h5file = h5py.File(igramFile, 'r')
    k = list(h5file.keys())
    if 'interferograms' in k: k[0] = 'interferograms'
    elif 'coherence' in k: k[0] = 'coherence'
    if k[0] not in ['interferograms', 'coherence', 'wrapped']:
        print('Only interferograms / coherence / wrapped are supported.')
        sys.exit(1)

    dateList = ptime.ifgram_date_list(igramFile)
    dateList6 = ptime.yymmdd(dateList)

    pairs = []
    igramList = list(h5file[k[0]].keys())
    for igram in igramList:
        date12 = h5file[k[0]][igram].attrs['DATE12'].split('-')
        pairs.append([dateList6.index(date12[0]), dateList6.index(date12[1])])
    h5file.close()

    pairs = pair_sort(pairs)

    return pairs
Example No. 16
def select_pairs_delaunay(date_list, pbase_list, norm=True):
    '''Select Pairs using Delaunay Triangulation based on temporal/perpendicular baselines
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
        norm       : normalize temporal baseline to perpendicular baseline
    Key points
        1. Define a ratio between perpendicular and temporal baseline axis units (Pepe and Lanari, 2006, TGRS).
        2. Pairs with too large perpendicular / temporal baseline or Doppler centroid difference should be removed
           after this, using a threshold, to avoid strong decorrelations (Zebker and Villasenor, 1992, TGRS).
    Reference:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Zebker, H. A., and J. Villasenor (1992), Decorrelation in interferometric radar echoes, IEEE TGRS, 30(5), 950-959.
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Normalization (Pepe and Lanari, 2006, TGRS)
    if norm:
        temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (
            max(tbase_list) - min(tbase_list))
        tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Generate Delaunay Triangulation
    date12_idx_list = Triangulation(tbase_list, pbase_list).edges.tolist()
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    return date12_list
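The Delaunay step in isolation, using matplotlib's Triangulation on toy (scaled temporal, perpendicular) coordinates; each returned edge is an index pair:

from matplotlib.tri import Triangulation

tbase = [0., 40., 85., 130., 170.]     # scaled temporal baselines (hypothetical)
pbase = [0., 210., -90., 60., 150.]    # perpendicular baselines in m (hypothetical)

date12_idx_list = sorted(sorted(edge) for edge in Triangulation(tbase, pbase).edges.tolist())
print(date12_idx_list)                 # index pairs selected by the triangulation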
Example No. 17
def threshold_temporal_baseline(date12_list,
                                btemp_max,
                                keep_seasonal=True,
                                btemp_min=0.0):
    '''Remove pairs/interferograms out of min/max/seasonal temporal baseline limits
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        btemp_max   : float, maximum temporal baseline
        btemp_min   : float, minimum temporal baseline
        keep_seasonal : keep interferograms with seasonal temporal baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_temporal_baseline(date12_list, 200)
        date12_list = threshold_temporal_baseline(date12_list, 200, False)
    '''
    if not date12_list: return []
    # Get date list and tbase list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date8_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    date6_list = ptime.yymmdd(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        tbase = int(abs(tbase_list[idx1] - tbase_list[idx2]))
        if btemp_min <= tbase <= btemp_max:
            date12_list_out.append(date12)
        elif keep_seasonal and tbase // 30 in [11, 12]:  # ~1 year +/- 1 month
            date12_list_out.append(date12)
    return date12_list_out
Example No. 18
def main(argv):
    
    # Read inputs
    inps = cmdLineParse()
    inps = read_template2inps(inps.template_file, inps)
    log(os.path.basename(sys.argv[0])+' '+inps.template_file)

    project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    print('project name: '+project_name)
    if not inps.sensor:
        inps.sensor = project_name2sensor(project_name)
 
    # Pair selection from reference
    if inps.reference_file:
        print('Use pairs info from reference file: '+inps.reference_file)
        date12_list = pnet.get_date12_list(inps.reference_file)

    # Pair selection from temp/perp/dop baseline info
    else:
        # Auto path setting for Miami user
        if not inps.baseline_file and pysar.miami_path and 'SCRATCHDIR' in os.environ:
            try:    inps.baseline_file = glob.glob(os.getenv('SCRATCHDIR')+'/'+project_name+'/SLC/bl_list.txt')[0]
            except: inps.baseline_file = None
        if not inps.baseline_file:
            raise Exception('ERROR: No baseline file found!')

        # Check start/end/exclude date
        date8_list = pnet.read_baseline_file(inps.baseline_file)[0]
        inps.exclude_date = ptime.yyyymmdd(inps.exclude_date)
        if not inps.exclude_date:
            inps.exclude_date = []
        else:
            print('input exclude dates: '+str(inps.exclude_date))
        if inps.start_date:
            print('input start date: '+inps.start_date)
            inps.exclude_date += [i for i in date8_list if float(i) < float(ptime.yyyymmdd(inps.start_date))]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.end_date:
            print('input end   date: '+inps.end_date)
            inps.exclude_date += [i for i in date8_list if float(i) > float(ptime.yyyymmdd(inps.end_date))]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.exclude_date:
            print('exclude    dates: ')
            print(inps.exclude_date)

        # Read baseline list file: bl_list.txt
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        date8_list, pbase_list, dop_list = pnet.read_baseline_file(inps.baseline_file, inps.exclude_date)[0:3]
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]

        # Initial network using input methods
        inps.method = inps.method.lower().replace('-','_')
        if inps.method in ['star','ps']:       inps.method = 'star'
        elif inps.method.startswith('seq'):    inps.method = 'sequential'
        elif inps.method.startswith('hierar'): inps.method = 'hierarchical'
        elif inps.method in ['mst','min_spanning_tree','minimum_spanning_tree']:  inps.method = 'mst'
        print('select method: '+inps.method)

        if   inps.method == 'all':
            date12_list = pnet.select_pairs_all(date6_list)
        elif inps.method == 'delaunay':
            date12_list = pnet.select_pairs_delaunay(date6_list, pbase_list, inps.norm)
        elif inps.method == 'star':
            date12_list = pnet.select_pairs_star(date6_list)
        elif inps.method == 'sequential':
            date12_list = pnet.select_pairs_sequential(date6_list, inps.increment_num)
        elif inps.method == 'hierarchical':
            date12_list = pnet.select_pairs_hierarchical(date6_list, pbase_list, inps.temp_perp_list)
        elif inps.method == 'mst':
            date12_list = pnet.select_pairs_mst(date6_list, pbase_list)
        else:
            raise Exception('Unrecognized select method: '+inps.method)
        print('initial number of interferograms: '+str(len(date12_list)))

        # Filter pairs (optional) using temp/perp/doppler baseline threshold
        if inps.method in ['star','hierarchical','mst']:
            inps.threshold = False
        if inps.threshold:
            # Temporal baseline
            date12_list = pnet.threshold_temporal_baseline(date12_list, inps.temp_base_max,\
                                                           inps.keep_seasonal, inps.temp_base_min)
            print('number of interferograms after filtering of <%d, %d> days in temporal baseline: %d'\
                  % (inps.temp_base_min, inps.temp_base_max, len(date12_list)))
            if inps.keep_seasonal:
                print('\tkeep seasonal pairs, i.e. pairs with temporal baseline == N*years +/- one month')

            # Perpendicular spatial baseline
            date12_list = pnet.threshold_perp_baseline(date12_list, date6_list, pbase_list, inps.perp_base_max)
            print('number of interferograms after filtering of max %d meters in perpendicular baseline: %d'\
                  % (inps.perp_base_max, len(date12_list)))
            
            # Doppler Overlap Percentage
            if inps.sensor:
                bandwidth_az = pnet.azimuth_bandwidth(inps.sensor)
                date12_list = pnet.threshold_doppler_overlap(date12_list, date6_list, dop_list,\
                                                             bandwidth_az, inps.dop_overlap_min/100.0)
                print('number of interferograms after filtering of min '+str(inps.dop_overlap_min)+'%'+\
                      ' overlap in azimuth Doppler frequency: '+str(len(date12_list)))

    # Write ifgram_list.txt
    if not date12_list:
        print('WARNING: No interferogram selected!')
        return None

    # date12_list to date_list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    print('number of acquisitions   input   : '+str(len(date6_list)))
    print('number of acquisitions   selected: '+str(len(list(set(m_dates + s_dates)))))
    print('number of interferograms selected: '+str(len(date12_list)))
    
    # Output directory/filename
    if not inps.outfile:
        if pysar.miami_path and 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv('SCRATCHDIR')+'/'+project_name+'/PROCESS'
        else:
            try:    inps.out_dir = os.path.dirname(os.path.abspath(inps.reference_file))
            except: inps.out_dir = os.path.dirname(os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir+'/ifgram_list.txt'
    inps.outfile = os.path.abspath(inps.outfile)
    inps.out_dir = os.path.dirname(inps.outfile)
    if not os.path.isdir(inps.out_dir):
        os.makedirs(inps.out_dir)

    # Write txt file
    print('writing >>> '+inps.outfile)
    np.savetxt(inps.outfile, date12_list, fmt='%s')

    # Plot network info
    if not inps.disp_fig:
        plt.switch_backend('Agg')

    out_fig_name = 'BperpHistory.pdf'
    print('plotting baseline history in temp/perp baseline domain to file: '+out_fig_name)
    fig2, ax2 = plt.subplots()
    ax2 = pnet.plot_perp_baseline_hist(ax2, date8_list, pbase_list)
    plt.savefig(inps.out_dir+'/'+out_fig_name, bbox_inches='tight')

    out_fig_name = 'Network.pdf'
    print('plotting network / pairs  in temp/perp baseline domain to file: '+out_fig_name)
    fig1, ax1 = plt.subplots()
    ax1 = pnet.plot_network(ax1, date12_list, date8_list, pbase_list)
    plt.savefig(inps.out_dir+'/'+out_fig_name, bbox_inches='tight')
    
    if inps.disp_fig:
        plt.show()

    return inps.outfile
Example No. 19
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print('input file(s) to be modified: ' + str(inps.file))
    print('number of interferograms: ' + str(len(date12_orig)))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if inps.reset:
        print(
            '----------------------------------------------------------------------------'
        )
        for file in inps.file:
            reset_pairs(file)

        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print(rmCmd)
            os.system(rmCmd)

        return

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based, inps.start_date, inps.end_date]):
        # Display network for manually modification when there is no other modification input.
        print('No input option found to remove interferograms')
        print('To manually modify the network, please use the --manual option')
        return

    # Convert index: input to continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print('Unrecognized input: ' + index)
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print(
            '----------------------------------------------------------------------------'
        )
        print('use reference pairs info from file: ' + inps.reference_file)
        print('number of interferograms in reference: ' +
              str(len(date12_to_keep)))
        print('date12 not in reference file:')
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print(
            '----------------------------------------------------------------------------'
        )
        print(
            'use coherence-based network modification from coherence file: ' +
            inps.coherence_file)
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.trans_file:
            print('input AOI in (lon0, lat1, lon1, lat0): ' +
                  str(inps.aoi_geo_box))
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.trans_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print('input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box))

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print(
                'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            )
            print('date12 with 1) average coherence < ' +
                  str(inps.min_coherence) + ' AND 2) not in MST network: ')
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print('date12 with average coherence < ' + str(inps.min_coherence))
            mst_date12_list = []

        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with perpendicular spatial baseline > ' +
              str(inps.max_perp_baseline) + ' meters')
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with temporal baseline > ' +
              str(inps.max_temp_baseline) + ' days')
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print(
            '----------------------------------------------------------------------------'
        )
        print('drop date12/pair with the following index number:')
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print(str(index) + '    ' + date12)

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs including the following dates: \n' +
              str(inps.exclude_date))
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date earlier than start-date: ' +
              inps.start_date)
        min_date = int(ptime.yyyymmdd(inps.start_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date later than end-date: ' + inps.end_date)
        max_date = int(ptime.yyyymmdd(inps.end_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print('date12 selected to remove:')
        print(date12_click)
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print(
        '----------------------------------------------------------------------------'
    )
    print('number of interferograms to remove: ' + str(len(date12_to_rmv)))
    print('list   of interferograms to remove:')
    print(date12_to_rmv)

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5,
                                            atr,
                                            ifgram_list_all,
                                            print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print(
            'Calculated date12 to drop is the same as the existing marked input file, skip updating file attributes.'
        )
        return

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print('update mask file for input ' + k + ' file based on ' +
                      Modified_File)
                inps.mask_file = 'mask.h5'
                print('writing >>> ' + inps.mask_file)
                ut.nonzero_mask(Modified_File, inps.mask_file)
            elif k == 'coherence':
                print('update average spatial coherence for input ' + k +
                      ' file based on: ' + Modified_File)
                outFile = 'averageSpatialCoherence.h5'
                print('writing >>> ' + outFile)
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print('\nplot modified network and save to file.')
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            if inps.mask_file:
                plotCmd += ' --mask ' + inps.mask_file
            print(plotCmd)
            os.system(plotCmd)

        print('Done.')
        return
    else:
        print('No new interferograms to drop, skip update.')
        return
Example No. 20
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(inps.template_file)
    key_list = list(template.keys())

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            inps.coherence_file = inps.file[k_list.index('coherence')]
        except ValueError:
            print(
                'No coherence file found! Can not use coherence-based method without it.'
            )

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = 'mask.h5'
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
            # Check trans file
            try:
                inps.trans_file = ut.get_file_list(inps.trans_file)[0]
            except:
                inps.trans_file = None
                print(
                    'Warning: no mapping transformation file found! Can not use '
                    + key + ' option without it.')
                print('skip this option.')
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
Example No. 21
def extract_attribute_interferogram(fname):
    '''Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Inputs:
        fname : str, Gamma interferogram filename or path, i.e. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Output:
        atr : dict, Attributes dictionary
    '''
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    atr_file = fname + '.rsc'
    #if os.path.isfile(atr_file):
    #    return atr_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['INSAR_PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    ## Get info: date12, number of looks
    try:
        date12 = str(re.findall(r'\d{8}[-_]\d{8}', file_basename)[0])
    except:
        date12 = str(re.findall(r'\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('_', '-').split('-')
    atr['DATE12'] = ptime.yymmdd(m_date) + '-' + ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    ## Read .off and .par file
    off_file = file_dir + '/*' + date12 + lks + '.off'
    m_par_file = file_dir + '/*' + m_date + lks + '.amp.par'
    s_par_file = file_dir + '/*' + s_date + lks + '.amp.par'

    try:
        off_file = ut.get_file_list(off_file)[0]
    except:
        print('\nERROR: Can not find .off file, it is supposed to look like: ' +
              off_file)
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except:
        print(
            '\nERROR: Can not find master date .par file, it is supposed to look like: '
            + m_par_file)
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except:
        print(
            '\nERROR: Can not find slave date .par file, it is supposed to look like: '
            + s_par_file)

    #print 'read '+m_par_file
    #print 'read '+off_file
    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)

    #print 'convert Gamma attribute to ROI_PAC style'
    atr.update(par_dict)
    atr.update(off_dict)
    atr = readfile.attribute_gamma2roipac(atr)

    ## Perp Baseline Info
    #print 'extract baseline info from %s, %s and %s file' % (m_par_file, s_par_file, off_file)
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    ## LAT/LON_REF1/2/3/4
    #print 'extract LAT/LON_REF1/2/3/4 from '+m_par_file
    atr = get_lalo_ref(m_par_file, atr)

    ## Write to .rsc file
    #print 'writing >>> '+atr_file
    print('merge %s, %s and %s into %s' % (os.path.basename(m_par_file), os.path.basename(s_par_file),\
                                           os.path.basename(off_file), os.path.basename(atr_file)))
    writefile.write_roipac_rsc(atr, atr_file)

    return atr_file
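The date12/looks extraction from the filename on its own, using a raw-string regex and the example filename from the docstring:

import os
import re

fname = 'diff_filt_HDR_130118-130129_4rlks.unw'
date12 = re.findall(r'\d{6}[-_]\d{6}', fname)[0]
lks = os.path.splitext(fname.split(date12)[1])[0]
print(date12, lks)   # 130118-130129 _4rlks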
Example No. 22
def plot_network(ax,
                 date12_list,
                 date_list,
                 pbase_list,
                 plot_dict={},
                 date12_list_drop=[]):
    '''Plot Temporal-Perp baseline Network
    Inputs
        ax : matplotlib axes object
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYYYMMDD/YYMMDD format
        pbase_list  : list of float, perp baseline, len=number of acquisition
        plot_dict   : dictionary with the following items:
                      fontsize
                      linewidth
                      markercolor
                      markersize

                      coherence_list : list of float, coherence value of each interferogram, len = number of ifgrams
                      disp_min/max :  float, min/max range of the color display based on coherence_list
                      colormap : string, colormap name
                      coh_thres : float, coherence of where to cut the colormap for display
                      disp_title : bool, show figure title or not, default: True
                      disp_drop: bool, show dropped interferograms or not, default: True
    Output
        ax : matplotlib axes object
    '''

    # Figure Setting
    keyList = list(plot_dict.keys())
    if not 'fontsize' in keyList: plot_dict['fontsize'] = 12
    if not 'linewidth' in keyList: plot_dict['linewidth'] = 2
    if not 'markercolor' in keyList: plot_dict['markercolor'] = 'orange'
    if not 'markersize' in keyList: plot_dict['markersize'] = 16
    # For colorful display of coherence
    if not 'coherence_list' in keyList: plot_dict['coherence_list'] = None
    if not 'disp_min' in keyList: plot_dict['disp_min'] = 0.2
    if not 'disp_max' in keyList: plot_dict['disp_max'] = 1.0
    if not 'colormap' in keyList: plot_dict['colormap'] = 'RdBu'
    if not 'disp_title' in keyList: plot_dict['disp_title'] = True
    if not 'coh_thres' in keyList: plot_dict['coh_thres'] = None
    if not 'disp_drop' in keyList: plot_dict['disp_drop'] = True
    coh_list = plot_dict['coherence_list']
    disp_min = plot_dict['disp_min']
    disp_max = plot_dict['disp_max']
    coh_thres = plot_dict['coh_thres']
    transparency = 0.7

    # Date Convert
    date8_list = ptime.yyyymmdd(sorted(date_list))
    date6_list = ptime.yymmdd(date8_list)
    dates, datevector = ptime.date_list2vector(date8_list)

    ## Keep/Drop - date12
    date12_list_keep = sorted(list(set(date12_list) - set(date12_list_drop)))
    idx_date12_keep = [date12_list.index(i) for i in date12_list_keep]
    idx_date12_drop = [date12_list.index(i) for i in date12_list_drop]

    ## Keep/Drop - date
    m_dates = [i.split('-')[0] for i in date12_list_keep]
    s_dates = [i.split('-')[1] for i in date12_list_keep]
    date8_list_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date8_list_drop = sorted(list(set(date8_list) - set(date8_list_keep)))
    idx_date_keep = [date8_list.index(i) for i in date8_list_keep]
    idx_date_drop = [date8_list.index(i) for i in date8_list_drop]

    # Plotting
    #ax=fig.add_subplot(111)
    ## Colorbar when coherence is used for coloring
    if coh_list:
        data_min = min(coh_list)
        data_max = max(coh_list)
        # Normalize
        normalization = False
        if normalization:
            coh_list = [(coh - data_min) / (data_max - data_min)
                        for coh in coh_list]
            disp_min = data_min
            disp_max = data_max

        print('showing coherence')
        print('colormap: ' + plot_dict['colormap'])
        print('display range: ' + str([disp_min, disp_max]))
        print('data    range: ' + str([data_min, data_max]))

        # Use lower/upper part of colormap to emphasize dropped interferograms
        if not coh_thres:
            # Find proper cut percentage so that all kept pairs are blue and dropped pairs are red
            coh_list_keep = [coh_list[i] for i in idx_date12_keep]
            coh_list_drop = [coh_list[i] for i in idx_date12_drop]
            if coh_list_drop:
                coh_thres = max(coh_list_drop)
            else:
                coh_thres = min(coh_list_keep)

        c1_num = np.ceil(200.0 * (coh_thres - disp_min) /
                         (disp_max - disp_min)).astype('int')
        coh_thres = c1_num / 200.0 * (disp_max - disp_min) + disp_min
        cmap = plt.get_cmap(plot_dict['colormap'])
        colors1 = cmap(np.linspace(0.0, 0.3, c1_num))
        colors2 = cmap(np.linspace(0.6, 1.0, 200 - c1_num))
        cmap = colors.LinearSegmentedColormap.from_list(
            'truncate_RdBu', np.vstack((colors1, colors2)))
        print('color jump at ' + str(coh_thres))

        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "5%", pad="3%")
        norm = mpl.colors.Normalize(vmin=disp_min, vmax=disp_max)
        cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
        cbar.set_label('Average Spatial Coherence',
                       fontsize=plot_dict['fontsize'])

    ## Dot - SAR Acquisition
    if idx_date_keep:
        x_list = [dates[i] for i in idx_date_keep]
        y_list = [pbase_list[i] for i in idx_date_keep]
        ax.plot(x_list,
                y_list,
                'ko',
                alpha=0.7,
                ms=plot_dict['markersize'],
                mfc=plot_dict['markercolor'])
    if idx_date_drop:
        x_list = [dates[i] for i in idx_date_drop]
        y_list = [pbase_list[i] for i in idx_date_drop]
        ax.plot(x_list,
                y_list,
                'ko',
                alpha=0.7,
                ms=plot_dict['markersize'],
                mfc='gray')

    ## Line - Pair/Interferogram
    # interferograms kept
    for date12 in date12_list_keep:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        x = np.array([dates[idx1], dates[idx2]])
        y = np.array([pbase_list[idx1], pbase_list[idx2]])
        if coh_list:
            coh = coh_list[date12_list.index(date12)]
            coh_idx = (coh - disp_min) / (disp_max - disp_min)
            ax.plot(x,
                    y,
                    '-',
                    lw=plot_dict['linewidth'],
                    alpha=transparency,
                    c=cmap(coh_idx))
        else:
            ax.plot(x,
                    y,
                    '-',
                    lw=plot_dict['linewidth'],
                    alpha=transparency,
                    c='k')

    # interferograms dropped
    if plot_dict['disp_drop']:
        for date12 in date12_list_drop:
            date1, date2 = date12.split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            x = np.array([dates[idx1], dates[idx2]])
            y = np.array([pbase_list[idx1], pbase_list[idx2]])
            if coh_list:
                coh = coh_list[date12_list.index(date12)]
                coh_idx = (coh - disp_min) / (disp_max - disp_min)
                ax.plot(x,
                        y,
                        '--',
                        lw=plot_dict['linewidth'],
                        alpha=transparency,
                        c=cmap(coh_idx))
            else:
                ax.plot(x,
                        y,
                        '--',
                        lw=plot_dict['linewidth'],
                        alpha=transparency,
                        c='k')

    if plot_dict['disp_title']:
        ax.set_title('Interferogram Network', fontsize=plot_dict['fontsize'])

    # axis format
    ax = ptime.auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'])[0]
    ax = auto_adjust_yaxis(ax, pbase_list, plot_dict['fontsize'])
    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Perp Baseline [m]', fontsize=plot_dict['fontsize'])

    # Legend
    solid_line = mlines.Line2D([], [],
                               color='k',
                               ls='solid',
                               label='Interferograms')
    dash_line = mlines.Line2D([], [],
                              color='k',
                              ls='dashed',
                              label='Interferograms dropped')
    ax.legend(handles=[solid_line, dash_line])

    return ax
Example No. 23
def read_template2inps(templateFile, inps=None):
    '''Read network options from template file into Namespace variable inps'''
    template_dict = readfile.read_template(templateFile)
    if not template_dict:
        print('Empty template: '+templateFile)
        return None
    keyList = list(template_dict.keys())

    if not inps:
        inps = cmdLineParse([''])

    # Read network option regardless of prefix
    for key in keyList:
        if 'selectPairs.'    in key:   template_dict[key.split('selectPairs.')[1]]    = template_dict[key]
        if 'pysar.network.'  in key:   template_dict[key.split('pysar.network.')[1]]  = template_dict[key]
        if 'select.network.' in key:   template_dict[key.split('select.network.')[1]] = template_dict[key]
    keyList = list(template_dict.keys())
    for key, value in template_dict.items():
        if value.lower() in ['off','false','n']:  template_dict[key] = 'no'
        if value.lower() in ['on', 'true', 'y']:  template_dict[key] = 'yes'

    # Update inps value if not existed
    if not inps.method:
        if   'selectMethod' in keyList:  inps.method = template_dict['selectMethod']
        elif 'method'       in keyList:  inps.method = template_dict['method']
        else: inps.method = 'all'

    if not inps.perp_base_max:
        if 'perpBaseMax'  in keyList:  inps.perp_base_max = float(template_dict['perpBaseMax'])
        else: inps.perp_base_max = 500.0

    if not inps.temp_base_max:
        if 'lengthDayMax'   in keyList:  inps.temp_base_max = float(template_dict['lengthDayMax'])
        elif 'tempBaseMax'  in keyList:  inps.temp_base_max = float(template_dict['tempBaseMax'])
        else: inps.temp_base_max = 1800.0

    if not inps.temp_base_min:
        if 'lengthDayMin'   in keyList:  inps.temp_base_min = float(template_dict['lengthDayMin'])
        elif 'tempBaseMin'  in keyList:  inps.temp_base_min = float(template_dict['tempBaseMin'])
        else: inps.temp_base_min = 0.0

    if 'seasonal'     in keyList and template_dict['seasonal'].lower()     == 'no': inps.keep_seasonal = False
    if 'keepSeasonal' in keyList and template_dict['keepSeasonal'].lower() == 'no': inps.keep_seasonal = False

    if not inps.dop_overlap_min:
        if 'DopOverlapThresh'   in keyList:  inps.dop_overlap_min = float(template_dict['DopOverlapThresh'])
        elif 'dopOverlapThresh' in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapThresh'])
        elif 'dopOverlapMin'    in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapMin'])
        else: inps.dop_overlap_min = 15.0

    if not inps.reference_file and 'referenceFile' in keyList:  inps.reference_file = template_dict['referenceFile']
    if not inps.increment_num:
        if 'incrementNum'  in keyList:  inps.increment_num  = int(template_dict['incrementNum'])
        else: inps.increment_num = 3

    if not inps.temp_perp_list:
        if 'dayPerpList'    in keyList:  inps.temp_perp_list = template_dict['dayPerpList']
        elif 'tempPerpList' in keyList:  inps.temp_perp_list = template_dict['tempPerpList']
        else: inps.temp_perp_list = '16,1600;32,800;48,600;64,200'
        inps.temp_perp_list = [[float(j) for j in i.split(',')] for i in inps.temp_perp_list.split(';')]

    if not inps.exclude_date and 'excludeDate' in keyList:
        ex_date_list = [i for i in template_dict['excludeDate'].split(',')]
        inps.exclude_date = ptime.yymmdd(ex_date_list)

    if not inps.start_date and 'startDate' in keyList:
        inps.start_date = ptime.yyyymmdd(template_dict['startDate'])
    if not inps.end_date and 'endDate' in keyList:
        inps.end_date = ptime.yyyymmdd(template_dict['endDate'])

    if not inps.m_date and 'masterDate' in keyList:
        inps.m_date = ptime.yymmdd(template_dict['masterDate'])

    return inps