Example No. 1
def get_giant_ifg_list(fnames):
    m_date_list = []
    s_date_list = []
    pbase_list = []

    ext = os.path.splitext(fnames[0])[1]
    if ext == '.h5':
        obj = ifgramStack(fnames[0])
        obj.open()
        m_date_list = obj.mDates[obj.dropIfgram].tolist()
        s_date_list = obj.sDates[obj.dropIfgram].tolist()
        pbase_list = obj.pbaseIfgram[obj.dropIfgram].tolist()

    else:
        ifgramNum = len(fnames)
        print('Number of interferograms: %d' % (ifgramNum))
        for fname in fnames:
            atr = readfile.read_attribute(fname)
            m_date, s_date = ptime.yymmdd(atr['DATE12'].split('-'))
            pbase = (float(atr['P_BASELINE_TOP_HDR']) +
                     float(atr['P_BASELINE_BOTTOM_HDR'])) / 2.
            m_date_list.append(m_date)
            s_date_list.append(s_date)
            pbase_list.append(pbase)
    return m_date_list, s_date_list, pbase_list
Example No. 2
def read_igram_pairs(igramFile):
    """Read pairs index from hdf5 file"""
    # Read Igram file
    h5file = h5py.File(igramFile, 'r')
    k = list(h5file.keys())
    if 'interferograms' in k:
        k[0] = 'interferograms'
    elif 'coherence' in k:
        k[0] = 'coherence'
    if k[0] not in ['interferograms', 'coherence', 'wrapped']:
        print('Only interferograms / coherence / wrapped are supported.')
        sys.exit(1)

    dateList = ptime.ifgram_date_list(igramFile)
    dateList6 = ptime.yymmdd(dateList)

    pairs = []
    igramList = list(h5file[k[0]].keys())
    for igram in igramList:
        date12 = h5file[k[0]][igram].attrs['DATE12'].split('-')
        pairs.append([dateList6.index(date12[0]), dateList6.index(date12[1])])
    h5file.close()

    pairs = pair_sort(pairs)

    return pairs
Example No. 3
def read_baseline_info(baseline_file, reference_file):
    """Read date, bperp and/or DOP info
    Parameters: baseline_file : str, path of bl_list.txt file
                reference_file : str, path of ifgramStack.h5 file
    Returns:    date_list : list of str in YYMMDD format
                tbase_list : list of int in days
                pbase_list : list of float in meter
                dop_list : None or list of 1D np.ndarray of size (3,)
    """
    dop_list = None
    if baseline_file:
        date_list, pbase_list, dop_list = pnet.read_baseline_file(
            baseline_file)[0:3]
        date_list = ptime.yymmdd(date_list)
        tbase_list = ptime.date_list2tbase(date_list)[0]

    elif reference_file:
        obj = ifgramStack(reference_file)
        date12_list_all = obj.get_date12_list(dropIfgram=False)
        date12_list_all = ptime.yymmdd_date12(date12_list_all)
        m_dates = [i.split('-')[0] for i in date12_list_all]
        s_dates = [i.split('-')[1] for i in date12_list_all]
        date_list = sorted(list(set(m_dates + s_dates)))
        tbase_list = ptime.date_list2tbase(date_list)[0]

        pbase_list = obj.get_perp_baseline_timeseries(
            dropIfgram=False).tolist()
    return date_list, tbase_list, pbase_list, dop_list
Example No. 4
def coherence_matrix(date12_list, coh_list, diagValue=np.nan):
    """Return coherence matrix based on input date12 list and its coherence
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        coh_matrix  - 2D np.array of size (date_num, date_num)
                      np.nan for non-existing interferograms
                      diagValue for diagonal elements (np.nan by default)
    """
    # Get date list
    date12_list = ptime.yymmdd_date12(date12_list)
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date_list = sorted(ptime.yymmdd(list(set(m_dates + s_dates))))
    date_num = len(date_list)

    coh_mat = np.zeros([date_num, date_num])
    coh_mat[:] = np.nan
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date_list.index(date1)
        idx2 = date_list.index(date2)
        coh = coh_list[date12_list.index(date12)]
        coh_mat[idx1, idx2] = coh  # symmetric
        coh_mat[idx2, idx1] = coh

    if diagValue is not np.nan:
        for i in range(date_num):  # diagonal value
            coh_mat[i, i] = diagValue
    return coh_mat
Example No. 5
def select_pairs_sequential(date_list,
                            num_connection=2,
                            date12_format='YYMMDD-YYMMDD'):
    """Select Pairs in a Sequential way:
        For each acquisition, find its num_connection nearest acquisitions in the past time.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
    Reference:
        Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series, IEEE TGRS, 51(7), 4249-4259.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date_idx_list = list(range(len(date6_list)))

    # Get pairs index list
    date12_idx_list = []
    for date_idx in date_idx_list:
        for i in range(num_connection):
            if date_idx - i - 1 >= 0:
                date12_idx_list.append([date_idx - i - 1, date_idx])
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
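A minimal, self-contained sketch of the same nearest-previous-acquisition indexing, using a toy list of YYYYMMDD strings (hypothetical dates) and no ptime helpers:

# Sequential pairing: connect each date to its num_connection nearest predecessors.
dates = ['20070106', '20070221', '20070408', '20070524']
num_connection = 2

pair_idx = []
for idx in range(len(dates)):
    for i in range(num_connection):
        if idx - i - 1 >= 0:
            pair_idx.append([idx - i - 1, idx])
pair_idx = sorted(pair_idx)      # [[0, 1], [0, 2], [1, 2], [1, 3], [2, 3]]

date12_list = ['{}-{}'.format(dates[i], dates[j]) for i, j in pair_idx]
print(date12_list)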
Example No. 6
def read_pairs_list(date12ListFile, dateList=[]):
    """Read Pairs List file like below:
    070311-070426
    070311-070611
    ...
    """
    # Read date12 list file
    date12List = sorted(
        list(np.loadtxt(date12ListFile, dtype=bytes).astype(str)))

    # Get dateList from date12List
    if not dateList:
        dateList = []
        for date12 in date12List:
            dates = date12.split('-')
            if not dates[0] in dateList:
                dateList.append(dates[0])
            if not dates[1] in dateList:
                dateList.append(dates[1])
        dateList.sort()
    date6List = ptime.yymmdd(dateList)

    # Get pair index
    pairs_idx = []
    for date12 in date12List:
        dates = date12.split('-')
        pair_idx = [date6List.index(dates[0]), date6List.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
Example No. 7
def select_master_interferogram(date12_list,
                                date_list,
                                pbase_list,
                                m_date=None):
    """Select reference interferogram based on input temp/perp baseline info
    If master_date is specified, select its closest slave_date, which is newer than master_date;
        otherwise, choose the closest pair among all pairs as master interferogram.
    Example:
        master_date12   = pnet.select_master_ifgram(date12_list, date_list, pbase_list)
        '080211-080326' = pnet.select_master_ifgram(date12_list, date_list, pbase_list, m_date='080211')
    """
    pbase_array = np.array(pbase_list, dtype='float64')
    # Get temporal baseline
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    tbase_array = np.array(ptime.date_list2tbase(date8_list)[0],
                           dtype='float64')
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_array) - min(pbase_array)) / (
        max(tbase_array) - min(tbase_array))
    tbase_array *= temp2perp_scale

    # Calculate temporal/perpendicular baseline distance for input pairs
    idx1 = np.array(
        [date6_list.index(date12.split('-')[0]) for date12 in date12_list])
    idx2 = np.array(
        [date6_list.index(date12.split('-')[1]) for date12 in date12_list])
    base_distance = np.sqrt((tbase_array[idx2] - tbase_array[idx1])**2 +
                            (pbase_array[idx2] - pbase_array[idx1])**2)

    # Get master interferogram index
    if not m_date:
        # Choose pair with shortest temp/perp baseline
        m_date12_idx = np.argmin(base_distance)
    else:
        m_date = ptime.yymmdd(m_date)
        # Choose the pair containing m_date with the shortest temp/perp baseline
        m_date12_idx_array = np.array([
            date12_list.index(date12) for date12 in date12_list
            if m_date + '-' in date12
        ])
        min_base_distance = np.min(base_distance[m_date12_idx_array])
        m_date12_idx = np.where(base_distance == min_base_distance)[0][0]

    m_date12 = date12_list[m_date12_idx]
    return m_date12
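The Pepe and Lanari (2006) normalization and the baseline-distance computation above can be illustrated with NumPy alone; the baseline values below are hypothetical and the ptime/pnet helpers are not used:

import numpy as np

# Toy perpendicular (m) and temporal (days) baselines for 4 acquisitions.
pbase = np.array([0., 120., -80., 40.])
tbase = np.array([0., 46., 92., 138.])

# Scale the temporal axis so both axes span the same range (Pepe and Lanari, 2006).
scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())
tbase_scaled = tbase * scale

# Euclidean distance in the scaled temp/perp plane for three candidate pairs (by date index).
idx1 = np.array([0, 1, 2])
idx2 = np.array([1, 2, 3])
dist = np.sqrt((tbase_scaled[idx2] - tbase_scaled[idx1])**2 +
               (pbase[idx2] - pbase[idx1])**2)
print('shortest-baseline pair index:', int(np.argmin(dist)))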
Example No. 8
def write_pairs_list(pairs, dateList, outName):
    """Write pairs list file."""
    dateList6 = ptime.yymmdd(dateList)
    fl = open(outName, 'w')
    for idx in pairs:
        date12 = dateList6[idx[0]] + '-' + dateList6[idx[1]] + '\n'
        fl.write(date12)
    fl.close()
    return 1
Example No. 9
def select_pairs_all(date_list):
    """Select All Possible Pairs/Interferograms
    Input : date_list   - list of date in YYMMDD/YYYYMMDD format
    Output: date12_list - list date12 in YYMMDD-YYMMDD format
    Reference:
        Berardino, P., G. Fornaro, R. Lanari, and E. Sansosti (2002), A new algorithm for surface deformation monitoring
        based on small baseline differential SAR interferograms, IEEE TGRS, 40(11), 2375-2383.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date12_list = list(itertools.combinations(date6_list, 2))
    date12_list = [date12[0] + '-' + date12[1] for date12 in date12_list]
    return date12_list
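A quick usage sketch with a toy three-date list (hypothetical dates), showing the n*(n-1)/2 pairs produced by itertools.combinations:

import itertools

dates = ['070106', '070221', '070408']
date12_list = ['-'.join(p) for p in itertools.combinations(dates, 2)]
print(date12_list)   # ['070106-070221', '070106-070408', '070221-070408']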
Example No. 10
def read_baseline_file(baselineFile, exDateList=[]):
    """Read bl_list.txt without dates listed in exDateList
    # Date  Bperp    dop0/PRF  dop1/PRF   dop2/PRF      PRF    slcDir
    070106     0.0   0.03      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070106/
    070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/
    070824  2787.3   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070824/
    ...

    Examples:
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile)
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile, ['080520', '100726'])
        date8List, perpBaseList = read_baseline_file(baselineFile)[0:2]
    """
    exDateList = ptime.yymmdd(exDateList)
    if not exDateList:
        exDateList = []

    # Read baseline file into lines
    fb = open(baselineFile)
    lines = []
    for line in fb:
        lines.append(line.strip())
    fb.close()

    # Read each line and put the values into arrays
    date6List = []
    perpBaseList = []
    dopplerList = []
    slcDirList = []
    for line in lines:
        c = line.split()  # splits on white space
        date = c[0]
        if not date in exDateList:
            date6List.append(date)
            perpBaseList.append(float(c[1]))
            try:
                dop = np.array([float(c[2]), float(c[3]), float(c[4])])
                prf = float(c[5])
                dop *= prf
                dopplerList.append(dop)
            except:
                pass
            try:
                slcDirList.append(c[6])
            except:
                pass

    date8List = ptime.yyyymmdd(date6List)
    return date8List, perpBaseList, dopplerList, slcDirList
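A minimal sketch of how one bl_list.txt record is parsed, using the sample line from the docstring above; the Doppler polynomial is stored as a fraction of PRF and converted to Hz by multiplying with the PRF:

import numpy as np

line = '070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/'
c = line.split()

date6 = c[0]                                    # '070709'
pbase = float(c[1])                             # perpendicular baseline in meters
dop = np.array([float(c[2]), float(c[3]), float(c[4])])
prf = float(c[5])
dop *= prf                                      # fraction of PRF -> Hz
slc_dir = c[6]
print(date6, pbase, dop, slc_dir)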
Example No. 11
def select_pairs_mst(date_list, pbase_list, date12_format='YYMMDD-YYMMDD'):
    """Select Pairs using Minimum Spanning Tree technique
        Connection Cost is calculated using the baseline distance in perp and scaled temporal baseline (Pepe and Lanari,
        2006, TGRS) plane.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
    References:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Perissin D., Wang T. (2012), Repeat-pass SAR interferometry with partially coherent targets. IEEE TGRS. 271-280
    """
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (max(tbase_list) -
                                                             min(tbase_list))
    tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Get weight matrix
    ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
    ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
    ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
    ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix

    # 2D distance matrix in temp/perp domain
    weightMat = np.sqrt(np.square(ttMat) + np.square(ppMat))
    weightMat = sparse.csr_matrix(weightMat)  # compressed sparse row matrix

    # MST path based on weight matrix
    mstMat = sparse.csgraph.minimum_spanning_tree(weightMat)

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [
        date_idx_array.tolist() for date_idx_array in sparse.find(mstMat)[0:2]
    ]
    date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]] + '-' + date6_list[idx[1]]
        date12_list.append(date12)
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
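The MST step can be reproduced with SciPy alone on toy (hypothetical) baselines; this sketch imports the csgraph routine explicitly instead of going through the sparse namespace used above:

import numpy as np
from scipy.sparse import csr_matrix, find
from scipy.sparse.csgraph import minimum_spanning_tree

# Toy baselines for 4 acquisitions; tbase is assumed already scaled to perp units.
tbase = np.array([0., 67., 134., 200.])
pbase = np.array([0., 120., -80., 40.])

# Pairwise distance in the temp/perp plane as the connection cost.
tt1, tt2 = np.meshgrid(tbase, tbase)
pp1, pp2 = np.meshgrid(pbase, pbase)
weight = np.sqrt((tt1 - tt2)**2 + (pp1 - pp2)**2)

mst = minimum_spanning_tree(csr_matrix(weight))
rows, cols = find(mst)[0:2]
print(sorted(sorted(p) for p in zip(rows.tolist(), cols.tolist())))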
Example No. 12
def date12_list2index(date12_list, date_list=[]):
    """Convert list of date12 string into list of index"""
    # Get dateList from date12List
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = list(set(m_dates + s_dates))
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(date_list)))

    # Get pair index
    pairs_idx = []
    for date12 in date12_list:
        dates = date12.split('-')
        pair_idx = [date6_list.index(dates[0]), date6_list.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
Example No. 13
def threshold_perp_baseline(date12_list,
                            date_list,
                            pbase_list,
                            pbase_max,
                            pbase_min=0.0):
    """Remove pairs/interoferogram out of [pbase_min, pbase_max]
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string for date in YYMMDD/YYYYMMDD format, optional
        pbase_list  : list of float for perpendicular spatial baseline
        pbase_max   : float, maximum perpendicular baseline
        pbase_min   : float, minimum perpendicular baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_perp_baseline(date12_list, date_list, pbase_list, 500)
    """
    if not date12_list:
        return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if not len(date_list) == len(pbase_list):
            print(
                'ERROR: number of existing dates is not equal to number of perp baselines!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        pbase = abs(pbase_list[idx1] - pbase_list[idx2])
        if pbase_min <= pbase <= pbase_max:
            date12_list_out.append(date12)
    return date12_list_out
Example No. 14
def threshold_doppler_overlap(date12_list,
                              date_list,
                              dop_list,
                              bandwidth_az,
                              dop_overlap_min=0.15):
    """Remove pairs/interoferogram with doppler overlap larger than critical value
    Inputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYMMDD/YYYYMMDD format, optional
        dop_list    : list of list of 3 float, for centroid Doppler frequency
        bandwidth_az    : float, bandwidth in azimuth direction
        dop_overlap_min : float, minimum overlap of azimuth Doppler frequency
    Outputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
    """
    if not date12_list:
        return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if len(date_list) != len(dop_list):
            print(
                'ERROR: number of existing dates is not equal to number of Doppler values!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        dop_overlap = calculate_doppler_overlap(dop_list[idx1], dop_list[idx2],
                                                bandwidth_az)
        if dop_overlap >= dop_overlap_min:
            date12_list_out.append(date12)
    return date12_list_out
Example No. 15
def select_pairs_star(date_list,
                      m_date=None,
                      pbase_list=[],
                      date12_format='YYMMDD-YYMMDD'):
    """Select Star-like network/interferograms/pairs, it's a single master network, similar to PS approach.
    Usage:
        m_date : master date, choose it based on the following cretiria:
                 1) near the center in temporal and spatial baseline
                 2) prefer winter season than summer season for less temporal decorrelation
    Reference:
        Ferretti, A., C. Prati, and F. Rocca (2001), Permanent scatterers in SAR interferometry, IEEE TGRS, 39(1), 8-20.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)

    # Select master date if not given
    if not m_date:
        m_date = select_master_date(date8_list, pbase_list)
        print(('auto select master date: ' + m_date))

    # Check input master date
    m_date8 = ptime.yyyymmdd(m_date)
    if m_date8 not in date8_list:
        print('Input master date does not exist in the date list!')
        print(('Input master date: ' + str(m_date8)))
        print(('Input date list: ' + str(date8_list)))
        m_date8 = None

    # Generate star/ps network
    m_idx = date8_list.index(m_date8)
    date12_idx_list = [
        sorted([m_idx, s_idx]) for s_idx in range(len(date8_list))
        if s_idx != m_idx
    ]
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
Example No. 16
def select_pairs_delaunay(date_list,
                          pbase_list,
                          norm=True,
                          date12_format='YYMMDD-YYMMDD'):
    """Select Pairs using Delaunay Triangulation based on temporal/perpendicular baselines
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
        norm       : normalize temporal baseline to the same scale as the perpendicular baseline
    Key points
        1. Define a ratio between perpendicular and temporal baseline axis units (Pepe and Lanari, 2006, TGRS).
        2. Pairs with too large perpendicular / temporal baseline or Doppler centroid difference should be removed
           after this, using a threshold, to avoid strong decorrelations (Zebker and Villasenor, 1992, TGRS).
    Reference:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Zebker, H. A., and J. Villasenor (1992), Decorrelation in interferometric radar echoes, IEEE TGRS, 30(5), 950-959.
    """
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Normalization (Pepe and Lanari, 2006, TGRS)
    if norm:
        temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (
            max(tbase_list) - min(tbase_list))
        tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Generate Delaunay Triangulation
    date12_idx_list = Triangulation(tbase_list, pbase_list).edges.tolist()
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
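The Triangulation class used above is presumably matplotlib.tri.Triangulation; a minimal sketch under that assumption, with hypothetical baseline values:

from matplotlib.tri import Triangulation

# Toy scaled temporal (x) and perpendicular (y) baselines for 5 acquisitions.
tbase = [0., 50., 110., 160., 220.]
pbase = [0., 90., -60., 30., -10.]

tri = Triangulation(tbase, pbase)
edges = [sorted(e) for e in tri.edges.tolist()]   # each edge is a pair of date indices
print(sorted(edges))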
Example No. 17
def coherence_matrix(date12_list,
                     coh_list,
                     diag_value=np.nan,
                     fill_triangle='both',
                     date_list=None):
    """Return coherence matrix based on input date12 list and its coherence
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
        diag_value  - number, value to be filled in the diagonal
        fill_triangle - str, 'both', 'upper', 'lower'
    Output:
        coh_matrix  - 2D np.array of size (date_num, date_num)
                      np.nan for non-existing interferograms
                      diag_value for diagonal elements (np.nan by default)
    """
    # Get date list
    date12_list = ptime.yymmdd_date12(date12_list)
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(list(set(m_dates + s_dates)))
    date_list = ptime.yymmdd(date_list)
    date_num = len(date_list)

    coh_mat = np.zeros([date_num, date_num])
    coh_mat[:] = np.nan
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date_list.index(date1)
        idx2 = date_list.index(date2)
        coh = coh_list[date12_list.index(date12)]
        if fill_triangle in ['upper', 'both']:
            coh_mat[idx1, idx2] = coh  # symmetric
        if fill_triangle in ['lower', 'both']:
            coh_mat[idx2, idx1] = coh

    if diag_value is not np.nan:
        for i in range(date_num):  # diagonal value
            coh_mat[i, i] = diag_value
    return coh_mat
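A self-contained sketch of the matrix-filling pattern for fill_triangle='both', with three hypothetical pairs and no ptime calls:

import numpy as np

date_list = ['070106', '070221', '070408']
date12_list = ['070106-070221', '070221-070408', '070106-070408']
coh_list = [0.85, 0.72, 0.40]

num = len(date_list)
coh_mat = np.full((num, num), np.nan)
for date12, coh in zip(date12_list, coh_list):
    d1, d2 = date12.split('-')
    i, j = date_list.index(d1), date_list.index(d2)
    coh_mat[i, j] = coh   # upper triangle
    coh_mat[j, i] = coh   # lower triangle (symmetric)
print(coh_mat)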
Example No. 18
def threshold_coherence_based_mst(date12_list, coh_list):
    """Return a minimum spanning tree of network based on the coherence inverse.
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        mst_date12_list - list of string in YYMMDD-YYMMDD format, for MST network of interferograms 
    """
    # coh_list --> coh_mat --> weight_mat
    coh_mat = coherence_matrix(date12_list, coh_list)
    mask = ~np.isnan(coh_mat)
    wei_mat = np.zeros(coh_mat.shape)
    wei_mat[:] = np.inf
    wei_mat[mask] = 1 / coh_mat[mask]

    # MST path based on weight matrix
    wei_mat_csr = sparse.csr_matrix(wei_mat)
    mst_mat_csr = sparse.csgraph.minimum_spanning_tree(wei_mat_csr)

    # Get date6_list
    date12_list = ptime.yymmdd_date12(date12_list)
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(
        sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [
        date_idx_array.tolist()
        for date_idx_array in sparse.find(mst_mat_csr)[0:2]
    ]
    mst_date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]] + '-' + date6_list[idx[1]]
        mst_date12_list.append(date12)
    return mst_date12_list
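A sketch of the inverse-coherence weighting on a toy (hypothetical) coherence matrix; here missing pairs are simply left unstored in the sparse matrix rather than set to np.inf as above:

import numpy as np
from scipy.sparse import csr_matrix, find
from scipy.sparse.csgraph import minimum_spanning_tree

# Upper-triangular coherence for 4 dates; np.nan marks pairs that do not exist.
coh_mat = np.array([[np.nan, 0.9,    0.3,    np.nan],
                    [np.nan, np.nan, 0.8,    0.2],
                    [np.nan, np.nan, np.nan, 0.7],
                    [np.nan, np.nan, np.nan, np.nan]])

# Inverse coherence as edge weight: high-coherence pairs become cheap edges.
weight = np.zeros(coh_mat.shape)
mask = ~np.isnan(coh_mat)
weight[mask] = 1.0 / coh_mat[mask]

mst = minimum_spanning_tree(csr_matrix(weight))
rows, cols = find(mst)[0:2]
print(sorted(sorted(p) for p in zip(rows.tolist(), cols.tolist())))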
Example No. 19
def threshold_temporal_baseline(date12_list,
                                btemp_max,
                                keep_seasonal=True,
                                btemp_min=0.0):
    """Remove pairs/interferograms out of min/max/seasonal temporal baseline limits
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        btemp_max   : float, maximum temporal baseline
        btemp_min   : float, minimum temporal baseline
        keep_seasonal : keep interferograms with seasonal temporal baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_temporal_baseline(date12_list, 200)
        date12_list = threshold_temporal_baseline(date12_list, 200, False)
    """
    if not date12_list:
        return []
    # Get date list and tbase list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date8_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    date6_list = ptime.yymmdd(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        tbase = int(abs(tbase_list[idx1] - tbase_list[idx2]))
        if btemp_min <= tbase <= btemp_max:
            date12_list_out.append(date12)
        elif keep_seasonal and tbase // 30 in [11, 12]:
            date12_list_out.append(date12)
    return date12_list_out
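A worked check of the seasonal rule above, for a hypothetical pair 352 days apart:

btemp_min, btemp_max = 0.0, 200
tbase = 352
print(btemp_min <= tbase <= btemp_max)    # False -> fails the plain temporal threshold
print(tbase // 30 in [11, 12])            # True  -> kept anyway when keep_seasonal=True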
Example No. 20
def extract_metadata4interferogram(fname):
    """Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Parameters: fname : str, Gamma interferogram filename or path,
                    i.e. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Returns:    atr : dict, Attributes dictionary
    """
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    rsc_file = fname+'.rsc'
    # if os.path.isfile(rsc_file):
    #    return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    # Get info: date12, number of looks
    try:
        date12 = str(re.findall(r'\d{8}[-_]\d{8}', file_basename)[0])
    except:
        date12 = str(re.findall(r'\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('-', '_').split('_')
    atr['DATE12'] = ptime.yymmdd(m_date)+'-'+ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    # Read .off and .par file
    off_file = file_dir+'/*'+date12+lks+'.off'
    m_par_file = [file_dir+'/*'+m_date+lks+i for i in ['.amp.par', '.ramp.par']]
    s_par_file = [file_dir+'/*'+s_date+lks+i for i in ['.amp.par', '.ramp.par']]

    try:
        off_file = ut.get_file_list(off_file)[0]
    except:
        print('\nERROR: Cannot find .off file, it is supposed to be like: '+off_file)
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except:
        print('\nERROR: Cannot find master date .par file, it is supposed to be like: '+str(m_par_file))
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except:
        print('\nERROR: Cannot find slave date .par file, it is supposed to be like: '+str(s_par_file))

    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)
    atr.update(par_dict)
    atr.update(off_dict)

    # Perp Baseline Info
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    # LAT/LON_REF1/2/3/4
    atr = get_lalo_ref(m_par_file, atr)

    # Write to .rsc file
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except:
        atr_orig = dict()
    if not set(atr.items()).issubset(set(atr_orig.items())):
        atr_out = {**atr_orig, **atr}
        print('merge %s, %s and %s into %s' % (os.path.basename(m_par_file),
                                               os.path.basename(s_par_file),
                                               os.path.basename(off_file),
                                               os.path.basename(rsc_file)))
        writefile.write_roipac_rsc(atr_out, out_file=rsc_file)

    return rsc_file
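A small sketch of the date12 and multilook-suffix parsing above, on the sample filename from the docstring:

import os
import re

file_basename = 'diff_filt_HDR_130118-130129_4rlks.unw'

date12 = re.findall(r'\d{6}[-_]\d{6}', file_basename)[0]     # '130118-130129'
m_date, s_date = date12.replace('-', '_').split('_')
lks = os.path.splitext(file_basename.split(date12)[1])[0]    # '_4rlks'
print(m_date, s_date, lks)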