Example #1
def read_exclude_date(inps, date_list_all):
    inps.excludeDate = ptime.yyyymmdd(inps.excludeDate)
    if not inps.excludeDate:
        inps.excludeDate = []
    else:
        log('input exclude dates: {}'.format(inps.excludeDate))

    if inps.startDate:
        log('input start date: ' + inps.startDate)
        inps.excludeDate += [
            i for i in date_list_all
            if float(i) < float(ptime.yyyymmdd(inps.startDate))
        ]
        inps.excludeDate = sorted(inps.excludeDate)

    if inps.endDate:
        log('input end   date: ' + inps.endDate)
        inps.excludeDate += [
            i for i in date_list_all
            if float(i) > float(ptime.yyyymmdd(inps.endDate))
        ]
        inps.excludeDate = sorted(inps.excludeDate)

    if inps.excludeDate:
        log('exclude    dates: ({})\n{}'.format(len(inps.excludeDate),
                                                inps.excludeDate))
    return inps.excludeDate
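Below is a minimal self-contained sketch of the same start/end filtering logic, with the PySAR helpers (ptime.yyyymmdd, log) replaced by built-ins and plain YYYYMMDD strings (toy dates, for illustration only):

# illustration only: plain YYYYMMDD strings stand in for ptime-normalized dates
date_list_all = ['20070106', '20070709', '20070824', '20080112']
start_date, end_date = '20070201', '20071231'
exclude = sorted(d for d in date_list_all
                 if float(d) < float(start_date) or float(d) > float(end_date))
print(exclude)  # ['20070106', '20080112']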
Example #2
def read_template2inps(template_file, inps=None):
    """Read input template options into Namespace inps"""
    if not inps:
        inps = cmd_line_parse()
    inpsDict = vars(inps)
    print('read options from template file: '+os.path.basename(template_file))
    template = readfile.read_template(template_file)
    template = ut.check_template_auto_value(template)

    # Update inps if key exists in template file
    prefix = 'pysar.network.'
    keyList = [i for i in list(inpsDict.keys()) if prefix+i in template.keys()]
    for key in keyList:
        value = template[prefix+key]
        if key in ['coherenceBased', 'keepMinSpanTree']:
            inpsDict[key] = value
        elif value:
            if key in ['minCoherence', 'tempBaseMax', 'perpBaseMax']:
                inpsDict[key] = float(value)
            elif key in ['connNumMax']:
                inpsDict[key] = int(value)
            elif key in ['maskFile', 'referenceFile']:
                inpsDict[key] = value
            elif key == 'aoiYX':
                tmp = [i.strip() for i in value.split(',')]
                sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
                sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
                inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])
            elif key == 'aoiLALO':
                tmp = [i.strip() for i in value.split(',')]
                sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
                sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
                inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
                # Check lookup file
                if not inps.lookupFile:
                    print('Warning: no lookup table file found! Cannot use '+key+' option without it.')
                    print('skip this option.')
                    inps.aoi_geo_box = None
            elif key in ['startDate', 'endDate']:
                inpsDict[key] = ptime.yyyymmdd(value)
            elif key == 'excludeDate':
                inpsDict[key] = ptime.yyyymmdd(value.replace(',', ' ').split())
            elif key == 'excludeIfgIndex':
                inpsDict[key] += value.replace(',', ' ').split()
                inps.excludeIfgIndex = read_input_index_list(inps.excludeIfgIndex, stackFile=inps.file)

    # Turn reset on if 1) no input options found to drop ifgram AND 2) there is template input
    if all(not i for i in [inps.referenceFile, inps.tempBaseMax, inps.perpBaseMax, inps.connNumMax,
                           inps.excludeIfgIndex, inps.excludeDate, inps.coherenceBased,
                           inps.startDate, inps.endDate, inps.reset, inps.manual]):
        print('No input option found to remove interferograms')
        print('Keep all interferograms by enabling the --reset option')
        inps.reset = True
    return inps
Example #3
File: network.py  Project: whigg/PySAR
def select_pairs_sequential(date_list,
                            num_connection=2,
                            date12_format='YYMMDD-YYMMDD'):
    """Select Pairs in a Sequential way:
        For each acquisition, find its num_connection nearest acquisitions in the past.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
    Reference:
        Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series, IEEE TGRS, 51(7), 4249-4259.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date_idx_list = list(range(len(date6_list)))

    # Get pairs index list
    date12_idx_list = []
    for date_idx in date_idx_list:
        for i in range(num_connection):
            if date_idx - i - 1 >= 0:
                date12_idx_list.append([date_idx - i - 1, date_idx])
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
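The sequential pairing logic reduces to simple index arithmetic; here is a standalone sketch (toy dates, no ptime conversions) showing the pairs produced for num_connection=2:

dates = ['070106', '070709', '070824', '071008']
num_connection = 2
pairs = []
for idx in range(len(dates)):            # for each acquisition...
    for i in range(num_connection):      # ...connect to up to 2 earlier ones
        if idx - i - 1 >= 0:
            pairs.append([idx - i - 1, idx])
print([dates[i] + '-' + dates[j] for i, j in sorted(pairs)])
# ['070106-070709', '070106-070824', '070709-070824', '070709-071008', '070824-071008']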
Example #4
File: network.py  Project: whigg/PySAR
def select_master_date(date_list, pbase_list=[]):
    """Select super master date based on input temporal and/or perpendicular baseline info.
    Return master date in YYYYMMDD format.
    """
    date8_list = ptime.yyyymmdd(date_list)
    if not pbase_list:
        # Choose date in the middle
        m_date8 = date8_list[int(len(date8_list) / 2)]
    else:
        # Get temporal baseline list
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        # Normalization (Pepe and Lanari, 2006, TGRS)
        temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (
            max(tbase_list) - min(tbase_list))
        tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]
        # Get distance matrix
        ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list),
                                     np.array(tbase_list))
        ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list),
                                     np.array(pbase_list))
        ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
        ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix
        # 2D distance matrix in temp/perp domain
        disMat = np.sqrt(np.square(ttMat) + np.square(ppMat))

        # Choose the date that minimizes the mean distance in the temp/perp baseline domain
        disMean = np.mean(disMat, 0)
        m_idx = np.argmin(disMean)
        m_date8 = date8_list[m_idx]
    return m_date8
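The selection boils down to picking the column with the smallest mean value in the normalized 2D distance matrix. A standalone NumPy sketch with toy baselines (values are illustrative only; scaling the differences is equivalent to scaling tbase first):

import numpy as np

tbase = np.array([0., 100., 200., 300.])   # temporal baselines in days (toy)
pbase = np.array([0., 50., -30., 80.])     # perpendicular baselines in m (toy)
scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())
tt = np.abs(tbase[:, None] - tbase[None, :]) * scale   # scaled temporal distances
pp = np.abs(pbase[:, None] - pbase[None, :])           # perpendicular distances
dis = np.sqrt(tt**2 + pp**2)
print(np.argmin(dis.mean(axis=0)))  # index of the most central acquisition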
Example #5
def read_exclude_date(ex_date_list, date_list_all):
    """Read exclude dates info
    Parameters: ex_date_list  : list of string, date in YYMMDD or YYYYMMDD format,
                                or text file with date in it
                date_list_all : list of string, date in YYYYMMDD format
    Returns:    drop_date     : 1D array of bool in size of (num_date,)
    """
    # Read exclude date input
    if ex_date_list:
        tempList = []
        for d in ex_date_list:
            if os.path.isfile(d):
                tempList += ptime.read_date_list(d)
            else:
                tempList.append(d)
        ex_date_list = sorted(ptime.yyyymmdd(tempList))
        print(('exclude the following dates for DEM error estimation:'
               ' ({})\n{}').format(len(ex_date_list), ex_date_list))
    else:
        ex_date_list = []

    # convert to mark array
    drop_date = np.array([i not in ex_date_list for i in date_list_all],
                         dtype=np.bool_)
    return drop_date
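The returned mask simply flags every date that is not excluded; a standalone sketch with toy dates:

import numpy as np

date_list_all = ['20070106', '20070709', '20070824']
ex_date_list = ['20070709']
drop_date = np.array([d not in ex_date_list for d in date_list_all], dtype=np.bool_)
print(drop_date)  # [ True False  True]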
Example #6
def exclude_dates():
    global inps, dateList

    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []

        for ex_date in input_ex_date:
            if os.path.isfile(ex_date):
                ex_date = ptime.read_date_list(ex_date)
            else:
                ex_date = [ptime.yyyymmdd(ex_date)]
            inps.ex_date_list += list(
                set(ex_date) - set(inps.ex_date_list))

        # delete dates that do not exist in the input file
        inps.ex_date_list = sorted(
            list(set(inps.ex_date_list).intersection(dateList)))
        inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
        inps.ex_idx_list = sorted(
            [dateList.index(i) for i in inps.ex_date_list])
        print(('exclude date:' + str(inps.ex_date_list)))
Example #7
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
    print('-'*50)
    print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
    print('-'*50)
    atr = readfile.read_attribute(fname)

    # Check Sensor Type
    platform = atr['PLATFORM']
    print('platform: '+platform)
    if platform.lower() not in ['env', 'envisat']:
        print('No need to correct LOD for '+platform)
        return

    # output file name
    if not out_file:
        out_file = '{}_LODcor{}'.format(os.path.splitext(fname)[0], os.path.splitext(fname)[1])

    # Get LOD ramp rate from empirical model
    if not rg_dist_file:
        print('calculate range distance from file metadata')
        rg_dist = get_relative_range_distance(atr)
    else:
        print('read range distance from file: %s' % (rg_dist_file))
        rg_dist = readfile.read(rg_dist_file, datasetName='slantRangeDistance', print_msg=False)[0]
        rg_dist -= rg_dist[int(atr['REF_Y']), int(atr['REF_X'])]
    ramp_rate = np.array(rg_dist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input fname
    range2phase = -4*np.pi / float(atr['WAVELENGTH'])
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        # read
        obj = timeseries(fname)
        obj.open()
        data = obj.read()

        # correct LOD
        diff_year = np.array(obj.yearList)
        diff_year -= diff_year[obj.refIndex]
        for i in range(data.shape[0]):
            data[i, :, :] -= ramp_rate * diff_year[i]

        # write
        obj_out = timeseries(out_file)
        obj_out.write2hdf5(data, refFile=fname)

    elif k in ['.unw']:
        data, atr = readfile.read(fname)

        dates = ptime.yyyymmdd2years(ptime.yyyymmdd(atr['DATE12'].split('-')))
        dt = dates[1] - dates[0]
        data -= ramp_rate * range2phase * dt

        writefile.write(data, out_file=out_file, metadata=atr)
    else:
        print('No need to correct for LOD for %s file' % (k))
    return out_file
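For the .unw branch, the correction is a per-pixel phase ramp scaled by the time span between the two acquisitions. A standalone sketch of that arithmetic (the 0.0562 m Envisat wavelength and the range distance are assumed toy values, not taken from the code above):

import numpy as np

wavelength = 0.0562             # Envisat C-band wavelength in m (assumed here)
range2phase = -4 * np.pi / wavelength
rg_dist = 30000.0               # relative slant-range distance in m (toy value)
ramp_rate = rg_dist * 3.87e-7   # empirical LOD ramp rate in m/year
dt = 1.5                        # temporal baseline in years
print(ramp_rate * range2phase * dt)  # phase correction in radians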
Example #8
def read_exclude_date(inps, dateListAll):
    # Merge ex_date/startDate/endDate into ex_date
    yy_list_all = ptime.yyyymmdd2years(dateListAll)
    exDateList = []
    # 1. template_file
    if inps.template_file:
        print('read option from template file: ' + inps.template_file)
        inps = read_template2inps(inps.template_file, inps)

    # 2. ex_date
    input_ex_date = list(inps.excludeDate)
    if input_ex_date:
        for ex_date in input_ex_date:
            if os.path.isfile(ex_date):
                ex_date = ptime.read_date_list(ex_date)
            else:
                ex_date = [ptime.yyyymmdd(ex_date)]
            exDateList += list(set(ex_date) - set(exDateList))
        # delete dates that do not exist in the input file
        exDateList = list(set(exDateList).intersection(dateListAll))
        print('exclude date:' + str(exDateList))

    # 3. startDate
    if inps.startDate:
        print('start date: ' + inps.startDate)
        yy_min = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.startDate))
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yy_list_all[i] < yy_min and date not in exDateList:
                print('  remove date: ' + date)
                exDateList.append(date)

    # 4. endDate
    if inps.endDate:
        print('end date: ' + inps.endDate)
        yy_max = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.endDate))
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yy_list_all[i] > yy_max and date not in exDateList:
                print('  remove date: ' + date)
                exDateList.append(date)
    exDateList = list(set(exDateList))
    return exDateList
Example #9
File: network.py  Project: whigg/PySAR
def select_pairs_star(date_list,
                      m_date=None,
                      pbase_list=[],
                      date12_format='YYMMDD-YYMMDD'):
    """Select Star-like network/interferograms/pairs, it's a single master network, similar to PS approach.
    Usage:
        m_date : master date, choose it based on the following cretiria:
                 1) near the center in temporal and spatial baseline
                 2) prefer winter season than summer season for less temporal decorrelation
    Reference:
        Ferretti, A., C. Prati, and F. Rocca (2001), Permanent scatterers in SAR interferometry, IEEE TGRS, 39(1), 8-20.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)

    # Select master date if not given
    if not m_date:
        m_date = select_master_date(date8_list, pbase_list)
        print(('auto select master date: ' + m_date))

    # Check input master date
    m_date8 = ptime.yyyymmdd(m_date)
    if m_date8 not in date8_list:
        print('Input master date does not exist in date list!')
        print(('Input master date: ' + str(m_date8)))
        print(('Input date list: ' + str(date8_list)))
        raise ValueError('master date ' + str(m_date8) + ' not found in date list')

    # Generate star/ps network
    m_idx = date8_list.index(m_date8)
    date12_idx_list = [
        sorted([m_idx, s_idx]) for s_idx in range(len(date8_list))
        if s_idx != m_idx
    ]
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
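A standalone sketch of the star-network construction, given a master index (toy dates):

dates = ['070106', '070709', '070824', '071008']
m_idx = 1  # master index, e.g. as returned by select_master_date()
pairs = [sorted([m_idx, s_idx]) for s_idx in range(len(dates)) if s_idx != m_idx]
print([dates[i] + '-' + dates[j] for i, j in pairs])
# ['070106-070709', '070709-070824', '070709-071008']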
Example #10
def read_template2inps(template_file, inps=None):
    """Read input template file into inps.ex_date"""
    if not inps:
        inps = cmd_line_parse()
    template = readfile.read_template(template_file)

    # Read template option
    prefix = 'pysar.topographicResidual.'

    key = prefix+'polyOrder'
    if key in template.keys():
        value = template[key]
        if value == 'auto':
            inps.poly_order = 2
        else:
            inps.poly_order = int(value)

    key = prefix+'excludeDate'
    if key in template.keys():
        value = template[key]
        if value not in ['auto', 'no']:
            value = value.replace(',', ' ').split()
            value = ptime.yyyymmdd(value)
            inps.ex_date += value

    key = prefix+'stepFuncDate'
    if key in template.keys():
        value = template[key]
        if value not in ['auto', 'no']:
            value = value.replace(',', ' ').split()
            value = ptime.yyyymmdd(value)
            inps.step_date += value

    key = prefix+'phaseVelocity'
    if key in template.keys():
        value = template[key]
        if value.lower() not in ['auto', 'no']:
            inps.min_phase_velocity = True

    return inps
Example #11
def select_pairs_all(date_list):
    """Select All Possible Pairs/Interferograms
    Input : date_list   - list of date in YYMMDD/YYYYMMDD format
    Output: date12_list - list date12 in YYMMDD-YYMMDD format
    Reference:
        Berardino, P., G. Fornaro, R. Lanari, and E. Sansosti (2002), A new algorithm for surface deformation monitoring
        based on small baseline differential SAR interferograms, IEEE TGRS, 40(11), 2375-2383.
    """
    date8_list = sorted(ptime.yyyymmdd(date_list))
    date6_list = ptime.yymmdd(date8_list)
    date12_list = list(itertools.combinations(date6_list, 2))
    date12_list = [date12[0] + '-' + date12[1] for date12 in date12_list]
    return date12_list
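itertools.combinations does all the work here; a standalone sketch with toy dates:

import itertools

dates = ['070106', '070709', '070824']
print(['-'.join(p) for p in itertools.combinations(dates, 2)])
# ['070106-070709', '070106-070824', '070709-070824']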
Example #12
File: network.py  Project: whigg/PySAR
def read_baseline_file(baselineFile, exDateList=[]):
    """Read bl_list.txt without dates listed in exDateList
    # Date  Bperp    dop0/PRF  dop1/PRF   dop2/PRF      PRF    slcDir
    070106     0.0   0.03      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070106/
    070709  2631.9   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070709/
    070824  2787.3   0.07      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070824/
    ...

    Examples:
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile)
        date8List, perpBaseList, dopList, slcDirList = read_baseline_file(baselineFile, ['080520', '100726'])
        date8List, perpBaseList = read_baseline_file(baselineFile)[0:2]
    """
    exDateList = ptime.yymmdd(exDateList)
    if not exDateList:
        exDateList = []

    # Read baseline file into lines
    with open(baselineFile) as fb:
        lines = [line.strip() for line in fb]

    # Read each line and put the values into arrays
    date6List = []
    perpBaseList = []
    dopplerList = []
    slcDirList = []
    for line in lines:
        c = line.split()  # splits on whitespace
        date = c[0]
        if date not in exDateList:
            date6List.append(date)
            perpBaseList.append(float(c[1]))
            try:
                dop = np.array([float(c[2]), float(c[3]), float(c[4])])
                prf = float(c[5])
                dop *= prf
                dopplerList.append(dop)
            except (IndexError, ValueError):
                pass
            try:
                slcDirList.append(c[6])
            except IndexError:
                pass

    date8List = ptime.yyyymmdd(date6List)
    return date8List, perpBaseList, dopplerList, slcDirList
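Each bl_list.txt line parses into date, perpendicular baseline, Doppler polynomial (scaled by PRF), and SLC directory; a standalone sketch on one sample line from the docstring above:

line = '070106     0.0   0.03      0.0000000  0.00000000000 2155.2 /scratch/KyushuT422F650AlosA/SLC/070106/'
c = line.split()
date, bperp, prf = c[0], float(c[1]), float(c[5])
dop = [float(x) * prf for x in c[2:5]]   # dop0/dop1/dop2 scaled by PRF
print(date, bperp, dop, c[6])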
Example #13
def read_template2inps(template_file, inps=None):
    """Read input template file into inps.excludeDate"""
    if not inps:
        inps = cmd_line_parse()
    inpsDict = vars(inps)
    print('read options from template file: ' +
          os.path.basename(template_file))
    template = readfile.read_template(template_file)
    template = ut.check_template_auto_value(template)

    # Read template option
    prefix = 'pysar.velocity.'
    keyList = [
        i for i in list(inpsDict.keys()) if prefix + i in template.keys()
    ]
    for key in keyList:
        value = template[prefix + key]
        if value:
            if key in ['startDate', 'endDate']:
                inpsDict[key] = ptime.yyyymmdd(value)
            elif key in ['excludeDate']:
                inpsDict[key] = ptime.yyyymmdd(value.replace(',', ' ').split())
    return inps
Example #14
def read_exclude_date(inps, dateListAll):
    # Merge ex_date/startDate/endDate into ex_date
    yy_list_all = ptime.yyyymmdd2years(dateListAll)
    exDateList = []
    # 1. template_file
    if inps.template_file:
        print('read option from template file: ' + inps.template_file)
        inps = read_template2inps(inps.template_file, inps)

    # 2. ex_date
    exDateList += ptime.read_date_list(list(inps.excludeDate),
                                       date_list_all=dateListAll)
    if exDateList:
        print('exclude date:' + str(exDateList))

    # 3. startDate
    if inps.startDate:
        print('start date: ' + inps.startDate)
        yy_min = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.startDate))
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yy_list_all[i] < yy_min and date not in exDateList:
                print('  remove date: ' + date)
                exDateList.append(date)

    # 4. endDate
    if inps.endDate:
        print('end date: ' + inps.endDate)
        yy_max = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.endDate))
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yy_list_all[i] > yy_max and date not in exDateList:
                print('  remove date: ' + date)
                exDateList.append(date)
    exDateList = list(set(exDateList))
    return exDateList
Example #15
File: network.py  Project: whigg/PySAR
def date12_list2index(date12_list, date_list=[]):
    """Convert list of date12 string into list of index"""
    # Get dateList from date12List
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = list(set(m_dates + s_dates))
    date6_list = ptime.yymmdd(sorted(ptime.yyyymmdd(date_list)))

    # Get pair index
    pairs_idx = []
    for date12 in date12_list:
        dates = date12.split('-')
        pair_idx = [date6_list.index(dates[0]), date6_list.index(dates[1])]
        pairs_idx.append(pair_idx)

    return pairs_idx
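A standalone sketch of the date12-to-index conversion (toy dates):

date6_list = ['070106', '070709', '070824']
date12_list = ['070106-070709', '070709-070824']
pairs_idx = [[date6_list.index(a), date6_list.index(b)]
             for a, b in (d12.split('-') for d12 in date12_list)]
print(pairs_idx)  # [[0, 1], [1, 2]]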
Example #16
File: network.py  Project: whigg/PySAR
def select_pairs_mst(date_list, pbase_list, date12_format='YYMMDD-YYMMDD'):
    """Select Pairs using Minimum Spanning Tree technique
        Connection Cost is calculated using the baseline distance in perp and scaled temporal baseline (Pepe and Lanari,
        2006, TGRS) plane.
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
    References:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Perissin D., Wang T. (2012), Repeat-pass SAR interferometry with partially coherent targets. IEEE TGRS. 271-280
    """
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (max(tbase_list) -
                                                             min(tbase_list))
    tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Get weight matrix
    ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
    ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
    ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
    ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix

    # 2D distance matrix in temp/perp domain
    weightMat = np.sqrt(np.square(ttMat) + np.square(ppMat))
    weightMat = sparse.csr_matrix(weightMat)  # compress sparse row matrix

    # MST path based on weight matrix
    mstMat = sparse.csgraph.minimum_spanning_tree(weightMat)

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [
        date_idx_array.tolist() for date_idx_array in sparse.find(mstMat)[0:2]
    ]
    date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]] + '-' + date6_list[idx[1]]
        date12_list.append(date12)
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
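The scipy MST machinery used above works on any symmetric distance matrix; a standalone sketch with a toy 4-acquisition matrix:

import numpy as np
from scipy import sparse
from scipy.sparse.csgraph import minimum_spanning_tree

dist = np.array([[0., 1., 4., 6.],
                 [1., 0., 2., 5.],
                 [4., 2., 0., 3.],
                 [6., 5., 3., 0.]])  # toy temp/perp distances
mst = minimum_spanning_tree(sparse.csr_matrix(dist))
rows, cols = sparse.find(mst)[0:2]
# MST edges (orientation may vary by scipy version), e.g. [(0, 1), (1, 2), (2, 3)]
print(sorted(zip(rows.tolist(), cols.tolist())))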
Example #17
File: network.py  Project: whigg/PySAR
def select_master_interferogram(date12_list,
                                date_list,
                                pbase_list,
                                m_date=None):
    """Select reference interferogram based on input temp/perp baseline info
    If master_date is specified, select its closest slave_date, which is newer than master_date;
        otherwise, choose the closest pair among all pairs as master interferogram.
    Example:
        master_date12   = pnet.select_master_ifgram(date12_list, date_list, pbase_list)
        '080211-080326' = pnet.select_master_ifgram(date12_list, date_list, pbase_list, m_date='080211')
    """
    pbase_array = np.array(pbase_list, dtype='float64')
    # Get temporal baseline
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    tbase_array = np.array(ptime.date_list2tbase(date8_list)[0],
                           dtype='float64')
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_array) - min(pbase_array)) / (
        max(tbase_array) - min(tbase_array))
    tbase_array *= temp2perp_scale

    # Calculate sqrt of temp/perp baseline for input pairs
    idx1 = np.array(
        [date6_list.index(date12.split('-')[0]) for date12 in date12_list])
    idx2 = np.array(
        [date6_list.index(date12.split('-')[1]) for date12 in date12_list])
    base_distance = np.sqrt((tbase_array[idx2] - tbase_array[idx1])**2 +
                            (pbase_array[idx2] - pbase_array[idx1])**2)

    # Get master interferogram index
    if not m_date:
        # Choose pair with shortest temp/perp baseline
        m_date12_idx = np.argmin(base_distance)
    else:
        m_date = ptime.yymmdd(m_date)
        # Choose the pair containing m_date with the shortest temp/perp baseline
        m_date12_idx_array = np.array([
            date12_list.index(date12) for date12 in date12_list
            if m_date + '-' in date12
        ])
        min_base_distance = np.min(base_distance[m_date12_idx_array])
        m_date12_idx = np.where(base_distance == min_base_distance)[0][0]

    m_date12 = date12_list[m_date12_idx]
    return m_date12
Example #18
def read_ref_date(inps):
    if not inps.refDate:
        print('No reference date input, skip this step.')
        return inps.timeseries_file

    elif os.path.isfile(inps.refDate):
        print('read reference date from file: ' + inps.refDate)
        inps.refDate = ptime.read_date_txt(inps.refDate)[0]
    inps.refDate = ptime.yyyymmdd(inps.refDate)
    print('input reference date: {}'.format(inps.refDate))

    # check input reference date
    date_list = timeseries(inps.timeseries_file[0]).get_date_list()
    if inps.refDate not in date_list:
        msg = 'input reference date: {} is not found.'.format(inps.refDate)
        msg += '\nAll available dates:\n{}'.format(date_list)
        raise Exception(msg)
    return inps.refDate
Example #19
File: network.py  Project: whigg/PySAR
def threshold_perp_baseline(date12_list,
                            date_list,
                            pbase_list,
                            pbase_max,
                            pbase_min=0.0):
    """Remove pairs/interoferogram out of [pbase_min, pbase_max]
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        date_list   : list of string for date in YYMMDD/YYYYMMDD format, optional
        pbase_list  : list of float for perpendicular spatial baseline
        pbase_max   : float, maximum perpendicular baseline
        pbase_min   : float, minimum perpendicular baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_perp_baseline(date12_list, date_list, pbase_list, 500)
    """
    if not date12_list:
        return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if len(date_list) != len(pbase_list):
            print(
                'ERROR: number of existing dates is not equal to number of perp baseline!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        pbase = abs(pbase_list[idx1] - pbase_list[idx2])
        if pbase_min <= pbase <= pbase_max:
            date12_list_out.append(date12)
    return date12_list_out
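A standalone sketch of the thresholding loop, reusing the toy baselines from the bl_list.txt example above (500 m maximum):

date6_list = ['070106', '070709', '070824']
pbase_list = [0.0, 2631.9, 2787.3]
date12_list = ['070106-070709', '070709-070824']
kept = [d12 for d12 in date12_list
        if abs(pbase_list[date6_list.index(d12.split('-')[0])] -
               pbase_list[date6_list.index(d12.split('-')[1])]) <= 500.0]
print(kept)  # ['070709-070824'] -- the 2631.9 m pair is dropped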
Example #20
File: network.py  Project: whigg/PySAR
def threshold_doppler_overlap(date12_list,
                              date_list,
                              dop_list,
                              bandwidth_az,
                              dop_overlap_min=0.15):
    """Remove pairs/interoferogram with doppler overlap larger than critical value
    Inputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
        date_list   : list of string, for date in YYMMDD/YYYYMMDD format, optional
        dop_list    : list of list of 3 float, for centroid Doppler frequency
        bandwidth_az    : float, bandwidth in azimuth direction
        dop_overlap_min : float, minimum overlap of azimuth Doppler frequency
    Outputs:
        date12_list : list of string, for date12 in YYMMDD-YYMMDD format
    """
    if not date12_list:
        return []
    # Get date6_list
    if not date_list:
        m_dates = [date12.split('-')[0] for date12 in date12_list]
        s_dates = [date12.split('-')[1] for date12 in date12_list]
        date_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
        if len(date_list) != len(dop_list):
            print(
                'ERROR: number of existing dates is not equal to number of Doppler values!'
            )
            print('date list is needed for threshold filtering!')
            print('skip filtering.')
            return date12_list
    date6_list = ptime.yymmdd(date_list)

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        dop_overlap = calculate_doppler_overlap(dop_list[idx1], dop_list[idx2],
                                                bandwidth_az)
        if dop_overlap >= dop_overlap_min:
            date12_list_out.append(date12)
    return date12_list_out
Example #21
File: network.py  Project: whigg/PySAR
def select_pairs_delaunay(date_list,
                          pbase_list,
                          norm=True,
                          date12_format='YYMMDD-YYMMDD'):
    """Select Pairs using Delaunay Triangulation based on temporal/perpendicular baselines
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
        norm       : normalize temporal baseline to perpendicular baseline
    Key points
        1. Define a ratio between perpendicular and temporal baseline axis units (Pepe and Lanari, 2006, TGRS).
        2. Pairs with too large perpendicular / temporal baseline or Doppler centroid difference should be removed
           after this, using a threshold, to avoid strong decorrelations (Zebker and Villasenor, 1992, TGRS).
    Reference:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Zebker, H. A., and J. Villasenor (1992), Decorrelation in interferometric radar echoes, IEEE TGRS, 30(5), 950-959.
    """
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Normalization (Pepe and Lanari, 2006, TGRS)
    if norm:
        temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (
            max(tbase_list) - min(tbase_list))
        tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Generate Delaunay Triangulation
    date12_idx_list = Triangulation(tbase_list, pbase_list).edges.tolist()
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [
        date6_list[idx[0]] + '-' + date6_list[idx[1]]
        for idx in date12_idx_list
    ]
    if date12_format == 'YYYYMMDD_YYYYMMDD':
        date12_list = ptime.yyyymmdd_date12(date12_list)
    return date12_list
Example #22
def ref_date_file(inFile, refDate, outFile=None):
    """Change input file reference date to a different one."""
    if not outFile:
        outFile = os.path.splitext(inFile)[0] + '_refDate.h5'
    refDate = ptime.yyyymmdd(refDate)
    print('input reference date: ' + refDate)

    # Input file info
    obj = timeseries(inFile)
    obj.open()
    atr = dict(obj.metadata)
    if atr['FILE_TYPE'] != 'timeseries':
        print('ERROR: input file is {}, only timeseries is supported.'.format(
            atr['FILE_TYPE']))
        return None
    if refDate not in obj.dateList:
        print(
            'ERROR: Input reference date was not found!\nAll dates available: {}'
            .format(obj.dateList))
        return None
    if refDate == atr['REF_DATE']:
        print('Same reference date chosen as existing reference date.')
        print('Copy {} to {}'.format(inFile, outFile))
        shutil.copy2(inFile, outFile)
        return outFile

    # Referencing in time
    data = obj.read()
    data -= np.tile(
        data[obj.dateList.index(refDate), :, :].reshape(
            1, data.shape[1], data.shape[2]), (data.shape[0], 1, 1))
    atr['REF_DATE'] = refDate

    outObj = timeseries(outFile)
    outObj.write2hdf5(data, refFile=inFile, metadata=atr)
    return outFile
Example #23
File: tsview.py  Project: pfgoting/PySAR
def read_exclude_date(input_ex_date, dateListAll):
    ex_date_list = []
    ex_dates = []
    ex_flag = np.ones((len(dateListAll)), np.bool_)

    if input_ex_date:
        for ex_date in list(input_ex_date):
            if os.path.isfile(ex_date):
                ex_date = ptime.read_date_list(ex_date)
            else:
                ex_date = [ptime.yyyymmdd(ex_date)]
            ex_date_list += list(set(ex_date) - set(ex_date_list))

        # delete dates that do not exist in the input file
        ex_date_list = sorted(
            list(set(ex_date_list).intersection(dateListAll)))
        ex_dates = ptime.date_list2vector(ex_date_list)[0]
        for i in ex_date_list:
            ex_flag[dateListAll.index(i)] = False
        print('exclude date:' + str(ex_date_list))
    return ex_date_list, ex_dates, ex_flag
Example #24
File: plot.py  Project: fossabot/PySAR
def plot_coherence_history(ax, date12List, cohList, plot_dict={}):
    """Plot min/max Coherence of all interferograms for each date"""
    # Figure Setting
    plot_dict.setdefault('fontsize', 12)
    plot_dict.setdefault('linewidth', 2)
    plot_dict.setdefault('markercolor', 'orange')
    plot_dict.setdefault('markersize', 16)
    plot_dict.setdefault('disp_title', True)
    plot_dict.setdefault('every_year', 1)

    # Get date list
    date12List = ptime.yyyymmdd_date12(date12List)
    m_dates = [date12.split('_')[0] for date12 in date12List]
    s_dates = [date12.split('_')[1] for date12 in date12List]
    dateList = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))

    dates, datevector = ptime.date_list2vector(dateList)
    bar_width = ut.most_common(np.diff(dates).tolist())*3/4
    x_list = [i-bar_width/2 for i in dates]

    coh_mat = pnet.coherence_matrix(date12List, cohList)

    ax.bar(x_list, np.nanmax(coh_mat, axis=0), bar_width.days, label='Max Coherence')
    ax.bar(x_list, np.nanmin(coh_mat, axis=0), bar_width.days, label='Min Coherence')

    if plot_dict['disp_title']:
        ax.set_title('Coherence History of All Related Interferograms')

    ax = auto_adjust_xaxis_date(ax, datevector, plot_dict['fontsize'],
                                every_year=plot_dict['every_year'])[0]
    ax.set_ylim([0.0, 1.0])

    ax.set_xlabel('Time [years]', fontsize=plot_dict['fontsize'])
    ax.set_ylabel('Coherence', fontsize=plot_dict['fontsize'])
    ax.legend(loc='lower right')

    return ax
Example #25
File: network.py  Project: whigg/PySAR
def threshold_coherence_based_mst(date12_list, coh_list):
    """Return a minimum spanning tree of network based on the coherence inverse.
    Inputs:
        date12_list - list of string in YYMMDD-YYMMDD format
        coh_list    - list of float, average coherence for each interferogram
    Output:
        mst_date12_list - list of string in YYMMDD-YYMMDD format, for MST network of interferograms 
    """
    # coh_list --> coh_mat --> weight_mat
    coh_mat = coherence_matrix(date12_list, coh_list)
    mask = ~np.isnan(coh_mat)
    wei_mat = np.zeros(coh_mat.shape)
    wei_mat[:] = np.inf
    wei_mat[mask] = 1 / coh_mat[mask]

    # MST path based on weight matrix
    wei_mat_csr = sparse.csr_matrix(wei_mat)
    mst_mat_csr = sparse.csgraph.minimum_spanning_tree(wei_mat_csr)

    # Get date6_list
    date12_list = ptime.yymmdd_date12(date12_list)
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date6_list = ptime.yymmdd(
        sorted(ptime.yyyymmdd(list(set(m_dates + s_dates)))))

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [
        date_idx_array.tolist()
        for date_idx_array in sparse.find(mst_mat_csr)[0:2]
    ]
    mst_date12_list = []
    for i in range(len(m_idx_list)):
        idx = sorted([m_idx_list[i], s_idx_list[i]])
        date12 = date6_list[idx[0]] + '-' + date6_list[idx[1]]
        mst_date12_list.append(date12)
    return mst_date12_list
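The coherence-to-weight conversion is the key step: taking 1/coherence turns high-coherence pairs into low-cost edges, so the MST prefers them. A standalone sketch with toy coherence values:

import numpy as np

coh_mat = np.array([[np.nan, 0.8, 0.2],
                    [0.8, np.nan, 0.6],
                    [0.2, 0.6, np.nan]])   # toy pairwise coherence
wei_mat = np.full(coh_mat.shape, np.inf)   # missing pairs get infinite cost
mask = ~np.isnan(coh_mat)
wei_mat[mask] = 1. / coh_mat[mask]
print(wei_mat.round(2))  # the 0.8-coherence edge is now the cheapest (1.25)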
Example #26
File: network.py  Project: whigg/PySAR
def threshold_temporal_baseline(date12_list,
                                btemp_max,
                                keep_seasonal=True,
                                btemp_min=0.0):
    """Remove pairs/interferograms out of min/max/seasonal temporal baseline limits
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        btemp_max   : float, maximum temporal baseline
        btemp_min   : float, minimum temporal baseline
        keep_seasonal : keep interferograms with seasonal temporal baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_temporal_baseline(date12_list, 200)
        date12_list = threshold_temporal_baseline(date12_list, 200, False)
    """
    if not date12_list:
        return []
    # Get date list and tbase list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date8_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    date6_list = ptime.yymmdd(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        tbase = int(abs(tbase_list[idx1] - tbase_list[idx2]))
        if btemp_min <= tbase <= btemp_max:
            date12_list_out.append(date12)
        elif keep_seasonal and tbase // 30 in [11, 12]:
            date12_list_out.append(date12)
    return date12_list_out
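Note the integer division: tbase // 30 maps the temporal baseline to whole months, so pairs roughly 11-12 months apart survive as seasonal pairs even beyond btemp_max. A standalone check:

tbase = 345                        # days between acquisitions (toy value)
print(tbase // 30)                 # 11 -> about one year apart
print(tbase // 30 in [11, 12])     # True: kept as a seasonal pair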
Example #27
def read_template2inps(templateFile, inps=None):
    """Read network options from template file into Namespace variable inps"""
    if not inps:
        inps = cmd_line_parse()
    inpsDict = vars(inps)

    # Read template file
    template = readfile.read_template(templateFile)
    template = ut.check_template_auto_value(
        template, auto_file='../defaults/selectNetwork.cfg')
    if not template:
        log('Empty template: ' + templateFile)
        return None

    prefix = 'selectNetwork.'
    # Check obsolete option prefix
    for i in ['selectPairs.', 'select.network.']:
        if any(i in key for key in template.keys()):
            msg = 'obsolete option prefix detected: {}\n'.format(i)
            msg += 'Use {} instead'.format(prefix)
            raise Exception(msg)
    if all(prefix not in key for key in template.keys()):
        msg = 'no valid input option detected in template file!\n'
        msg += 'Check the template below for supported options:\n'
        msg += TEMPLATE
        raise Exception(msg)

    # convert template into inpsDict
    keyList = [
        i for i in list(inpsDict.keys()) if prefix + i in template.keys()
    ]
    for key in keyList:
        value = template[prefix + key]
        # bool
        if key in ['keepSeasonal']:
            inpsDict[key] = value
        elif value:
            # str
            if key in ['method', 'referenceFile', 'tempPerpList']:
                inpsDict[key] = value
            # date in YYYYMMDD
            elif key in ['masterDate', 'startDate', 'endDate']:
                inpsDict[key] = ptime.yyyymmdd(value)
            # list of dates in YYYYMMDD
            elif key in ['excludeDate']:
                inps.excludeDate = ptime.yyyymmdd(
                    [i.strip() for i in value.split(',')])
            # float
            elif key in [
                    'perpBaseMax', 'tempBaseMax', 'tempBaseMin',
                    'dopOverlapMin'
            ]:
                inpsDict[key] = float(value)
            # int
            elif key in ['connNum']:
                inpsDict[key] = int(value)

    # read tempPerpList from str
    if isinstance(inps.tempPerpList, str):
        inps.tempPerpList = [[float(j) for j in i.split(',')]
                             for i in inps.tempPerpList.split(';')]

    # Initial network using input methods
    inps.method = inps.method.lower().replace('-', '_')
    if inps.method in ['star', 'ps']:
        inps.method = 'star'
    elif inps.method.startswith('seq'):
        inps.method = 'sequential'
    elif inps.method.startswith('hierar'):
        inps.method = 'hierarchical'
    elif inps.method in ['mst', 'min_spanning_tree', 'minimum_spanning_tree']:
        inps.method = 'mst'

    # for coherence prediction
    key = 'PLATFORM'
    if key in template.keys() and not inps.sensor:
        inps.sensor = template[key]

    key = 'COH_COLOR_JUMP'
    if key in template.keys():
        inps.coh_thres = float(template[key])

    # project name and sensor
    project_name = os.path.splitext(os.path.basename(templateFile))[0]
    log('project name: ' + project_name)
    if not inps.sensor:
        inps.sensor = sensor.project_name2sensor(project_name)[0]

    # Output directory/filename
    if not inps.outfile:
        if autoPath and 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv(
                'SCRATCHDIR') + '/' + project_name + '/PROCESS'
        else:
            try:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.referenceFile))
            except Exception:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir + '/ifgram_list.txt'

    # Auto path of bl_list.txt file (for Miami user)
    if not inps.baseline_file and autoPath and 'SCRATCHDIR' in os.environ:
        bl_file = os.path.join(os.getenv('SCRATCHDIR'),
                               '{}/SLC/bl_list.txt'.format(project_name))
        if os.path.isfile(bl_file):
            inps.baseline_file = bl_file

    if not inps.referenceFile and not inps.baseline_file:
        raise Exception(
            'No baseline file or reference file found! At least one is required.'
        )

    return inps
Example #28
def read_inps_dict2geometry_dict_object(inpsDict):

    # eliminate dsName by processor
    if inpsDict['processor'] in ['isce', 'doris']:
        datasetName2templateKey.pop('azimuthCoord')
        datasetName2templateKey.pop('rangeCoord')
    elif inpsDict['processor'] in ['roipac', 'gamma']:
        datasetName2templateKey.pop('latitude')
        datasetName2templateKey.pop('longitude')
    else:
        print('Unrecognized InSAR processor: {}'.format(
            inpsDict['processor']))

    # inpsDict --> dsPathDict
    print('-' * 50)
    print('searching geometry files info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    for dsName in [
            i for i in geometryDatasetNames
            if i in datasetName2templateKey.keys()
    ]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key])))
            if len(files) > 0:
                if dsName == 'bperp':
                    bperpDict = {}
                    for file in files:
                        date = ptime.yyyymmdd(
                            os.path.basename(os.path.dirname(file)))
                        bperpDict[date] = file
                    dsPathDict[dsName] = bperpDict
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=inpsDict[key]))
                    print('number of bperp files: {}'.format(
                        len(list(bperpDict.keys()))))
                else:
                    dsPathDict[dsName] = files[0]
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=files[0]))

    # Check required dataset
    dsName0 = geometryDatasetNames[0]
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))

    # metadata
    ifgramRadarMetadata = None
    ifgramKey = datasetName2templateKey['unwrapPhase']
    if ifgramKey in inpsDict.keys():
        ifgramFiles = glob.glob(str(inpsDict[ifgramKey]))
        if len(ifgramFiles) > 0:
            atr = readfile.read_attribute(ifgramFiles[0])
            if 'Y_FIRST' not in atr.keys():
                ifgramRadarMetadata = atr.copy()

    # dsPathDict --> dsGeoPathDict + dsRadarPathDict
    dsNameList = list(dsPathDict.keys())
    dsGeoPathDict = {}
    dsRadarPathDict = {}
    for dsName in dsNameList:
        if dsName == 'bperp':
            atr = readfile.read_attribute(
                next(iter(dsPathDict[dsName].values())))
        else:
            atr = readfile.read_attribute(dsPathDict[dsName])
        if 'Y_FIRST' in atr.keys():
            dsGeoPathDict[dsName] = dsPathDict[dsName]
        else:
            dsRadarPathDict[dsName] = dsPathDict[dsName]

    geomRadarObj = None
    geomGeoObj = None
    if len(dsRadarPathDict) > 0:
        geomRadarObj = geometryDict(processor=inpsDict['processor'],
                                    datasetDict=dsRadarPathDict,
                                    extraMetadata=ifgramRadarMetadata)
    if len(dsGeoPathDict) > 0:
        geomGeoObj = geometryDict(processor=inpsDict['processor'],
                                  datasetDict=dsGeoPathDict,
                                  extraMetadata=None)
    return geomRadarObj, geomGeoObj
Example #29
def read_inps_dict2ifgram_stack_dict_object(inpsDict):
    """Read input arguments into dict of ifgramStackDict object"""
    # inpsDict --> dsPathDict
    print('-' * 50)
    print('searching interferometric pairs info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    dsNumDict = {}
    for dsName in [
            i for i in ifgramDatasetNames
            if i in datasetName2templateKey.keys()
    ]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key])))
            if len(files) > 0:
                dsPathDict[dsName] = files
                dsNumDict[dsName] = len(files)
                print('{:<{width}}: {path}'.format(dsName,
                                                   width=maxDigit,
                                                   path=inpsDict[key]))

    # Check required dataset
    dsName0 = 'unwrapPhase'
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))
        return None

    # Check number of files for all dataset types
    dsNumList = list(dsNumDict.values())
    if any(i != dsNumList[0] for i in dsNumList):
        print(
            'WARNING: Not all types of dataset have the same number of files:')
        for key, value in dsNumDict.items():
            print('number of {:<{width}}: {num}'.format(key,
                                                        width=maxDigit,
                                                        num=value))
    print('number of files per type: {}'.format(dsNumList[0]))

    # Check data dimension for all files

    # dsPathDict --> pairsDict --> stackObj
    dsNameList = list(dsPathDict.keys())
    pairsDict = {}
    for dsPath in dsPathDict[dsName0]:
        dates = ptime.yyyymmdd(
            readfile.read_attribute(dsPath)['DATE12'].split('-'))

        #####################################
        # A dictionary of data files for a given pair.
        # One pair may have several types of dataset.
        # example ifgramPathDict = {'unwrapPhase': /pathToFile/filt.unw, 'iono':/PathToFile/iono.bil}
        # All data file paths must contain the master and slave dates, either in the file name or the folder name.

        ifgramPathDict = {}
        for i in range(len(dsNameList)):
            dsName = dsNameList[i]
            dsPath1 = dsPathDict[dsName][0]
            if all(d[2:8] in dsPath1 for d in dates):
                ifgramPathDict[dsName] = dsPath1
            else:
                dsPath2 = [
                    i for i in dsPathDict[dsName]
                    if all(d[2:8] in i for d in dates)
                ]
                if len(dsPath2) > 0:
                    ifgramPathDict[dsName] = dsPath2[0]
                else:
                    print('WARNING: {} file missing for pair {}'.format(
                        dsName, dates))
        ifgramObj = ifgramDict(dates=tuple(dates), datasetDict=ifgramPathDict)
        pairsDict[tuple(dates)] = ifgramObj

    if len(pairsDict) > 0:
        stackObj = ifgramStackDict(pairsDict=pairsDict)
    else:
        stackObj = None
    return stackObj
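The pairing key comes from the DATE12 attribute; a simplified standalone sketch of that conversion (assuming post-2000 acquisitions, unlike ptime.yyyymmdd which handles both centuries):

date12 = '070106-070709'                            # DATE12 metadata value (toy)
dates = tuple('20' + d for d in date12.split('-'))  # simplified YYMMDD -> YYYYMMDD
print(dates)  # ('20070106', '20070709')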
Example #30
def check_inputs(inps):
    parser = create_parser()

    # output directories/files
    atr = dict()
    pysar_dir = None
    if inps.timeseries_file:
        atr = readfile.read_attribute(inps.timeseries_file)
        pysar_dir = os.path.dirname(inps.timeseries_file)
        if not inps.outfile:
            fbase = os.path.splitext(inps.timeseries_file)[0]
            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
    elif inps.geom_file:
        atr = readfile.read_attribute(inps.geom_file)
        pysar_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
    else:
        pysar_dir = os.path.abspath(os.getcwd())

    # trop_file
    inps.trop_file = os.path.join(pysar_dir,
                                  'INPUTS/{}.h5'.format(inps.trop_model))
    print('output tropospheric delay file: {}'.format(inps.trop_file))

    # hour
    if not inps.hour:
        if 'CENTER_LINE_UTC' in atr.keys():
            inps.hour = ptime.closest_weather_product_time(
                atr['CENTER_LINE_UTC'], inps.trop_model)
        else:
            parser.print_usage()
            raise Exception('no input for hour')
    print('time of closest available product: {}:00 UTC'.format(inps.hour))

    # date list
    if inps.timeseries_file:
        print('read date list from timeseries file: {}'.format(
            inps.timeseries_file))
        ts_obj = timeseries(inps.timeseries_file)
        ts_obj.open(print_msg=False)
        inps.date_list = ts_obj.dateList
    elif len(inps.date_list) == 1:
        if os.path.isfile(inps.date_list[0]):
            print('read date list from text file: {}'.format(
                inps.date_list[0]))
            inps.date_list = ptime.yyyymmdd(
                np.loadtxt(inps.date_list[0], dtype=bytes,
                           usecols=(0, )).astype(str).tolist())
        else:
            parser.print_usage()
            raise Exception('ERROR: input date list < 2')

    # Grib data directory
    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
    if not os.path.isdir(inps.grib_dir):
        os.makedirs(inps.grib_dir)
        print('making directory: ' + inps.grib_dir)

    # Date list to grib file list
    inps.grib_file_list = date_list2grib_file(inps.date_list, inps.hour,
                                              inps.trop_model, inps.grib_dir)

    if 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
        print('reference pixel: {}'.format(inps.ref_yx))

    # Coordinate system: geocoded or not
    inps.geocoded = False
    if 'Y_FIRST' in atr.keys():
        inps.geocoded = True
    print('geocoded: {}'.format(inps.geocoded))

    # Prepare DEM, inc_angle, lat/lon file for PyAPS to read
    if inps.geom_file:
        geom_atr = readfile.read_attribute(inps.geom_file)
        print('converting DEM/incAngle for PyAPS to read')
        # DEM
        data = readfile.read(inps.geom_file,
                             datasetName='height',
                             print_msg=False)[0]
        inps.dem_file = 'pyapsDem.hgt'
        writefile.write(data, inps.dem_file, metadata=geom_atr)

        # inc_angle
        inps.inc_angle = readfile.read(inps.geom_file,
                                       datasetName='incidenceAngle',
                                       print_msg=False)[0]
        inps.inc_angle_file = 'pyapsIncAngle.flt'
        writefile.write(inps.inc_angle, inps.inc_angle_file, metadata=geom_atr)

        # latitude
        try:
            data = readfile.read(inps.geom_file,
                                 datasetName='latitude',
                                 print_msg=False)[0]
            print('converting lat for PyAPS to read')
            inps.lat_file = 'pyapsLat.flt'
            writefile.write(data, inps.lat_file, metadata=geom_atr)
        except Exception:
            inps.lat_file = None

        # longitude
        try:
            data = readfile.read(inps.geom_file,
                                 datasetName='longitude',
                                 print_msg=False)[0]
            print('converting lon for PyAPS to read')
            inps.lon_file = 'pyapsLon.flt'
            writefile.write(data, inps.lon_file, metadata=geom_atr)
        except Exception:
            inps.lon_file = None
    return inps, atr
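The np.loadtxt pattern used above for the date-list text file can be exercised without a file via io.StringIO; a standalone sketch with toy contents:

import io
import numpy as np

txt = '20070106  0.0\n20070709  2631.9\n'
date_list = np.loadtxt(io.StringIO(txt), dtype=bytes,
                       usecols=(0,)).astype(str).tolist()
print(date_list)  # ['20070106', '20070709']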