Example #1
def select_pairs_delaunay(date_list, pbase_list, norm=True):
    '''Select pairs using Delaunay triangulation based on temporal/perpendicular baselines
    Inputs:
        date_list  : list of dates in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
        norm       : bool, scale the temporal baseline to the range of the perpendicular baseline
    Key points:
        1. Define a ratio between the perpendicular and temporal baseline axis units (Pepe and Lanari, 2006, TGRS).
        2. Pairs with too large a perpendicular/temporal baseline or Doppler centroid difference should be removed
           afterwards, using a threshold, to avoid strong decorrelation (Zebker and Villasenor, 1992, TGRS).
    Reference:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Zebker, H. A., and J. Villasenor (1992), Decorrelation in interferometric radar echoes, IEEE TGRS, 30(5), 950-959.
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Normalization (Pepe and Lanari, 2006, TGRS)
    if norm:
        temp2perp_scale = (max(pbase_list)-min(pbase_list)) / (max(tbase_list)-min(tbase_list))
        tbase_list = [tbase*temp2perp_scale for tbase in tbase_list]
    
    # Generate Delaunay Triangulation
    date12_idx_list = Triangulation(tbase_list, pbase_list).edges.tolist()
    date12_idx_list = [sorted(idx) for idx in sorted(date12_idx_list)]

    # Convert index into date12
    date12_list = [date6_list[idx[0]]+'-'+date6_list[idx[1]] for idx in date12_idx_list]
    return date12_list
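The function above assumes Triangulation is imported from matplotlib.tri and that ptime is this package's date-utility module. A minimal self-contained sketch of the same idea, on synthetic baselines and with plain index pairs instead of date12 strings:

import numpy as np
from matplotlib.tri import Triangulation

# synthetic acquisitions: days since the first date, perpendicular baseline in meters
tbase = np.array([0., 35., 70., 140., 175.])
pbase = np.array([0., 120., -40., 300., 90.])

# scale the temporal axis to the perpendicular-baseline range (Pepe and Lanari, 2006)
scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())

# each edge of the Delaunay triangulation is one candidate pair
edges = Triangulation(tbase * scale, pbase).edges
pairs = sorted(sorted(e) for e in edges.tolist())
print(pairs)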
Example #2
def select_master_date(date_list, pbase_list=[]):
    '''Select super master date based on input temporal and/or perpendicular baseline info.
    Return master date in YYYYMMDD format.
    '''
    date8_list = ptime.yyyymmdd(date_list)
    if not pbase_list:
        # Choose date in the middle
        m_date8 = date8_list[int(len(date8_list)/2)]
    else:
        # Get temporal baseline list
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        # Normalization (Pepe and Lanari, 2006, TGRS)
        temp2perp_scale = (max(pbase_list)-min(pbase_list)) / (max(tbase_list)-min(tbase_list))
        tbase_list = [tbase*temp2perp_scale for tbase in tbase_list]
        # Get distance matrix
        ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
        ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
        ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
        ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix
        disMat = np.sqrt(np.square(ttMat) + np.square(ppMat))  # 2D distance matrix in temp/perp domain
        
        # Choose the date that minimizes the mean distance to all other acquisitions in the temp/perp plane
        disMean = np.mean(disMat, 0)
        m_idx = np.argmin(disMean)
        m_date8 = date8_list[m_idx]
    return m_date8
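The distance-matrix block can also be written with broadcasting instead of np.meshgrid; a sketch of the same selection on synthetic (hypothetical) baselines:

import numpy as np

tbase = np.array([0., 35., 70., 140., 175.])   # days
pbase = np.array([0., 120., -40., 300., 90.])  # meters

scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())
t = tbase * scale

# pairwise distances in the scaled temp/perp plane
dist = np.hypot(t[:, None] - t[None, :], pbase[:, None] - pbase[None, :])
m_idx = int(np.argmin(dist.mean(axis=0)))  # date closest to all others on average
print(m_idx)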
Example #3
def select_master_interferogram(date12_list, date_list, pbase_list, m_date=None):
    '''Select reference interferogram based on input temp/perp baseline info
    If m_date is specified, select the pair containing it with the closest baseline distance;
    otherwise, choose the closest pair among all pairs as the master interferogram.
    Example:
        master_date12 = pnet.select_master_interferogram(date12_list, date_list, pbase_list)
    '''
    pbase_array = np.array(pbase_list, dtype='float64')
    # Get temporal baseline
    date8_list = ptime.yyyymmdd(date_list)
    date6_list = ptime.yymmdd(date8_list)
    tbase_array = np.array(ptime.date_list2tbase(date8_list)[0], dtype='float64')
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_array)-min(pbase_array)) / (max(tbase_array)-min(tbase_array))
    tbase_array *= temp2perp_scale
    
    # Calculate the temp/perp baseline distance for the input pairs
    idx1 = np.array([date6_list.index(date12.split('-')[0]) for date12 in date12_list])
    idx2 = np.array([date6_list.index(date12.split('-')[1]) for date12 in date12_list])
    base_distance = np.sqrt((tbase_array[idx2] - tbase_array[idx1])**2 + (pbase_array[idx2] - pbase_array[idx1])**2)
    
    # Get master interferogram index
    if not m_date:
        # Choose pair with shortest temp/perp baseline
        m_date12_idx = np.argmin(base_distance)        
    else:
        m_date = ptime.yymmdd(m_date)
        # Choose the pair containing m_date with the shortest temp/perp baseline distance
        m_date12_idx_array = np.array([i for i, date12 in enumerate(date12_list) if m_date in date12])
        m_date12_idx = m_date12_idx_array[np.argmin(base_distance[m_date12_idx_array])]
    
    m_date12 = date12_list[m_date12_idx]
    return m_date12
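A small sketch of the m_date branch: restrict the candidate indices to pairs containing the given date, then take the argmin within that subset (dates and distances hypothetical):

import numpy as np

date12_list = ['061106-070121', '061106-070225', '070121-070225']
base_distance = np.array([110., 45., 160.])

m_date = '070121'
idx = np.array([i for i, d12 in enumerate(date12_list) if m_date in d12])
m_date12 = date12_list[idx[np.argmin(base_distance[idx])]]
print(m_date12)  # '061106-070121'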
Example #4
def threshold_temporal_baseline(date12_list, btemp_max, keep_seasonal=True, btemp_min=0.0):
    '''Remove pairs/interferograms out of min/max/seasonal temporal baseline limits
    Inputs:
        date12_list : list of string for date12 in YYMMDD-YYMMDD format
        btemp_max   : float, maximum temporal baseline
        btemp_min   : float, minimum temporal baseline
        keep_seasonal : bool, keep interferograms with a near-annual (seasonal) temporal baseline
    Output:
        date12_list_out : list of string for date12 in YYMMDD-YYMMDD format
    Example:
        date12_list = threshold_temporal_baseline(date12_list, 200)
        date12_list = threshold_temporal_baseline(date12_list, 200, False)
    '''
    if not date12_list:  return []
    # Get date list and tbase list
    m_dates = [date12.split('-')[0] for date12 in date12_list]
    s_dates = [date12.split('-')[1] for date12 in date12_list]
    date8_list = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
    date6_list = ptime.yymmdd(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Threshold
    date12_list_out = []
    for date12 in date12_list:
        date1, date2 = date12.split('-')
        idx1 = date6_list.index(date1)
        idx2 = date6_list.index(date2)
        tbase = int(abs(tbase_list[idx1] - tbase_list[idx2]))
        if btemp_min <= tbase <= btemp_max:
            date12_list_out.append(date12)
        elif keep_seasonal and tbase/30 in [11,12]:
            date12_list_out.append(date12)    
    return date12_list_out
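The seasonal branch keeps pairs whose separation is 11-12 months, i.e. near-annual pairs that sample the same season. A standalone check of the thresholding rule on plain day counts:

def keep(tbase, btemp_max=200, btemp_min=0.0, keep_seasonal=True):
    # keep pairs within [btemp_min, btemp_max] days, plus near-annual pairs
    return (btemp_min <= tbase <= btemp_max) or \
           (keep_seasonal and int(tbase / 30) in [11, 12])

print([t for t in [12, 48, 240, 350, 365, 400] if keep(t)])  # [12, 48, 350, 365]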
Example #5
def select_pairs_mst(date_list, pbase_list):
    '''Select pairs using the minimum spanning tree (MST) technique
        Connection cost is the distance in the plane of perpendicular baseline vs. scaled temporal
        baseline (Pepe and Lanari, 2006, TGRS).
    Inputs:
        date_list  : list of date in YYMMDD/YYYYMMDD format
        pbase_list : list of float, perpendicular spatial baseline
    References:
        Pepe, A., and R. Lanari (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
        of multitemporal differential SAR interferograms, IEEE TGRS, 44(9), 2374-2383.
        Perissin, D., and T. Wang (2012), Repeat-pass SAR interferometry with partially coherent targets, IEEE TGRS, 50(1), 271-280.
    '''
    # Get temporal baseline in days
    date6_list = ptime.yymmdd(date_list)
    date8_list = ptime.yyyymmdd(date_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    # Normalization (Pepe and Lanari, 2006, TGRS)
    temp2perp_scale = (max(pbase_list) - min(pbase_list)) / (max(tbase_list) - min(tbase_list))
    tbase_list = [tbase * temp2perp_scale for tbase in tbase_list]

    # Get weight matrix
    ttMat1, ttMat2 = np.meshgrid(np.array(tbase_list), np.array(tbase_list))
    ppMat1, ppMat2 = np.meshgrid(np.array(pbase_list), np.array(pbase_list))
    ttMat = np.abs(ttMat1 - ttMat2)  # temporal distance matrix
    ppMat = np.abs(ppMat1 - ppMat2)  # spatial distance matrix

    weightMat = np.sqrt(np.square(ttMat) + np.square(ppMat))  # 2D distance matrix in temp/perp domain
    weightMat = csr_matrix(weightMat)  # compressed sparse row matrix

    # MST path based on weight matrix
    mstMat = minimum_spanning_tree(weightMat)

    # Convert MST index matrix into date12 list
    [s_idx_list, m_idx_list] = [date_idx_array.tolist() for date_idx_array in find(mstMat)[0:2]]
    date12_list = [date6_list[m_idx_list[i]] + '-' + date6_list[s_idx_list[i]]
                   for i in range(len(m_idx_list))]
    return date12_list
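With the ptime date handling factored out, the MST selection reduces to a few scipy calls; a self-contained sketch on synthetic baselines:

import numpy as np
from scipy.sparse import csr_matrix, find
from scipy.sparse.csgraph import minimum_spanning_tree

tbase = np.array([0., 35., 70., 140., 175.])   # days
pbase = np.array([0., 120., -40., 300., 90.])  # meters
scale = (pbase.max() - pbase.min()) / (tbase.max() - tbase.min())
t = tbase * scale

# full pairwise distance matrix as edge weights
weight = np.hypot(t[:, None] - t[None, :], pbase[:, None] - pbase[None, :])
mst = minimum_spanning_tree(csr_matrix(weight))

# nonzero entries of the MST matrix are the selected pairs
s_idx, m_idx = find(mst)[0:2]
print(sorted(zip(m_idx.tolist(), s_idx.tolist())))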
Example #6
def Baseline_timeseries(igramsFile):
    h5file = h5py.File(igramsFile, 'r')
    if   'interferograms' in h5file.keys(): k = 'interferograms'
    elif 'coherence'      in h5file.keys(): k = 'coherence'
    igramList = h5file[k].keys()
    Bp_igram = []
    for igram in igramList:
        Bp_igram.append((float(h5file[k][igram].attrs['P_BASELINE_BOTTOM_HDR']) +
                         float(h5file[k][igram].attrs['P_BASELINE_TOP_HDR'])) / 2)
  
    A, B = design_matrix(h5file)
    dateList = ptime.date_list(igramsFile)
    tbase, dateDict = ptime.date_list2tbase(dateList)
    dt = np.diff(tbase)

    Bp_rate = np.dot(np.linalg.pinv(B), Bp_igram)
    zero = np.array([0.], np.float32)
    Bperp = np.concatenate((zero, np.cumsum(Bp_rate * dt)))
    h5file.close()
    
    return Bperp
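Bp_rate holds the perpendicular-baseline rate between consecutive acquisitions; multiplying by the time steps dt and integrating with a cumulative sum recovers Bperp per date, referenced to zero at the first date (design_matrix and ptime are this package's helpers). A toy check of that integration step:

import numpy as np

tbase = np.array([0, 35, 70, 140])            # days
Bperp_true = np.array([0., 50., -20., 80.])   # meters, first date as reference

dt = np.diff(tbase)
Bp_rate = np.diff(Bperp_true) / dt            # rate between consecutive dates
Bperp = np.concatenate(([0.], np.cumsum(Bp_rate * dt)))
print(np.allclose(Bperp, Bperp_true))         # True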
Example #7
def correct_lod_file(File, outFile=None):
    # Check Sensor Type
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    range_resolution = float(atr['RANGE_PIXEL_SIZE'])

    r = np.linspace(0, width - 1, width)
    R = range_resolution * r * (3.87e-7)
    Ramp = np.tile(R, [length, 1])

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    Ramp -= Ramp[yref][xref]

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())

        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'wrapped']:
            print 'number of interferograms: ' + str(len(epochList))
            wvl = float(atr['WAVELENGTH'])
            Ramp *= -4 * np.pi / wvl
            for epoch in epochList:
                print epoch
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(len(epochList)):
                epoch = epochList[i]
                print epoch
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)

        h5.close()
        h5out.close()

    else:
        data, atr = readfile.read(File)
        data -= Ramp
        writefile.write(data, atr, outFile)

    return outFile
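The empirical Envisat local oscillator drift (LOD) model builds a linear ramp in range of 3.87e-7 m per meter of slant range per year, zeroed at the reference pixel; for interferograms it is further scaled by -4*pi/wavelength and the pair's time span in years. A minimal sketch of the ramp construction (sizes and values hypothetical):

import numpy as np

width, length = 5, 3
range_pixel_size = 7.8      # meters, hypothetical
ref_y, ref_x = 1, 2

r = np.arange(width)
Ramp = np.tile(range_pixel_size * r * 3.87e-7, [length, 1])  # m/yr per pixel
Ramp -= Ramp[ref_y, ref_x]  # zero at the reference pixel
print(Ramp)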
Example #8
def ifgram_inversion(ifgramFile='unwrapIfgram.h5', coherenceFile='coherence.h5', meta=None):
    '''Implementation of the SBAS algorithm.
    Modified from sbas.py written by Scott Baker, 2012.

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        meta          - dict, including the following options:
                        weight_function
                        chunk_size - float, max number of data (ifgram_num*row_num*col_num)
                                     to read per loop; to control the memory
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    Example:
        meta = dict()
        meta['weight_function'] = 'variance'
        meta['chunk_size'] = 0.5e9
        meta['timeseriesFile'] = 'timeseries_var.h5'
        meta['tempCohFile'] = 'temporalCoherence_var.h5'
        ifgram_inversion('unwrapIfgram.h5', 'coherence.h5', meta)
    '''
    if not meta:
        meta = vars(cmdLineParse())
    if 'tempCohFile' not in meta.keys():
        meta['tempCohFile'] = 'temporalCoherence.h5'
    meta['timeseriesStdFile'] = 'timeseriesDecorStd.h5'
    total = time.time()

    if meta['update_mode'] and not ut.update_file(meta['timeseriesFile'], ifgramFile):
        return meta['timeseriesFile'], meta['tempCohFile']

    ##### Basic Info
    # length/width
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])
    meta['length'] = length
    meta['width']  = width

    # ifgram_list
    h5ifgram = h5py.File(ifgramFile,'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    #if meta['weight_function'] in ['no','uniform']:
    #    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    meta['ifgram_list'] = ifgram_list
    ifgram_num = len(ifgram_list)

    # date12_list/date8_list/tbase_diff
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    meta['date8_list'] = date8_list
    meta['date12_list'] = date12_list

    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((-1,1))
    meta['tbase_diff'] = tbase_diff

    print 'number of interferograms: %d' % (ifgram_num)
    print 'number of acquisitions  : %d' % (date_num)
    print 'number of columns: %d' % (width)
    print 'number of lines  : %d' % (length)

    ##### ref_y/x/value
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
        ref_value = np.zeros((ifgram_num,1), np.float32)
        for j in range(ifgram_num):
            ifgram = ifgram_list[j]
            dset = h5ifgram['interferograms'][ifgram].get(ifgram)
            ref_value[j] = dset[ref_y,ref_x]
        meta['ref_y'] = ref_y
        meta['ref_x'] = ref_x
        meta['ref_value'] = ref_value
    except:
        if meta['skip_ref']:
            meta['ref_value'] = 0.0
            print 'skip checking reference pixel info - This is for SIMULATION ONLY.'
        else:
            print 'ERROR: No ref_x/y found! Can not invert interferograms without reference in space.'
            print 'run seed_data.py '+ifgramFile+' --mark-attribute for a quick referencing.'
            sys.exit(1)
    h5ifgram.close()

    ##### Rank of Design matrix for weighted inversion
    A, B = ut.design_matrix(ifgramFile, date12_list)
    print '-------------------------------------------------------------------------------'
    if meta['weight_function'] in ['no','uniform']:
        print 'generic least square inversion with min-norm phase velocity'
        print '    based on Berardino et al. (2002, IEEE-TGRS)'
        print '    OLS for pixels with fully     connected network'
        print '    SVD for pixels with partially connected network'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'WARNING: singular design matrix! Inversion result can be biased!'
            print 'continue using its SVD solution on all pixels'
    else:
        print 'weighted least square (WLS) inversion with min-norm phase, pixelwise'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Can not invert the weighted least square solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)
    print '-------------------------------------------------------------------------------'


    ##### Invert time-series phase
    ##Check parallel environment
    if meta['weight_function'] in ['no','uniform']:
        meta['parallel'] = False
    if meta['parallel']:
        num_cores, meta['parallel'], Parallel, delayed = ut.check_parallel(1000, print_msg=False)

    ##Split into chunks to reduce memory usage
    r_step = meta['chunk_size']/ifgram_num/width         #split in lines
    if meta['weight_function'] not in ['no','uniform']:  #more memory usage (coherence) for WLS
        r_step /= 2.0
        if meta['parallel']:
            r_step /= num_cores
    r_step = int(ceil_to_1(r_step))
    meta['row_step'] = r_step
    chunk_num = int((length-1)/r_step)+1

    if chunk_num > 1:
        print 'maximum chunk size: %.1E' % (meta['chunk_size'])
        print 'split %d lines into %d patches for processing' % (length, chunk_num)
        print '    with each patch up to %d lines' % (r_step)
        if meta['parallel']:
            print 'parallel processing using %d cores ...' % (min([num_cores,chunk_num]))

    ##Computing the inversion
    box_list = []
    for i in range(chunk_num):
        r0 = i*r_step
        r1 = min([length, r0+r_step])
        box = (0,r0,width,r1)
        box_list.append(box)
    box_num = len(box_list)

    if not meta['parallel']:
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        for i in range(box_num):
            if box_num > 1:
                print '\n------- Processing Patch %d out of %d --------------' % (i+1, box_num)
            box = box_list[i]
            ts, tcoh, tsStd = ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box)
            tempCoh[box[1]:box[3],box[0]:box[2]] = tcoh
            timeseries[:,box[1]:box[3],box[0]:box[2]] = ts
            timeseriesStd[:,box[1]:box[3],box[0]:box[2]] = tsStd

    else:
        ##Temp file list
        meta['ftemp_base'] = 'timeseries_temp_'
        temp_file_list = [meta['ftemp_base']+str(i)+'.h5' for i in range(chunk_num)]

        ##Computation
        Parallel(n_jobs=num_cores)(delayed(ifgram_inversion_patch)\
                                   (ifgramFile, coherenceFile, meta, box) for box in box_list)

        ##Concatenate temp files
        print 'concatenating temporary timeseries files ...'
        timeseries = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        rmCmd = 'rm'
        for i in range(chunk_num):
            fname = temp_file_list[i]
            box = box_list[i]
            print 'reading '+fname
            h5temp = h5py.File(fname, 'r')
            dset = h5temp['timeseries'].get('timeseries')
            timeseries[:,box[1]:box[3],box[0]:box[2]] = dset[0:-1,:,:]
            tempCoh[box[1]:box[3],box[0]:box[2]] = dset[-1,:,:]
            h5temp.close()
            rmCmd += ' '+fname
        print rmCmd
        os.system(rmCmd)

    print 'converting phase to range'
    phase2range = -1*float(atr['WAVELENGTH'])/(4.*np.pi)
    timeseries *= phase2range
    timeseriesStd *= abs(phase2range)

    ##### Calculate time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(None,'[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None,'[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None,'[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    ##### Output
    ## 1. Write time-series file
    meta['timeseriesFile'] = write_timeseries_hdf5_file(timeseries, date8_list, atr,\
                                                        timeseriesFile=meta['timeseriesFile'])
    if not np.all(timeseriesStd == 0.):
        meta['timeseriesStdFile'] = write_timeseries_hdf5_file(timeseriesStd, date8_list, atr,\
                                                               timeseriesFile=meta['timeseriesStdFile'])

    ## 2. Write Temporal Coherence File
    print 'writing >>> '+meta['tempCohFile']
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    meta['tempCohFile'] = writefile.write(tempCoh, atr, meta['tempCohFile'])

    print 'Time series inversion took ' + str(time.time()-total) +' secs\nDone.'
    return meta['timeseriesFile'], meta['tempCohFile']
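At its core the unweighted branch solves B v = dphi for the phase velocities between consecutive dates (the min-norm SVD solution of Berardino et al., 2002), then integrates them into a time series. A self-contained sketch of that step on a toy 3-date, 3-interferogram network, with B built as in ut.design_matrix (one row per interferogram, one column per date interval):

import numpy as np

tbase = np.array([0., 12., 36.])         # acquisition times in days
dt = np.diff(tbase).reshape(-1, 1)

# pairs (0,1), (1,2), (0,2): each row holds the interval lengths it spans
B = np.array([[12.,  0.],
              [ 0., 24.],
              [12., 24.]])
phase = np.array([[0.6], [1.2], [1.8]])  # observed interferogram phases (rad)

v = np.linalg.pinv(B).dot(phase)         # min-norm phase velocity (SVD)
ts = np.concatenate(([[0.]], np.cumsum(v * dt).reshape(-1, 1)))
print(ts.ravel())                        # [0.  0.6 1.8], w.r.t. the first date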
Example #9
def main(argv):
    try:
        timeseries_file = argv[0]
    except:
        usage()
        sys.exit(1)

    # Basic info
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series
    print "loading time series: " + timeseries_file
    h5 = h5py.File(timeseries_file)
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    pixel_num = length * width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    timeseries = np.zeros((date_num, pixel_num), np.float32)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    del d
    h5.close()

    ##### Calculate 1st and 2nd temporal derivatives
    print "calculating temporal 1st derivative ... "
    timeseries_1st = np.zeros((date_num - 1, pixel_num), np.float32)
    for i in range(date_num - 1):
        timeseries_1st[i][:] = timeseries[i + 1][:] - timeseries[i][:]

    print "calculating temporal 2nd derivative"
    timeseries_2nd = np.zeros((date_num - 2, pixel_num), np.float32)
    for i in range(date_num - 2):
        timeseries_2nd[i][:] = timeseries_1st[i + 1][:] - timeseries_1st[i][:]

    ##### Write 1st and 2nd temporal derivatives
    outfile1 = os.path.splitext(timeseries_file)[0] + '_1stDerivative.h5'
    print 'writing >>> ' + outfile1
    h5out = h5py.File(outfile1, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num - 1)
    for i in range(date_num - 1):
        date = date_list[i + 1]
        dset = group.create_dataset(date,
                                    data=np.reshape(timeseries_1st[i][:],
                                                    [length, width]),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    outfile2 = os.path.splitext(timeseries_file)[0] + '_2ndDerivative.h5'
    print 'writing >>> ' + outfile2
    h5out = h5py.File(outfile2, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num - 2)
    for i in range(date_num - 2):
        date = date_list[i + 2]
        dset = group.create_dataset(date,
                                    data=np.reshape(timeseries_2nd[i][:],
                                                    [length, width]),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    print 'Done.'
    return outfile1, outfile2
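The two difference loops are equivalent to np.diff along the time axis; a quick equivalence check:

import numpy as np

ts = np.random.rand(6, 10).astype(np.float32)  # (date_num, pixel_num)
d1 = np.diff(ts, n=1, axis=0)                  # 1st temporal derivative
d2 = np.diff(ts, n=2, axis=0)                  # 2nd temporal derivative
print(d1.shape, d2.shape)                      # (5, 10) (4, 10)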
Example #10
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based,\
                           inps.start_date, inps.end_date, inps.reset]):
        # Display network for manual modification when there is no other modification input.
        print 'No input option found to remove interferograms'
        if inps.template_file:
            print 'Keep all interferograms by enabling the --reset option'
            inps.reset = True
        else:
            print 'To manually modify the network, please use the --manual option'
            return

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)
        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)
        return

    # Convert index input (e.g. '5', '8:10') into a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file,
                                              check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.lookup_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.lookup_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if abs(ifg_bperp_list[i]) > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) < min_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) > max_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Compare calculated date12_to_drop with existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5, print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing marked input file, skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms' and inps.update_aux:
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)

            elif k == 'coherence' and inps.update_aux:
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch spatial average txt file of coherence if it's existed
                coh_spatialAverage_file = os.path.splitext(
                    Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
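The index-expansion block above turns entries like '5' and '8:10' into a flat, sorted, 1-based index list; a standalone sketch of that parsing:

def expand_index(index_list):
    # '8:10' -> [8, 9, 10]; '5' -> [5]
    out = []
    for index in index_list:
        parts = sorted(int(i) for i in index.split(':'))
        if len(parts) == 2:
            out.extend(range(parts[0], parts[1] + 1))
        else:
            out.extend(parts)
    return sorted(out)

print(expand_index(['5', '8:10']))  # [5, 8, 9, 10]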
Example #11
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print 'read option from template file: ' + inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print "loading time series: " + inps.timeseries_file
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print '\tconsider P_BASELINE variation in azimuth direction'
        else:
            pbase = inps.pbase
    except:
        print '\tCannot find P_BASELINE_TIMESERIES from timeseries file.'
        print '\tTrying to calculate it from interferograms file'
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Cannot correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print 'read temporal baseline'
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print 'reading incidence angle from file: ' + inps.incidence_angle
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print 'use input incidence angle : ' + str(
                    inps.incidence_angle)
            except:
                raise ValueError('Can not read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print 'calculate incidence angle using attributes of time series file'
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print 'reading range distance from file: ' + inps.range_dis
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print 'use input range distance : ' + str(inps.range_dis)
            except:
                raise ValueError('Can not read input incidence angle: ' +
                                 str(inps.range_dis))
    else:
        print 'calculate range distance using attributes from time series file'
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print '-------------------------------------------------'
    if inps.phase_velocity:
        print 'using phase velocity history'
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print 'using phase history'
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0

    # Polynomial order of model
    print "temporal deformation model's polynomial order = " + str(
        inps.poly_order)
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print "temporal deformation model's step function step at " + inps.step_date
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print '-------------------------------------------------'

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width], dtype=np.float32)
    resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32)
    constC = np.zeros([length, width], dtype=np.float32)
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width)
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i / length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width)
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1].reshape((1, length))

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = delta_z
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print 'inverting using L2-norm minimization (unweighted least squares) for the whole area'

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print 'ERROR: this script only supports the same dimension for both the incidence angle and range distance matrices.'
        print 'dimension of incidence angle: ' + str(inps.incidence_angle.ndim)
        print 'dimension of range distance : ' + str(inps.range_dis.ndim)
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in atr.keys():
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print 'writing >>> ' + dem_error_file
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print 'writing >>> ' + inps.outfile
        print 'number of dates: ' + str(len(date_list))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print 'writing >>> ' + outFile
    print 'number of dates: ' + str(A_def.shape[0])
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.iteritems():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
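The DEM-error column of the design matrix follows Fattahi and Amelung (2013, IEEE-TGRS): a height error delta_z enters the displacement time series as pbase / (r * sin(inc_angle)) * delta_z. A toy forward-model/inversion round trip with a linear deformation model (all values hypothetical):

import numpy as np

date_num = 6
tbase = np.linspace(0, 500, date_num).reshape(-1, 1)               # days
pbase = np.array([0., 80., -30., 150., 60., 10.]).reshape(-1, 1)   # meters
r, inc_angle = 850e3, np.deg2rad(34.)        # slant range (m), incidence (rad)

A_delta_z = pbase / (r * np.sin(inc_angle))         # DEM-error column
A_def = np.hstack((np.ones((date_num, 1)), tbase))  # offset + velocity
A = np.hstack((A_delta_z, A_def))

x_true = np.array([[25.], [0.001], [2e-5]])  # [delta_z (m), c, v]
ts = A.dot(x_true)                           # simulated time series
x_est = np.linalg.pinv(A).dot(ts)
print(np.allclose(x_est, x_true))            # True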
Example #12
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0]+suffix+os.path.splitext(inps.timeseries_file)[1]
    if inps.template_file:
        print 'read option from template file: '+inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    ##### Read Data
    atr = readfile.read_attribute(inps.timeseries_file)
    coordType = 'radar'
    if 'Y_FIRST' in atr.keys():
        coordType = 'geo'

    # 1. Incidence angle
    try:
        inps.inc_angle_file = ut.get_file_list(inps.inc_angle_file, coord=coordType)[0]
    except ValueError:
        print 'No incidence angle file found!\nRun incidence_angle.py to generate it.'
    print 'read incidence angle from file: '+str(inps.inc_angle_file)
    inps.inc_angle = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0].flatten()
    inps.inc_angle *= np.pi/180.0

    # 2. Slant Range distance
    try:
        inps.range_dist_file = ut.get_file_list(inps.range_dist_file, coord=coordType)[0]
    except ValueError:
        print 'No range distance file found!\nRun range_distance.py to generate it.'
    print 'read slant range distance from file: '+str(inps.range_dist_file)
    inps.range_dist = readfile.read(inps.range_dist_file, epoch='slantRangeDistance')[0].flatten()

    # 3. Perp Baseline - 1D in time, 0D/1D in space (azimuth)
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=1)
        if inps.pbase.shape[1] > 1:
            print 'consider perp baseline variance in azimuth direction'
    except ValueError:
        print 'No P_BASELINE_TIMESERIES found in timeseries file.\n'+\
              'Cannot correct for DEM residual without it!'

    # 4. Time Series - 1D in time, 1D in space (flattened)
    print "read time series file: " + inps.timeseries_file
    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(-1,1)

    #Mark dates used in the estimation
    inps.ex_date = check_exclude_date(inps.ex_date, date_list)
    inps.date_flag = np.array([i not in inps.ex_date for i in date_list], dtype=np.bool_)
    if inps.poly_order > np.sum(inps.date_flag):
        raise ValueError("ERROR: input polynomial order=%d is larger than number of acquisition=%d used in estimation!" %\
                         (inps.poly_order, np.sum(inps.date_flag)))

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length*width
    timeseries = np.zeros((date_num, pixel_num),np.float32)
    for i in range(date_num):
        timeseries[i] = h5['timeseries'].get(date_list[i])[:].flatten()
        sys.stdout.write('\rreading acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
    h5.close()
    print ''


    ##### Design matrix - temporal deformation model
    print '-------------------------------------------------'
    print 'Correct topographic phase residual using Fattahi and Amelung (2013, IEEE-TGRS)'
    msg = 'minimum-norm constraint on: phase'
    if inps.phase_velocity:
        msg += ' velocity'
    print msg

    # Heresh's original code for phase history approach
    #A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
    #A2 = inps.tbase**2 / 2.0
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))

    # 1. Polynomial - 2D matrix in size of (date_num, polyOrder+1)
    print "temporal deformation model: polynomial order = "+str(inps.poly_order)
    A_def = np.ones((date_num, 1), np.float32)
    for i in range(inps.poly_order):
        Ai = inps.tbase**(i+1) / gamma(i+2)
        Ai = np.array(Ai, np.float32).reshape(-1,1)
        A_def = np.hstack((A_def, Ai))
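    # e.g. poly_order=2 yields columns [1, t, t**2/2] since gamma(i+2) = (i+1)!,
    # i.e. offset, velocity and acceleration terms of the Taylor expansion
    # of the deformation history.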

    # 2. Step function - 2D matrix in size of (date_num, stepNum)
    if inps.step_date:
        print "temporal deformation model: step functions at "+str(inps.step_date)
        yySteps = ptime.yyyymmdd2years(inps.step_date)
        yyList = np.array(ptime.yyyymmdd2years(date_list)).reshape(-1,1)
        for yyStep in yySteps:
            Ai = yyList > yyStep
            Ai = np.array(Ai, np.float32).reshape(-1,1)
            A_def = np.hstack((A_def, Ai))
    inps.step_num = len(inps.step_date)

    print '-------------------------------------------------'


    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    ## Output estimated steps 
    print 'ordinary least squares (OLS) inversion using L2-norm minimization'
    timeseriesCor = np.zeros((date_num, pixel_num), dtype=np.float32)
    timeseriesRes = np.zeros((date_num, pixel_num), dtype=np.float32)
    topoRes = np.zeros(pixel_num, dtype=np.float32)
    constC  = np.zeros(pixel_num, dtype=np.float32)
    if inps.step_num > 0:
        stepModel = np.zeros((inps.step_num, pixel_num), dtype=np.float32)

    print 'skip pixels with zero/nan value in geometry files - incidence angle and range distance'
    mask = np.multiply(~np.isnan(inps.inc_angle), ~np.isnan(inps.range_dist))
    mask[inps.inc_angle == 0.] = 0
    mask[inps.range_dist == 0.] = 0
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels in the file: %d' % (pixel_num)
    print 'number of pixels to invert  : %d' % (pixel_num2inv)

    if inps.pbase.shape[1] == 1:
        pbase = inps.pbase
    prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
    for i in range(pixel_num2inv):
        prog_bar.update(i+1, every=1000, suffix='%s/%s pixels'%(str(i+1), str(pixel_num2inv)))
        idx = pixel_idx2inv[i]

        r = inps.range_dist[idx]
        inc_angle = inps.inc_angle[idx]
        if inps.pbase.shape[1] > 1:
            pbase = inps.pbase[:, int(idx/width)].reshape(-1,1)
        A_deltaZ = pbase / (r * np.sin(inc_angle))
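        # Range change caused by a DEM error deltaZ is
        # dR = pbase * deltaZ / (r * sin(inc_angle)) (Fattahi and Amelung, 2013),
        # so this column maps deltaZ [m] directly onto the time series [m].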

        A = np.hstack((A_deltaZ, A_def))
        ts = timeseries[:,idx].reshape(date_num,-1)
        deltaZ, tsCor, tsRes, stepEst = topographic_residual_inversion(ts, A, inps)
        topoRes[idx:idx+1] = deltaZ
        timeseriesCor[:,idx:idx+1] = tsCor
        timeseriesRes[:,idx:idx+1] = tsRes
        if inps.step_num > 0:
            stepModel[:,idx:idx+1] = stepEst
    prog_bar.close()


    ##------------------------------------------------ Output  --------------------------------------------##
    # 1. DEM error file
    if 'Y_FIRST' in atr.keys():
        deltaZFile = 'demGeo_error.h5'
    else:
        deltaZFile = 'demRadar_error.h5'
    print 'writing >>> '+deltaZFile
    atrDeltaZ = atr.copy()
    atrDeltaZ['FILE_TYPE'] = 'dem'
    atrDeltaZ['UNIT'] = 'm'
    writefile.write(topoRes.reshape(length, width), atrDeltaZ, deltaZFile)

    # 2. Topo Residual Corrected Time Series
    print 'writing >>> '+inps.outfile
    h5 = h5py.File(inps.outfile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesCor[i].reshape(length, width), compression='gzip')
    print ''
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 3. Inversion residual Time Series
    tsResFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesResidual.h5')
    print 'writing >>> '+os.path.basename(tsResFile)
    h5 = h5py.File(tsResFile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesRes[i].reshape(length, width), compression='gzip')
    print ''
    # Attribute
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 4. Step temporal Model estimation
    if inps.step_num > 0:
        stepFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesStepModel.h5')
        print 'writing >>> '+os.path.basename(stepFile)
        h5 = h5py.File(stepFile,'w')
        group = h5.create_group('timeseries')
        for i in range(inps.step_num):
            sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, inps.step_num))
            sys.stdout.flush()
            dset = group.create_dataset(inps.step_date[i], data=stepModel[i].reshape(length, width), compression='gzip')
        print ''
        # Attribute
        for key,value in atr.iteritems():
            group.attrs[key] = value
        group.attrs.pop('ref_date')
        h5.close()

    print 'Done.'
    return
Example #13
0
def main(argv):

    ## Default value
    phase_velocity = 'no'  # 'no' means use 'phase history'
    update_timeseries = 'yes'

    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(
                argv, 'hf:F:o:v:', ['phase-velocity', 'no-timeseries-update'])
        except getopt.GetoptError:
            print 'Error in reading input options!'
            usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                usage()
                sys.exit()
            elif opt == '-f':
                timeSeriesFile = arg
            elif opt == '-F':
                igramsFile = arg
            elif opt == '-o':
                outname = arg
            elif opt == '--phase-velocity':
                phase_velocity = 'yes'
            elif opt == '--no-timeseries-update':
                update_timeseries = 'no'

    elif len(sys.argv) == 2:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit(1)
        else:
            timeSeriesFile = argv[0]
    else:
        usage()
        sys.exit(1)

    try:
        outname
    except NameError:
        outname = timeSeriesFile.replace('.h5', '') + '_demCor.h5'

    ##### Read Time Series
    #print '\n*************** Topographic Error Correction ****************'
    print "Loading time series: " + timeSeriesFile
    atr = readfile.read_attribute(timeSeriesFile)
    h5timeseries = h5py.File(timeSeriesFile)
    dateList = sorted(h5timeseries['timeseries'].keys())
    lt = len(dateList)
    print 'number of epochs: ' + str(lt)

    dateIndex = {}
    for ni in range(len(dateList)):
        dateIndex[dateList[ni]] = ni

    nrows = int(atr['FILE_LENGTH'])
    ncols = int(atr['WIDTH'])
    timeseries = np.zeros((len(dateList), nrows * ncols), np.float32)
    for i in range(lt):
        date = dateList[i]
        ut.print_progress(i + 1, lt, prefix='loading:', suffix=date)
        d = h5timeseries['timeseries'].get(date)[:]
        timeseries[dateIndex[date]][:] = d.flatten('F')
    del d
    h5timeseries.close()
    print '**************************************'

    ##### Temporal Baseline
    print 'read temporal baseline'
    tbase, date_dict = ptime.date_list2tbase(dateList)
    tbase = np.array(tbase).reshape(lt, 1)

    ##### Perpendicular Baseline
    try:
        Bp = [float(i) for i in atr['P_BASELINE_TIMESERIES'].split()]
        Bp = np.array(Bp).reshape(lt, 1)
    except:
        print 'Cannot find P_BASELINE_TIMESERIES from timeseries file.'
        print 'Trying to calculate it from interferograms file'
        try:
            Bp = ut.Baseline_timeseries(igramsFile)
            Bp = np.array(Bp).reshape(lt, 1)
        except:
            print 'Error in calculating baseline time series!'
            sys.exit(1)
    Bp_v = (Bp[1:lt] - Bp[0:lt - 1]) / (tbase[1:lt] - tbase[0:lt - 1])
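    # Finite-difference "velocity" of the perpendicular baseline between
    # consecutive acquisitions, used by the phase-velocity formulation below.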

    ##### Cubic Temporal Deformation Model
    ## Formula (10) in (Fattahi and Amelung, 2013, TGRS)
    if phase_velocity == 'yes':
        print 'using phase velocity history'
        M1 = np.ones((lt - 1, 1))
        M2 = (tbase[1:lt] + tbase[0:lt - 1]) / 2
        M3 = (tbase[1:lt]**2 + tbase[1:lt] * tbase[0:lt - 1] +
              tbase[0:lt - 1]**2) / 6
        M = np.hstack((M1, M2, M3))
    else:
        print 'using phase history'
        M = np.hstack((.5 * tbase**2, tbase, np.ones((lt, 1))))
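    # The velocity branch observes the average phase velocity between
    # acquisitions: M1/M2/M3 are the exact finite differences of t, t**2/2
    # and t**3/6, i.e. a cubic phase model; the phase-history branch fits
    # the quadratic model phase(t) = a*t**2/2 + v*t + c directly.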

    ## Testing
    #teta = (tetaN+tetaF)/2
    #r = (rN+rF)/2
    #teta=19.658799999999999*np.pi/180
    #r=846848.2
    #Bperp=1000*np.random.random((lt,1))

    ##### Range and Look Angle
    near_range = float(atr['STARTING_RANGE1'])
    dR = float(atr['RANGE_PIXEL_SIZE'])
    r = float(atr['EARTH_RADIUS'])
    H = float(atr['HEIGHT'])
    far_range = near_range + dR * (ncols - 1)
    incidence_n = np.pi - np.arccos(
        (r**2 + near_range**2 - (r + H)**2) / (2 * r * near_range))
    incidence_f = np.pi - np.arccos(
        (r**2 + far_range**2 - (r + H)**2) / (2 * r * far_range))
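    # Look angle from the law of cosines in the triangle formed by the Earth
    # center, the satellite (at radius r+H) and the target (at radius r),
    # with the slant range as the third side.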

    various_range = 'yes'
    if various_range == 'yes':
        range_x = np.linspace(near_range,
                              far_range,
                              num=ncols,
                              endpoint=False)
        look_angle_x = np.linspace(incidence_n,
                                   incidence_f,
                                   num=ncols,
                                   endpoint=False)
    else:
        print 'using center range and look angle to represent the whole area'
        center_range = (near_range + far_range) / 2
        center_look_angle = np.pi - np.arccos(
            (r**2 + center_range**2 - (r + H)**2) / (2 * r * center_range))
        range_x = np.tile(center_range, ncols)
        look_angle_x = np.tile(center_look_angle, ncols)
    #C1_v = Bp_v / (center_range * np.sin(center_look_angle))
    #C1   = Bp   / (center_range * np.sin(center_look_angle))
    #timeseries_v = (timeseries[1:lt,:] - timeseries[0:lt-1,:]) / (tbase[1:lt] - tbase[0:lt-1])

    ##### Inversion column by column
    print 'inverting using L2-norm minimization (unweighted least squares)...'
    dz = np.zeros([1, nrows * ncols])

    for i in range(ncols):
        ## Design Matrix Inversion
        C1_v = Bp_v / (range_x[i] * np.sin(look_angle_x[i]))
        C1 = Bp / (range_x[i] * np.sin(look_angle_x[i]))
        if phase_velocity == 'yes': C = np.hstack((M, C1_v))
        else: C = np.hstack((M, C1))

        #print '    rank of the design matrix : '+str(np.linalg.matrix_rank(C))
        #if np.linalg.matrix_rank(C) == 4:  print '    design matrix has full rank'
        Cinv = np.linalg.pinv(C)

        ## (Phase) Velocity History
        ts_x = timeseries[:, i * nrows:(i + 1) * nrows]
        ts_xv = (ts_x[1:lt, :] - ts_x[0:lt - 1, :]) / (tbase[1:lt] -
                                                       tbase[0:lt - 1])

        ## DEM error
        if phase_velocity == 'yes': par = np.dot(Cinv, ts_xv)
        else: par = np.dot(Cinv, ts_x)
        dz_x = par[3].reshape((1, nrows))

        ## Update DEM error matrix and timeseries matrix
        dz[0][i * nrows:(i + 1) * nrows] = dz_x
        timeseries[:, i * nrows:(i + 1) * nrows] -= np.dot(C1, dz_x)

        ut.print_progress(i + 1, ncols)

    #dz[0][:] = par[3][:]
    dz = np.reshape(dz, [nrows, ncols], order='F')

    ########## Output - DEM error #######################
    #print '**************************************'
    #print 'writing DEM_error.hgt'
    #writefile.write_float32(dz,'DEM_error.hgt')
    #f = open('DEM_error.hgt.rsc','w')
    #f.write('FILE_LENGTH       '+str(int(nrows))+'\n')
    #f.write('WIDTH             '+str(int(ncols))+'\n')
    #print '**************************************'

    h5fileDEM = 'DEM_error.h5'
    print 'writing >>> ' + h5fileDEM
    h5rmse = h5py.File(h5fileDEM, 'w')
    group = h5rmse.create_group('dem')
    dset = group.create_dataset(os.path.basename('dem'),
                                data=dz,
                                compression='gzip')
    for key, value in atr.iteritems():
        group.attrs[key] = value
    group.attrs['UNIT'] = 'm'
    print '**************************************'

    ########### Output - Corrected Time Series ##########
    if update_timeseries == 'yes':
        print 'writing >>> ' + outname
        print 'number of dates: ' + str(len(dateList))

        h5timeseriesDEMcor = h5py.File(outname, 'w')
        group = h5timeseriesDEMcor.create_group('timeseries')
        for i in range(lt):
            date = dateList[i]
            ut.print_progress(i + 1, lt, prefix='writing:', suffix=date)
            d = np.reshape(timeseries[i][:], [nrows, ncols], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
        #for date in dateList:
        #    print date
        #    if not date in h5timeseriesDEMcor['timeseries']:
        #        d = np.reshape(timeseries[dateIndex[date]][:],[nrows,ncols],order='F')
        #        dset = group.create_dataset(date, data=d, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5timeseriesDEMcor.close()
Example #14
0
def main(argv):

    # Read inputs
    inps = cmdLineParse()
    inps = read_template2inps(inps.template_file, inps)
    log(os.path.basename(sys.argv[0]) + ' ' + inps.template_file)

    project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    print 'project name: ' + project_name
    if not inps.sensor:
        inps.sensor = project_name2sensor(project_name)

    # Auto path setting for Miami user
    if not inps.baseline_file and pysar.miami_path and 'SCRATCHDIR' in os.environ:
        try:
            inps.baseline_file = glob.glob(
                os.getenv('SCRATCHDIR') + '/' + project_name +
                '/SLC/bl_list.txt')[0]
        except:
            inps.baseline_file = None

    # Pair selection from reference
    if inps.reference_file:
        print 'Use pairs info from reference file: ' + inps.reference_file
        date12_list = pnet.get_date12_list(inps.reference_file)
        date12_list = [i.replace('_', '-') for i in date12_list]

        if inps.baseline_file:
            date8_list, pbase_list, dop_list = pnet.read_baseline_file(
                inps.baseline_file)[0:3]
            date6_list = ptime.yymmdd(date8_list)
            tbase_list = ptime.date_list2tbase(date8_list)[0]

    # Pair selection from temp/perp/dop baseline info
    else:
        if not inps.baseline_file:
            raise Exception('ERROR: No baseline file found!')

        # Check start/end/exclude date
        date8_list = pnet.read_baseline_file(inps.baseline_file)[0]
        inps.exclude_date = ptime.yyyymmdd(inps.exclude_date)
        if not inps.exclude_date:
            inps.exclude_date = []
        else:
            print 'input exclude dates: ' + str(inps.exclude_date)
        if inps.start_date:
            print 'input start date: ' + inps.start_date
            inps.exclude_date += [
                i for i in date8_list
                if float(i) < float(ptime.yyyymmdd(inps.start_date))
            ]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.end_date:
            print 'input end   date: ' + inps.end_date
            inps.exclude_date += [
                i for i in date8_list
                if float(i) > float(ptime.yyyymmdd(inps.end_date))
            ]
            inps.exclude_date = sorted(inps.exclude_date)
        if inps.exclude_date:
            print 'exclude    dates: '
            print inps.exclude_date

        # Read baseline list file: bl_list.txt
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        date8_list, pbase_list, dop_list = pnet.read_baseline_file(
            inps.baseline_file, inps.exclude_date)[0:3]
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]

        # Initial network using input methods
        inps.method = inps.method.lower().replace('-', '_')
        if inps.method in ['star', 'ps']: inps.method = 'star'
        elif inps.method.startswith('seq'): inps.method = 'sequential'
        elif inps.method.startswith('hierar'): inps.method = 'hierarchical'
        elif inps.method in [
                'mst', 'min_spanning_tree', 'minimum_spanning_tree'
        ]:
            inps.method = 'mst'
        print 'select method: ' + inps.method

        if inps.method == 'all':
            date12_list = pnet.select_pairs_all(date6_list)
        elif inps.method == 'delaunay':
            date12_list = pnet.select_pairs_delaunay(date6_list, pbase_list,
                                                     inps.norm)
        elif inps.method == 'star':
            date12_list = pnet.select_pairs_star(date6_list)
        elif inps.method == 'sequential':
            date12_list = pnet.select_pairs_sequential(date6_list,
                                                       inps.increment_num)
        elif inps.method == 'hierarchical':
            date12_list = pnet.select_pairs_hierarchical(
                date6_list, pbase_list, inps.temp_perp_list)
        elif inps.method == 'mst':
            date12_list = pnet.select_pairs_mst(date6_list, pbase_list)
        else:
            raise Exception('Unrecognized select method: ' + inps.method)
        print 'initial number of interferograms: ' + str(len(date12_list))

        # Filter pairs (optional) using temp/perp/doppler baseline threshold
        if inps.method in ['star', 'hierarchical', 'mst']:
            inps.threshold = False
        if inps.threshold:
            # Temporal baseline
            date12_list = pnet.threshold_temporal_baseline(date12_list, inps.temp_base_max,\
                                                           inps.keep_seasonal, inps.temp_base_min)
            print 'number of interferograms after filtering of <%d, %d> days in temporal baseline: %d'\
                  % (inps.temp_base_min, inps.temp_base_max, len(date12_list))
            if inps.keep_seasonal:
                print '\tkeep seasonal pairs, i.e. pairs with temporal baseline == N*years +/- one month'

            # Perpendicular spatial baseline
            date12_list = pnet.threshold_perp_baseline(date12_list, date6_list,
                                                       pbase_list,
                                                       inps.perp_base_max)
            print 'number of interferograms after filtering of max %d meters in perpendicular baseline: %d'\
                  % (inps.perp_base_max, len(date12_list))

            # Doppler Overlap Percentage
            if inps.sensor:
                bandwidth_az = pnet.azimuth_bandwidth(inps.sensor)
                date12_list = pnet.threshold_doppler_overlap(date12_list, date6_list, dop_list,\
                                                             bandwidth_az, inps.dop_overlap_min/100.0)
                print 'number of interferograms after filtering of min '+str(inps.dop_overlap_min)+'%'+\
                      ' overlap in azimuth Doppler frequency: '+str(len(date12_list))

    # Write ifgram_list.txt
    if not date12_list:
        print 'WARNING: No interferogram selected!'
        return None

    # date12_list to date_list
    m_dates = [
        date12.replace('_', '-').split('-')[0] for date12 in date12_list
    ]
    s_dates = [
        date12.replace('_', '-').split('-')[1] for date12 in date12_list
    ]
    try:
        print 'number of acquisitions   input   : ' + str(len(date6_list))
    except:
        pass
    print 'number of acquisitions   selected: ' + str(
        len(list(set(m_dates + s_dates))))
    print 'number of interferograms selected: ' + str(len(date12_list))

    # Output directory/filename
    if not inps.outfile:
        if pysar.miami_path and 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv(
                'SCRATCHDIR') + '/' + project_name + '/PROCESS'
        else:
            try:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.reference_file))
            except:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir + '/ifgram_list.txt'
    inps.outfile = os.path.abspath(inps.outfile)
    inps.out_dir = os.path.dirname(inps.outfile)
    if not os.path.isdir(inps.out_dir):
        os.makedirs(inps.out_dir)

    print 'writing >>> ' + inps.outfile
    if not inps.baseline_file:
        np.savetxt(inps.outfile, date12_list, fmt='%s')
        return inps.outfile

    ## Calculate Bperp, Btemp and predicted coherence
    ifgram_num = len(date12_list)
    ifgram_pbase_list = []
    ifgram_tbase_list = []

    for i in range(ifgram_num):
        m_date, s_date = date12_list[i].split('-')
        m_idx = date6_list.index(m_date)
        s_idx = date6_list.index(s_date)
        pbase = pbase_list[s_idx] - pbase_list[m_idx]
        tbase = tbase_list[s_idx] - tbase_list[m_idx]
        ifgram_pbase_list.append(pbase)
        ifgram_tbase_list.append(tbase)

    try:
        inps.coherence_list = pnet.simulate_coherence(
            date12_list, inps.baseline_file,
            sensor=inps.sensor).flatten().tolist()
        inps.cbar_label = 'Simulated coherence'
    except:
        inps.coherence_list = None

    ##### Write txt file
    fl = open(inps.outfile, 'w')
    fl.write('#Interferograms configuration generated by select_network.py\n')
    fl.write('#   Date12      Btemp(days)    Bperp(m)    sim_coherence\n')
    for i in range(len(date12_list)):
        line = '%s   %6.0f         %6.1f' % (
            date12_list[i], ifgram_tbase_list[i], ifgram_pbase_list[i])
        if inps.coherence_list:
            line += '       %1.4f' % (inps.coherence_list[i])
        fl.write(line + '\n')
    fl.close()

    ##### Plot network info
    if not inps.disp_fig:
        plt.switch_backend('Agg')

    out_fig_name = 'BperpHistory.pdf'
    print 'plotting baseline history in temp/perp baseline domain to file: ' + out_fig_name
    fig2, ax2 = plt.subplots()
    ax2 = pnet.plot_perp_baseline_hist(ax2, date8_list, pbase_list)
    plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    out_fig_name = 'Network.pdf'
    print 'plotting network / pairs  in temp/perp baseline domain to file: ' + out_fig_name
    fig1, ax1 = plt.subplots()
    ax1 = pnet.plot_network(ax1,
                            date12_list,
                            date8_list,
                            pbase_list,
                            plot_dict=vars(inps),
                            print_msg=False)
    plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    out_fig_name = 'CoherenceMatrix.pdf'
    if inps.coherence_list:
        print 'plotting predicted coherence matrix to file: ' + out_fig_name
        fig3, ax3 = plt.subplots()
        ax3 = pnet.plot_coherence_matrix(ax3,
                                         date12_list,
                                         inps.coherence_list,
                                         plot_dict=vars(inps))
        plt.savefig(inps.out_dir + '/' + out_fig_name, bbox_inches='tight')

    if inps.disp_fig:
        plt.show()

    return inps.outfile
Example #15
0
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    #print '\n****************** Network Modification ********************'

    if (not inps.reference_file and not inps.template_file and\
        not inps.max_temp_baseline and not inps.max_perp_baseline and\
        not inps.drop_ifg_index and not inps.drop_date and \
        not inps.coherence_file):
        # Display network for manual modification when there is no other modification input.
        print 'No input found to remove interferograms, continuing by displaying the network to select them manually ...'
        inps.disp_network = True

    # Update inps if template is input
    if inps.template_file:
        inps = update_inps_with_template(inps, inps.template_file)

    # Convert index : input to continuous index list
    if inps.drop_ifg_index:
        ifg_index = list(inps.drop_ifg_index)
        inps.drop_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.drop_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.drop_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.drop_ifg_index = sorted(inps.drop_ifg_index)
        if max(inps.drop_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.drop_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print date12

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_file:
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # Calculate spatial average coherence
        if inps.mask_file:
            mask = readfile.read(inps.mask_file)[0]
            print 'mask coherence with file: ' + inps.mask_file
        else:
            mask = None
        cohTextFile = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.list'
        if os.path.isfile(cohTextFile):
            print 'average coherence in space has been calculated before and stored in file: ' + cohTextFile
            print 'read it directly, or delete it and re-run the script to re-calculate the list'
            cohTxt = np.loadtxt(cohTextFile, dtype=str)
            mean_coherence_list = [float(i) for i in cohTxt[:, 1]]
            coh_date12_list = [i for i in cohTxt[:, 0]]
        else:
            print 'calculating average coherence of each interferogram ...'
            mean_coherence_list = ut.spatial_average(inps.coherence_file,
                                                     mask,
                                                     saveList=True)
            coh_date12_list = pnet.get_date12_list(inps.coherence_file)
        print 'date12 with average coherence < ' + str(
            inps.min_coherence) + ': '
        for i in range(len(coh_date12_list)):
            if mean_coherence_list[i] < inps.min_coherence:
                date12 = coh_date12_list[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.igram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.4 Update date12_to_rmv from drop_ifg_index
    if inps.drop_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.drop_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from drop_date
    if inps.drop_date:
        inps.drop_date = ptime.yymmdd(inps.drop_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.drop_date)
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.drop_date) or (date2 in inps.drop_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'list   of interferograms to remove:'
    print date12_to_rmv

    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                outFile = 'Modified_Mask.h5'
                print 'writing >>> ' + outFile
                ut.nonzero_mask(Modified_File, outFile)
            elif k == 'coherence':
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'Modified_average_spatial_coherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print '\nplot modified network and save to file.'
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            print plotCmd
            os.system(plotCmd)

        print 'Done.'
        return
    else:
        print 'No interferogram dropped, skip update.'
        return
Example #16
0
def correct_lod_file(File, rangeDistFile=None, outFile=None):
    # Check Sensor Type
    print 'correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)'
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    if not rangeDistFile:
        print 'calculate range distance from input file attributes'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        range_resolution = float(atr['RANGE_PIXEL_SIZE'])
        rangeDist1D = range_resolution * np.linspace(0, width - 1, width)
        rangeDist = np.tile(rangeDist1D, (length, 1))
    else:
        print 'read range distance from file: %s' % (rangeDistFile)
        rangeDist = readfile.read(rangeDistFile, epoch='slantRangeDistance')[0]

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    rangeDist -= rangeDist[yref][xref]
    Ramp = np.array(rangeDist * 3.87e-7, np.float32)
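    # Empirical Envisat LOD rate: ~3.87e-7 m of apparent range change per
    # meter of relative range distance per year (Marinkovic and Larsen, 2013);
    # the ramp is scaled by the time separation of each epoch below.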

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        epochNum = len(epochList)

        print 'writing >>> %s' % (outFile)
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        prog_bar = ptime.progress_bar(maxValue=epochNum)
        if k in ['interferograms', 'wrapped']:
            Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
            print 'number of interferograms: ' + str(epochNum)
            date12List = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12List[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)
        prog_bar.close()
        h5.close()
        h5out.close()

    elif k in ['.unw']:
        data, atr = readfile.read(File)
        Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
        dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
        dates = ptime.yyyymmdd2years(dates)
        dt = dates[1] - dates[0]
        data -= Ramp * dt
        print 'writing >>> %s' % (outFile)
        writefile.write(data, atr, outFile)
    else:
        print 'No need to correct for LOD for %s file' % (k)

    return outFile
Example #17
0
def main(argv):

    ## Default value
    phase_velocity = "no"  # 'no' means use 'phase history'
    update_timeseries = "yes"

    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(argv, "h:f:F:o:v:", ["phase-velocity", "no-timeseries-update"])
        except getopt.GetoptError:
            print "Error in reading input options!"
            Usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ["-h", "--help"]:
                Usage()
                sys.exit()
            elif opt == "-f":
                timeSeriesFile = arg
            elif opt == "-F":
                igramsFile = arg
            elif opt == "-o":
                outname = arg
            elif opt == "--phase-velocity":
                phase_velocity = "yes"
            elif opt == "--no-timeseries-update":
                update_timeseries = "no"

    elif len(sys.argv) == 2:
        if argv[0] in ["-h", "--help"]:
            Usage()
            sys.exit(1)
        else:
            timeSeriesFile = argv[0]
    else:
        Usage()
        sys.exit(1)

    try:
        outname
    except NameError:
        outname = timeSeriesFile.replace(".h5", "") + "_demCor.h5"

    ##### Read Time Series
    print "\n*************** Topographic Error Correction ****************"
    print "Loading time series: " + timeSeriesFile
    atr = readfile.read_attributes(timeSeriesFile)
    h5timeseries = h5py.File(timeSeriesFile)
    dateList = h5timeseries["timeseries"].keys()
    dateList = sorted(dateList)
    lt = len(dateList)
    print "number of epochs: " + str(lt)

    dateIndex = {}
    for ni in range(len(dateList)):
        dateIndex[dateList[ni]] = ni

    nrows = int(atr["FILE_LENGTH"])
    ncols = int(atr["WIDTH"])
    timeseries = np.zeros((len(dateList), nrows * ncols), np.float32)
    for date in dateList:
        print date
        d = h5timeseries["timeseries"].get(date)[:]
        timeseries[dateIndex[date]][:] = d.flatten("F")
    del d
    h5timeseries.close()
    print "**************************************"

    ##### Temporal Baseline
    print "read temporal baseline"
    tbase, date_dict = ptime.date_list2tbase(dateList)
    tbase = np.array(tbase).reshape(lt, 1)

    ##### Perpendicular Baseline
    try:
        Bp = [float(i) for i in atr["P_BASELINE_TIMESERIES"].split()]
        Bp = np.array(Bp).reshape(lt, 1)
    except:
        print "Cannot find P_BASELINE_TIMESERIES from timeseries file."
        print "Trying to calculate it from interferograms file"
        try:
            Bp = ut.Baseline_timeseries(igramsFile)
            Bp = np.array(Bp).reshape(lt, 1)
        except:
            print "Error in calculating baseline time series!"
            sys.exit(1)
    Bp_v = (Bp[1:lt] - Bp[0 : lt - 1]) / (tbase[1:lt] - tbase[0 : lt - 1])

    ##### Cubic Temporal Deformation Model
    ## Formula (10) in (Fattahi and Amelung, 2013, TGRS)
    if phase_velocity == "yes":
        print "using phase velocity history"
        M1 = np.ones((lt - 1, 1))
        M2 = (tbase[1:lt] + tbase[0 : lt - 1]) / 2
        M3 = (tbase[1:lt] ** 2 + tbase[1:lt] * tbase[0 : lt - 1] + tbase[0 : lt - 1] ** 2) / 6
        M = np.hstack((M1, M2, M3))
    else:
        print "using phase history"
        M = np.hstack((0.5 * tbase ** 2, tbase, np.ones((lt, 1))))

    ## Testing
    # teta = (tetaN+tetaF)/2
    # r = (rN+rF)/2
    # teta=19.658799999999999*np.pi/180
    # r=846848.2
    # Bperp=1000*np.random.random((lt,1))

    ##### Range and Look Angle
    near_range = float(atr["STARTING_RANGE1"])
    dR = float(atr["RANGE_PIXEL_SIZE"])
    r = float(atr["EARTH_RADIUS"])
    H = float(atr["HEIGHT"])
    far_range = near_range + dR * (ncols - 1)
    incidence_n = np.pi - np.arccos((r ** 2 + near_range ** 2 - (r + H) ** 2) / (2 * r * near_range))
    incidence_f = np.pi - np.arccos((r ** 2 + far_range ** 2 - (r + H) ** 2) / (2 * r * far_range))

    various_range = "yes"
    if various_range == "yes":
        range_x = np.linspace(near_range, far_range, num=ncols, endpoint="FALSE")
        look_angle_x = np.linspace(incidence_n, incidence_f, num=ncols, endpoint="FALSE")
    else:
        print "using center range and look angle to represent the whole area"
        center_range = (near_range + far_range) / 2
        center_look_angle = np.pi - np.arccos((r ** 2 + center_range ** 2 - (r + H) ** 2) / (2 * r * center_range))
        range_x = np.tile(center_range, ncols)
        look_angle_x = np.tile(center_look_angle, ncols)
    # C1_v = Bp_v / (center_range * np.sin(center_look_angle))
    # C1   = Bp   / (center_range * np.sin(center_look_angle))
    # timeseries_v = (timeseries[1:lt,:] - timeseries[0:lt-1,:]) / (tbase[1:lt] - tbase[0:lt-1])

    ##### Inversion column by column
    print "inversing using L2-norm minimization (unweighted least squares)..."
    dz = np.zeros([1, nrows * ncols])

    for i in range(ncols):
        ## Design Matrix Inversion
        C1_v = Bp_v / (range_x[i] * np.sin(look_angle_x[i]))
        C1 = Bp / (range_x[i] * np.sin(look_angle_x[i]))
        if phase_velocity == "yes":
            C = np.hstack((M, C1_v))
        else:
            C = np.hstack((M, C1))

        # print '    rank of the design matrix : '+str(np.linalg.matrix_rank(C))
        # if np.linalg.matrix_rank(C) == 4:  print '    design matrix has full rank'
        Cinv = np.linalg.pinv(C)

        ## (Phase) Velocity History
        ts_x = timeseries[:, i * nrows : (i + 1) * nrows]
        ts_xv = (ts_x[1:lt, :] - ts_x[0 : lt - 1, :]) / (tbase[1:lt] - tbase[0 : lt - 1])

        ## DEM error
        if phase_velocity == "yes":
            par = np.dot(Cinv, ts_xv)
        else:
            par = np.dot(Cinv, ts_x)
        dz_x = par[3].reshape((1, nrows))

        ## Update DEM error matrix and timeseries matrix
        dz[0][i * nrows : (i + 1) * nrows] = dz_x
        timeseries[:, i * nrows : (i + 1) * nrows] -= np.dot(C1, dz_x)

        ut.printProgress(i + 1, ncols)

    # dz[0][:] = par[3][:]
    dz = np.reshape(dz, [nrows, ncols], order="F")

    ########## Output - DEM error #######################
    # print '**************************************'
    # print 'writing DEM_error.hgt'
    # writefile.write_float32(dz,'DEM_error.hgt')
    # f = open('DEM_error.hgt.rsc','w')
    # f.write('FILE_LENGTH       '+str(int(nrows))+'\n')
    # f.write('WIDTH             '+str(int(ncols))+'\n')
    # print '**************************************'

    h5fileDEM = "DEM_error.h5"
    print "writing >>> " + h5fileDEM
    h5rmse = h5py.File(h5fileDEM, "w")
    group = h5rmse.create_group("dem")
    dset = group.create_dataset(os.path.basename("dem"), data=dz, compression="gzip")
    for key, value in atr.iteritems():
        group.attrs[key] = value
    group.attrs["UNIT"] = "m"
    print "**************************************"

    ########### Output - Corrected Time Series ##########
    if update_timeseries == "yes":
        print "writing >>> " + outname
        print "number of dates: " + str(len(dateList))

        h5timeseriesDEMcor = h5py.File(outname, "w")
        group = h5timeseriesDEMcor.create_group("timeseries")
        for i in range(len(dateList)):
            print dateList[i]
            d = np.reshape(timeseries[i][:], [nrows, ncols], order="F")
            dset = group.create_dataset(dateList[i], data=d, compression="gzip")
        # for date in dateList:
        #    print date
        #    if not date in h5timeseriesDEMcor['timeseries']:
        #        d = np.reshape(timeseries[dateIndex[date]][:],[nrows,ncols],order='F')
        #        dset = group.create_dataset(date, data=d, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5timeseriesDEMcor.close()
Example #18
0
def main(argv):
    inps = cmdLineParse()

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        sys.exit('ERROR: only timeseries file supported, input is ' + k +
                 ' file!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32).reshape(
        (date_num, 1))
    tbase /= 365.25

    # Read timeseries
    print 'loading time-series ...'
    timeseries = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Smooth timeseries with moving window in time
    print 'smoothing time-series using moving Gaussian window with size of %.1f years' % inps.time_win
    timeseries_filt = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        # Weight from Gaussian (normal) distribution in time
        t_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (t_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
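        # Gaussian kernel in time with sigma = inps.time_win (years),
        # normalized so each filtered value is a weighted average over all
        # acquisitions.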
        weightMat = np.tile(weight, (1, pixel_num))
        # Smooth the current acquisition - moving window in time one by one
        timeseries_filt[i, :] = np.sum(timeseries * weightMat, 0)
        prog_bar.update(i + 1, suffix=date)
    del weightMat
    del timeseries
    prog_bar.close()

    # Write smoothed timeseries file
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    ref_date_idx = date_list.index(ref_date)
    print 'reference date: ' + ref_date
    print 'reference date index: ' + str(ref_date_idx)
    ref_data = np.reshape(timeseries_filt[ref_date_idx, :], [length, width])

    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0] + '_smooth.h5'
    print 'writing >>> ' + inps.outfile
    print 'number of acquisitions: ' + str(date_num)

    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = np.reshape(timeseries_filt[i, :], [length, width])
        dset = group.create_dataset(date,
                                    data=data - ref_data,
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5out.close()
    prog_bar.close()

    print 'Done.'
    return inps.outfile
Example #19
0
def timeseries_inversion(ifgramFile='unwrapIfgram.h5',
                         coherenceFile='coherence.h5',
                         inps_dict=None):
    '''Implementation of the SBAS algorithm.
    modified from sbas.py written by scott baker, 2012 

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        inps_dict     - dict, including the following options:
                        weight_function
                        min_coherence
                        max_coherence
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    '''
    total = time.time()

    if not inps_dict:
        inps_dict = vars(cmdLineParse())
    weight_func = inps_dict['weight_function']
    min_coh = inps_dict['min_coherence']
    max_coh = inps_dict['max_coherence']

    # Basic Info
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    if inps_dict['weight_function'] == 'no':
        ifgram_list = ut.check_drop_ifgram(h5ifgram, atr, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Convert ifgram_list to date12/8_list
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((date_num - 1, 1))

    print 'number of interferograms: ' + str(ifgram_num)
    print 'number of acquisitions  : ' + str(date_num)
    print 'number of pixels: ' + str(pixel_num)

    # Reference pixel in space
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except:
        print 'ERROR: No ref_x/y found! Cannot invert interferograms without a reference in space.'
        print 'run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
        sys.exit(1)

    ##### Read Interferograms
    print 'reading interferograms ...'
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for j in range(ifgram_num):
        ifgram = ifgram_list[j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        #d[d != 0.] -= d[ref_y, ref_x]
        d -= d[ref_y, ref_x]
        ifgram_data[j] = d.flatten()
        prog_bar.update(j + 1, suffix=date12_list[j])
    h5ifgram.close()
    prog_bar.close()

    #####---------------------- Inversion ----------------------#####
    # Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)
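    # SBAS design matrices (Berardino et al., 2002): each row of A holds
    # -1/+1 at the master/slave date columns (phase difference), while B
    # weights by the time spans so the unknowns are the phase velocities
    # between consecutive acquisitions.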

    if weight_func == 'no':
        print 'generalized inversion using SVD (Berardino et al., 2002, IEEE-TGRS)'
        print 'inverting time series ...'
        B_inv = np.array(np.linalg.pinv(B), np.float32)
        ts_rate = np.dot(B_inv, ifgram_data)
        ts1 = ts_rate * np.tile(tbase_diff, (1, pixel_num))
        ts0 = np.array([0.] * pixel_num, np.float32)
        ts_data = np.vstack((ts0, np.cumsum(ts1, axis=0)))
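        # pinv(B) gives the minimum-norm (SVD) velocity solution; velocities
        # are integrated as cumsum(v * dt), with the first acquisition set
        # to zero as the temporal reference.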
        del ts_rate, ts0, ts1

        # Temporal coherence
        print 'calculating temporal coherence (Tizzani et al., 2007, RSE)'
        temp_coh = np.zeros((1, pixel_num), np.float32) + 0j
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for i in range(ifgram_num):
            ifgram_est = np.dot(A[i, :], ts_data[1:, :])
            ifgram_diff = ifgram_data[i, :] - ifgram_est
            temp_coh += np.exp(1j * ifgram_diff)
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        del ifgram_data, ifgram_est, ifgram_diff
        temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
            (length, width)),
                            dtype=np.float32)
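        # Temporal coherence = |sum(exp(j * phase_residual))| / ifgram_num,
        # a [0, 1] measure of how well the inverted time series reproduces
        # the observed interferograms (Tizzani et al., 2007).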

    else:
        print 'weighted least squares (WLS) inversion using coherence pixel by pixel'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Cannot invert the weighted least squares solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)

        pixel_mask = np.ones(pixel_num, np.bool_)
        print 'reading coherence: ' + os.path.basename(coherenceFile)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[:].flatten()
            d[np.isnan(d)] = 0.
            pixel_mask[d == 0.] = 0
            coh_data[j] = d
            prog_bar.update(j + 1, suffix=date12_list[j])
        h5coh.close()
        prog_bar.close()

        # Get mask of valid pixels to inverse
        print 'skip pixels with zero coherence in at least one interferogram'
        print 'skip pixels with zero phase     in all          interferograms'
        ifgram_stack = ut.get_file_stack(ifgramFile).flatten()
        pixel_mask[ifgram_stack == 0.] = 0

        pixel_num2inv = np.sum(pixel_mask)
        pixel_idx2inv = np.where(pixel_mask)[0]
        ifgram_data = ifgram_data[:, pixel_mask]
        coh_data = coh_data[:, pixel_mask]
        print 'number of pixels to invert: %d' % (pixel_num2inv)

        ##### Calculate Weight matrix
        weight = coh_data
        if weight_func.startswith('var'):
            print 'convert coherence to weight using inverse of variance: x**2/(1-x**2) from Hanssen (2001, Eq. 4.2.32)'
            weight[weight > 0.999] = 0.999
            if weight_func == 'variance-max-coherence':
                print 'constrain the max coherence to %f' % max_coh
                weight[weight > max_coh] = max_coh
            weight = np.square(weight)
            weight *= 1. / (1. - weight)
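            # For distributed scatterers the phase variance scales with
            # (1 - gamma**2) / gamma**2, so gamma**2 / (1 - gamma**2) acts
            # as an inverse-variance weight (Hanssen, 2001).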
            if weight_func == 'variance-log':
                print 'use log(1/variance)+1 as weight'
                weight = np.log(weight + 1)
        elif weight_func.startswith('lin'):
            print 'use coherence as weight directly (Tong et al., 2016, RSE)'
        elif weight_func.startswith('norm'):
            mu = (min_coh + max_coh) / 2.0
            std = (max_coh - min_coh) / 6.0
            print 'convert coherence to weight using CDF of normal distribution: N(%f, %f)' % (
                mu, std)
            chunk_size = 1000
            chunk_num = int(pixel_num2inv / chunk_size) + 1
            prog_bar = ptime.progress_bar(maxValue=chunk_num)
            for i in range(chunk_num):
                i0 = i * chunk_size
                i1 = min([pixel_num2inv, i0 + chunk_size])
                weight[:, i0:i1] = norm.cdf(weight[:, i0:i1], mu, std)
                prog_bar.update(i + 1, every=10)
            prog_bar.close()
            #weight = norm.cdf(weight, mu, std)
        else:
            print 'Un-recognized weight function: %s' % weight_func
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        ts_data = np.zeros((date_num, pixel_num), np.float32)
        temp_coh = np.zeros(pixel_num, np.float32)
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            # Inverse timeseries
            ifgram_pixel = ifgram_data[:, i]
            weight_pixel = weight[:, i]
            W = np.diag(weight_pixel)
            ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(
                A.T).dot(W).dot(ifgram_pixel)
            ts_data[1:, pixel_idx2inv[i]] = ts
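            # Weighted LS estimator ts = (A' W A)^-1 A' W d with W = diag(w);
            # ts_data[0] stays zero as the temporal reference.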

            # Calculate weighted temporal coherence
            ifgram_diff = ifgram_pixel - np.dot(A, ts)
            temp_coh_pixel = np.abs(
                np.sum(np.multiply(weight_pixel, np.exp(1j * ifgram_diff)),
                       axis=0)) / np.sum(weight_pixel)
            temp_coh[pixel_idx2inv[i]] = temp_coh_pixel

            prog_bar.update(i + 1, every=2000, suffix=str(i + 1) + ' pixels')
        prog_bar.close()
        del ifgram_data, weight

    #####---------------------- Outputs ----------------------#####
    ## 1.1 Convert time-series phase to displacement
    print 'converting phase to range'
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    ts_data *= phase2range

    ## 1.2 Write time-series data matrix
    timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date,
                                    data=ts_data[i].reshape(length, width),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    ## 1.3 Write time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(
        ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(
        None, '[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()
    del ts_data

    ## 2. Write Temporal Coherence File
    tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coh.reshape(length, width), atr, tempCohFile)

    print 'Time series inversion took ' + str(time.time() -
                                              total) + ' secs\nDone.'
    return timeseriesFile, tempCohFile