Code Example #1
File: reference_epoch.py Project: jthetzel/PySAR
def ref_date_file(inFile, ref_date, outFile=None):
    '''Change input file reference date to a different one.'''
    if not outFile:
        outFile = os.path.splitext(inFile)[0] + '_refDate.h5'

    # Input file type
    atr = readfile.read_attribute(inFile)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        print 'Input file is ' + k + ', only timeseries is supported.'
        return None

    # Input reference date
    h5 = h5py.File(inFile, 'r')
    date_list = sorted(h5[k].keys())
    h5.close()
    date_num = len(date_list)
    try:
        ref_date_orig = atr['ref_date']
    except:
        ref_date_orig = date_list[0]

    ref_date = ptime.yyyymmdd(ref_date)
    print 'input reference date: ' + ref_date
    if ref_date not in date_list:
        print 'Input reference date was not found!\nAll dates available: ' + str(
            date_list)
        return None
    if ref_date == ref_date_orig:
        print 'Same reference date chosen as existing reference date.'
        print 'Copy %s to %s' % (inFile, outFile)
        shutil.copy2(inFile, outFile)
        return outFile

    # Referencing in time
    h5 = h5py.File(inFile, 'r')
    ref_data = h5[k].get(ref_date)[:]

    print 'writing >>> ' + outFile
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]
        dset = group.create_dataset(date,
                                    data=data - ref_data,
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5.close()

    ## Update attributes
    atr = ref_date_attribute(atr, ref_date, date_list)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5out.close()

    return outFile
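A minimal usage sketch for the function above; the file name and date below are hypothetical, and it assumes ref_date_file is importable from reference_epoch with PySAR's readfile/ptime/h5py dependencies on the path:

from reference_epoch import ref_date_file

# Re-reference a time series to the (hypothetical) 20150922 acquisition;
# the output name defaults to <input>_refDate.h5 when outFile is omitted.
out_file = ref_date_file('timeseries.h5', '20150922')
if out_file is not None:
    print 'new reference date written to: ' + out_file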
Code Example #2
def write_timeseries_hdf5_file(timeseries,
                               date8_list,
                               atr,
                               timeseriesFile=None):
    ''' Write to timeseries HDF5 file
    Inputs:
        timeseries - 3D np.array in size of (date_num, length, width)
                     cumulative time series phase
        date8_list - list of string in YYYYMMDD format
        atr        - dict, attributes of time-series file, including two parts:
                     1) attributes inherited from interferograms
                     2) attributes of time-series inverted from network of interferograms:
                         P_BASELINE_TIMESERIES
                         P_BASELINE_TOP_TIMESERIES
                         P_BASELINE_BOTTOM_TIMESERIES
                         ref_date
        timeseriesFile - string, file name of output time-series file
    Output:
        timeseriesFile - string, file name of output time-series file
    '''

    ## 1 Convert time-series phase to displacement
    print 'converting phase to range'
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    timeseries *= phase2range

    ## 2 Write time-series data matrix
    if not timeseriesFile:
        timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile

    date_num = len(date8_list)
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date,
                                    data=timeseries[i],
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()

    return timeseriesFile
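A hedged sketch of calling write_timeseries_hdf5_file with synthetic inputs; the attribute values below are placeholders rather than real metadata, and the function plus its ptime/h5py dependencies are assumed importable:

import numpy as np

date8_list = ['20150911', '20150922', '20151003']
# cumulative phase history, all zeros here just to exercise the writer
ts_phase = np.zeros((len(date8_list), 100, 200), np.float32)
atr = {'WAVELENGTH': '0.05623', 'FILE_LENGTH': '100', 'WIDTH': '200',
       'FILE_TYPE': 'timeseries', 'UNIT': 'm', 'ref_date': date8_list[0]}
out_file = write_timeseries_hdf5_file(ts_phase, date8_list, atr)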
Code Example #3
def timeseries_inversion(ifgramFile='unwrapIfgram.h5',
                         coherenceFile='coherence.h5',
                         inps_dict=None):
    '''Implementation of the SBAS algorithm.
    modified from sbas.py written by scott baker, 2012 

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        inps_dict     - dict, including the following options:
                        weight_function
                        min_coherence
                        max_coherence
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    '''
    total = time.time()

    if not inps_dict:
        inps_dict = vars(cmdLineParse())
    weight_func = inps_dict['weight_function']
    min_coh = inps_dict['min_coherence']
    max_coh = inps_dict['max_coherence']

    # Basic Info
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    if inps_dict['weight_function'] == 'no':
        ifgram_list = ut.check_drop_ifgram(h5ifgram, atr, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Convert ifgram_list to date12/8_list
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((date_num - 1, 1))

    print 'number of interferograms: ' + str(ifgram_num)
    print 'number of acquisitions  : ' + str(date_num)
    print 'number of pixels: ' + str(pixel_num)

    # Reference pixel in space
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except:
        print 'ERROR: No ref_x/y found! Can not inverse interferograms without reference in space.'
        print 'run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
        sys.exit(1)

    ##### Read Interferograms
    print 'reading interferograms ...'
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for j in range(ifgram_num):
        ifgram = ifgram_list[j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        #d[d != 0.] -= d[ref_y, ref_x]
        d -= d[ref_y, ref_x]
        ifgram_data[j] = d.flatten()
        prog_bar.update(j + 1, suffix=date12_list[j])
    h5ifgram.close()
    prog_bar.close()

    #####---------------------- Inversion ----------------------#####
    # Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)

    if weight_func == 'no':
        print 'generalized inversion using SVD (Berardino et al., 2002, IEEE-TGRS)'
        print 'inversing time series ...'
        B_inv = np.array(np.linalg.pinv(B), np.float32)
        ts_rate = np.dot(B_inv, ifgram_data)
        ts1 = ts_rate * np.tile(tbase_diff, (1, pixel_num))
        ts0 = np.array([0.] * pixel_num, np.float32)
        ts_data = np.vstack((ts0, np.cumsum(ts1, axis=0)))
        del ts_rate, ts0, ts1

        # Temporal coherence
        print 'calculating temporal coherence (Tizzani et al., 2007, RSE)'
        temp_coh = np.zeros((1, pixel_num), np.float32) + 0j
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for i in range(ifgram_num):
            ifgram_est = np.dot(A[i, :], ts_data[1:, :])
            ifgram_diff = ifgram_data[i, :] - ifgram_est
            temp_coh += np.exp(1j * ifgram_diff)
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        del ifgram_data, ifgram_est, ifgram_diff
        temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
            (length, width)),
                            dtype=np.float32)

    else:
        print 'weighted least square (WLS) inversion using coherence pixel by pixel'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Can not inverse the weighted least square solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)

        pixel_mask = np.ones(pixel_num, np.bool_)
        print 'reading coherence: ' + os.path.basename(coherenceFile)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[:].flatten()
            d[np.isnan(d)] = 0.
            pixel_mask[d == 0.] = 0
            coh_data[j] = d
            prog_bar.update(j + 1, suffix=date12_list[j])
        h5coh.close()
        prog_bar.close()

        # Get mask of valid pixels to inverse
        print 'skip pixels with zero coherence in at least one interferogram'
        print 'skip pixels with zero phase     in all          interferograms'
        ifgram_stack = ut.get_file_stack(ifgramFile).flatten()
        pixel_mask[ifgram_stack == 0.] = 0

        pixel_num2inv = np.sum(pixel_mask)
        pixel_idx2inv = np.where(pixel_mask)[0]
        ifgram_data = ifgram_data[:, pixel_mask]
        coh_data = coh_data[:, pixel_mask]
        print 'number of pixels to inverse: %d' % (pixel_num2inv)

        ##### Calculate Weight matrix
        weight = coh_data
        if weight_func.startswith('var'):
            print 'convert coherence to weight using inverse of variance: x**2/(1-x**2) from Hanssen (2001, Eq. 4.2.32)'
            weight[weight > 0.999] = 0.999
            if weight_func == 'variance-max-coherence':
                print 'constrain the max coherence to %f' % max_coh
                weight[weight > max_coh] = max_coh
            weight = np.square(weight)
            weight *= 1. / (1. - weight)
            if weight_func == 'variance-log':
                print 'use log(1/variance)+1 as weight'
                weight = np.log(weight + 1)
        elif weight_func.startswith('lin'):
            print 'use coherence as weight directly (Tong et al., 2016, RSE)'
        elif weight_func.startswith('norm'):
            mu = (min_coh + max_coh) / 2.0
            std = (max_coh - min_coh) / 6.0
            print 'convert coherence to weight using CDF of normal distribution: N(%f, %f)' % (
                mu, std)
            chunk_size = 1000
            chunk_num = int(pixel_num2inv / chunk_size) + 1
            prog_bar = ptime.progress_bar(maxValue=chunk_num)
            for i in range(chunk_num):
                i0 = i * chunk_size
                i1 = min([pixel_num2inv, i0 + chunk_size])
                weight[:, i0:i1] = norm.cdf(weight[:, i0:i1], mu, std)
                prog_bar.update(i + 1, every=10)
            prog_bar.close()
            #weight = norm.cdf(weight, mu, std)
        else:
            print 'Un-recognized weight function: %s' % weight_func
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inversing time series ...'
        ts_data = np.zeros((date_num, pixel_num), np.float32)
        temp_coh = np.zeros(pixel_num, np.float32)
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            # Inverse timeseries
            ifgram_pixel = ifgram_data[:, i]
            weight_pixel = weight[:, i]
            W = np.diag(weight_pixel)
            ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(
                A.T).dot(W).dot(ifgram_pixel)
            ts_data[1:, pixel_idx2inv[i]] = ts

            # Calculate weighted temporal coherence
            ifgram_diff = ifgram_pixel - np.dot(A, ts)
            temp_coh_pixel = np.abs(
                np.sum(np.multiply(weight_pixel, np.exp(1j * ifgram_diff)),
                       axis=0)) / np.sum(weight_pixel)
            temp_coh[pixel_idx2inv[i]] = temp_coh_pixel

            prog_bar.update(i + 1, every=2000, suffix=str(i + 1) + ' pixels')
        prog_bar.close()
        del ifgram_data, weight

    #####---------------------- Outputs ----------------------#####
    ## 1.1 Convert time-series phase to displacement
    print 'converting phase to range'
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    ts_data *= phase2range

    ## 1.2 Write time-series data matrix
    timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date,
                                    data=ts_data[i].reshape(length, width),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    ## 1.3 Write time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(
        ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(
        None, '[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()
    del ts_data

    ## 2. Write Temporal Coherence File
    tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coh.reshape(length, width), atr, tempCohFile)

    print 'Time series inversion took ' + str(time.time() -
                                              total) + ' secs\nDone.'
    return timeseriesFile, tempCohFile
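The per-pixel weighted branch above solves the normal equations ts = (A^T W A)^-1 A^T W d. A self-contained numpy sketch of that single step, with a synthetic design matrix, phases and weights (toy sizes, illustration only):

import numpy as np

ifgram_num, date_num = 6, 4
A = np.random.randn(ifgram_num, date_num - 1)   # design matrix, one row per interferogram
d = np.random.randn(ifgram_num)                 # unwrapped phase of one pixel
w = np.random.rand(ifgram_num)                  # coherence-based weights
W = np.diag(w)
ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(A.T).dot(W).dot(d)
print ts                                        # incremental phase time series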
Code Example #4
File: geocode.py Project: louisemaubant/PySAR
def geocode_file_geo_lut(fname, lookup_file, fname_out, inps):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file generated by ROI_PAC or Gamma
                      i.e. geomap_4rlks.trans           from ROI_PAC
                           sim_150911-150922.UTM_TO_RDC from Gamma
        fname_out   : string, output geocoded filename
        inps        : Namespace, options used here:
                      interp_method : string, interpolation/resampling method, supporting nearest, linear
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out   : string, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ##### Interpolate value on irregular radar coordinates (from lookup table file value)
    ##### with known value on regular radar coordinates (from radar file attribute)
    ## Grid/regular coordinates from row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: '+fname
    atr_rdr = readfile.read_attribute(fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_old = (np.arange(len_rdr), np.arange(wid_rdr))

    ## Irregular coordinates from data value in lookup table
    print 'reading lookup table file: '+lookup_file
    atr_lut = readfile.read_attribute(lookup_file)
    rg = readfile.read(lookup_file, epoch='range')[0]
    az = readfile.read(lookup_file, epoch='azimuth')[0]
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az>0.0)*(az<=len_rdr)*(rg>0.0)*(rg<=wid_rdr)
    pts_new = np.hstack((az[idx].reshape(-1,1), rg[idx].reshape(-1,1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.empty((len_geo, wid_geo))
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of datasets: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_geo_lut(atr_rdr, atr_lut)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_geo_lut(h5[k][ifgram].attrs, atr_lut, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                       bounds_error=False, fill_value=inps.fill_value)
        data_geo[idx] = RGI_func(pts_new)

        print 'update attributes'
        atr = update_attribute_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
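The core of the geocoding above is one RegularGridInterpolator call: values known on the regular radar grid are sampled at the irregular (az, rg) positions read from the lookup table. A standalone sketch with synthetic data:

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

length, width = 50, 80
data = np.random.randn(length, width)                # data on a regular radar grid
pts_old = (np.arange(length), np.arange(width))
# synthetic irregular (az, rg) query points inside the grid
pts_new = np.random.rand(1000, 2) * np.array([length - 1, width - 1])
RGI_func = RGI(pts_old, data, method='nearest',
               bounds_error=False, fill_value=np.nan)
values = RGI_func(pts_new)                           # one value per query point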
Code Example #5
def main(argv):
    inps = cmdLineParse()

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        sys.exit('ERROR: only timeseries file supported, input is ' + k +
                 ' file!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32).reshape(
        (date_num, 1))
    tbase /= 365.25

    # Read timeseries
    print 'loading time-series ...'
    timeseries = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Smooth timeseries with moving window in time
    print 'smoothing time-series using moving gaussian window with size of %.1f years' % inps.time_win
    timeseries_filt = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        # Weight from Gaussian (normal) distribution in time
        t_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (t_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        weightMat = np.tile(weight, (1, pixel_num))
        # Smooth the current acquisition - moving window in time one by one
        timeseries_filt[i, :] = np.sum(timeseries * weightMat, 0)
        prog_bar.update(i + 1, suffix=date)
    del weightMat
    del timeseries
    prog_bar.close()

    # Write smoothed timeseries file
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    ref_date_idx = date_list.index(ref_date)
    print 'reference date: ' + ref_date
    print 'reference date index: ' + str(ref_date_idx)
    ref_data = np.reshape(timeseries_filt[ref_date_idx, :], [length, width])

    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0] + '_smooth.h5'
    print 'writing >>> ' + inps.outfile
    print 'number of acquisitions: ' + str(date_num)

    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = np.reshape(timeseries_filt[i, :], [length, width])
        dset = group.create_dataset(date,
                                    data=data - ref_data,
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5out.close()
    prog_bar.close()

    print 'Done.'
    return inps.outfile
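The smoothing step above is a normalized Gaussian moving average along the time axis. A compact standalone sketch of the weighting for one acquisition, using synthetic dates and data:

import numpy as np

tbase = np.array([0.0, 0.1, 0.2, 0.4, 0.5]).reshape(-1, 1)  # years since first date
series = np.random.randn(5, 10)                             # (date_num, pixel_num)
time_win = 0.3                                              # window size in years
i = 2                                                       # acquisition to smooth
weight = np.exp(-0.5 * (tbase[i] - tbase)**2 / time_win**2)
weight /= np.sum(weight)                                    # weights sum to 1
series_filt_i = np.sum(series * weight, axis=0)             # smoothed acquisition i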
Code Example #6
def main(argv):
    try:
        timeseries_file = argv[0]
    except:
        usage()
        sys.exit(1)

    # Basic info
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series
    print "loading time series: " + timeseries_file
    h5 = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    pixel_num = length * width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    timeseries = np.zeros((date_num, pixel_num), np.float32)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    del d
    h5.close()

    ##### Calculate 1st and 2nd temporal derivatives
    print "calculating temporal 1st derivative ... "
    timeseries_1st = np.zeros((date_num - 1, pixel_num), np.float32)
    for i in range(date_num - 1):
        timeseries_1st[i][:] = timeseries[i + 1][:] - timeseries[i][:]

    print "calculating temporal 2nd derivative"
    timeseries_2nd = np.zeros((date_num - 2, pixel_num), np.float32)
    for i in range(date_num - 2):
        timeseries_2nd[i][:] = timeseries_1st[i + 1][:] - timeseries_1st[i][:]

    ##### Write 1st and 2nd temporal derivatives
    outfile1 = os.path.splitext(timeseries_file)[0] + '_1stDerivative.h5'
    print 'writing >>> ' + outfile1
    h5out = h5py.File(outfile1, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num - 1)
    for i in range(date_num - 1):
        date = date_list[i + 1]
        dset = group.create_dataset(date,
                                    data=np.reshape(timeseries_1st[i][:],
                                                    [length, width]),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    outfile2 = os.path.splitext(timeseries_file)[0] + '_2ndDerivative.h5'
    print 'writing >>> ' + outfile2
    h5out = h5py.File(outfile2, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num - 2)
    for i in range(date_num - 2):
        date = date_list[i + 2]
        dset = group.create_dataset(date,
                                    data=np.reshape(timeseries_2nd[i][:],
                                                    [length, width]),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.iteritems():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    print 'Done.'
    return outfile1, outfile2
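The two difference loops above are equivalent to numpy's first and second discrete differences along the time axis; a one-call sketch on synthetic data:

import numpy as np

ts = np.random.randn(8, 100)         # (date_num, pixel_num)
ts_1st = np.diff(ts, n=1, axis=0)    # shape (date_num - 1, pixel_num)
ts_2nd = np.diff(ts, n=2, axis=0)    # shape (date_num - 2, pixel_num)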
Code Example #7
File: unwrap_error.py Project: ilearnProgramme/PySAR
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print 'estimate phase ramp during the correction'
    print 'ramp type: ' + ramp_type

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i], x_list[i]] == 0:
            print '\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n' % (
                y_list[i], x_list[i])
            sys.exit(1)
    print 'Number of bridges: ' + str(len(x_list) / 2)
    print 'Bonding points coordinates:\nx: ' + str(x_list) + '\ny: ' + str(
        y_list)

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx = ''
        n_bridge = len(x_list) / 2
        for i in range(n_bridge):
            pair_yx = str(y_list[2 * i]) + ',' + str(x_list[2 * i]) + ',' + str(
                y_list[2 * i + 1]) + ',' + str(x_list[2 * i + 1])
            if not i == n_bridge - 1:
                point_yx += pair_yx + ','
                line_yx += pair_yx + ';'
            else:
                point_yx += pair_yx
                line_yx += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print plot_cmd
            os.system(plot_cmd)
        except:
            pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        print 'reference pixel in y/x: %d/%d' % (ref_y, ref_x)
    except:
        sys.exit(
            'ERROR: Can not find ref_y/x value, input file is not referenced in space!'
        )

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0] + '_unwCor' + ext
    ifgram_cor_deramp_file = os.path.splitext(
        ifgram_cor_file)[0] + '_' + ramp_type + ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file, 'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + ifgram_cor_file

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file, 'w')
            group_deramp = h5out_deramp.create_group(k)
            print 'writing >>> ' + ifgram_cor_deramp_file

        ##### Loop
        print 'Number of interferograms: ' + str(ifgram_num)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                       ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            ramp[data == 0.] = 0.
            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram,
                                     data=data_derampCor + ramp,
                                     compression='gzip')
            for key, value in h5[k][ifgram].attrs.iteritems():
                gg.attrs[key] = value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram,
                                                data=data_derampCor,
                                                compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg_deramp.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try:
            h5out_deramp.close()
        except:
            pass

    #### .unw file
    elif ext == '.unw':
        print 'read ' + ifgram_file
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
        data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

        print 'writing >>> ' + ifgram_cor_file
        ramp[data == 0.] = 0.
        ifgram_cor_file = writefile.write(data_derampCor + ramp, atr,
                                          ifgram_cor_file)
        if save_cor_deramp_file:
            print 'writing >>> ' + ifgram_cor_deramp_file
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr,
                                                     ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: ' + ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
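A hedged usage sketch mirroring the docstring example, but for a single ROI_PAC .unw interferogram (all file names are hypothetical); the function deramps, bridges the masked patches, then adds the ramp back:

y_list = [235, 270]      # two bonding points = one bridge
x_list = [880, 890]
cor_file, cor_deramp_file = unwrap_error_correction_bridging(
    'filt_100901-101005.unw', 'mask_all.h5', y_list, x_list,
    ramp_type='plane', save_cor_deramp_file=True)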
Code Example #8
File: _remove_surface.py Project: zyh900908/PySAR
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print 'read mask file: ' + maskFile
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print 'use mask of the whole area'

    ##### Input File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'Input file is ' + k
    print 'remove ramp type: ' + surf_type

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print 'writing >>> ' + outFile

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print 'Removing ' + surf_type + ' from ' + k

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print 'writing >>> ' + outFile
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print 'Remove ' + surf_type + ' took ' + str(time.time() - start) + ' secs'
    return outFile
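A minimal usage sketch (file names hypothetical): remove a planar ramp from every acquisition of a time-series file, estimating the ramp only over the masked pixels; the output name defaults to <input>_plane.h5 when outFile is omitted:

out_file = remove_surface('timeseries.h5', 'plane', maskFile='maskTempCoh.h5')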
Code Example #9
File: geocode.py Project: zyh900908/PySAR
def geocode_file_with_geo_lookup_table(fname,
                                       lookup_file=None,
                                       interp_method='nearest',
                                       fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Inputs:
        fname         : string, file to be geocoded
        lookup_file   : string, optional, lookup table file generated by ROI_PAC or Gamma
                        i.e. geomap_4rlks.trans           from ROI_PAC
                             sim_150911-150922.UTM_TO_RDC from Gamma
        interp_method : string, optional, interpolation/resampling method, supporting nearest, linear, cubic
        fname_out : string, optional, output geocoded filename
    Output:
        fname_out

    A faster way is as below:
    https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    '''
    atr_rdr = readfile.read_attribute(fname)
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default values:
    if not lookup_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lookup_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lookup_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    # Check lookup table file
    try:
        lookup_file = ut.get_file_list(lookup_file)[0]
    except:
        lookup_file = None
    if not lookup_file:
        sys.exit(
            'ERROR: No lookup table file found! Can not geocode without it.')

    ##### 1. Get Y/X coordinates in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    print 'getting Y/X coordinates from file in radar coordinates'
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    yy, xx = np.mgrid[0:len_rdr - 1:len_rdr * 1j, 0:wid_rdr - 1:wid_rdr * 1j]
    yx_rdr = np.hstack((yy.reshape(-1, 1), xx.reshape(-1, 1)))

    ##### 2. Get Y/X coordinates in geo*trans file
    print 'reading ' + lookup_file
    rg, az, atr_lut = readfile.read(lookup_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust value read from lookup table file'

    # extract pixels only available in radar file (get rid of invalid corners)
    az = az.flatten()
    rg = rg.flatten()
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    yx_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))

    print 'interpolation method: ' + interp_method
    k = atr_rdr['FILE_TYPE']

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                dset = group.create_dataset(date,
                                            data=data_geo.reshape(
                                                (len_geo, wid_geo)),
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo.reshape(
                                             (len_geo, wid_geo)),
                                         compression='gzip')
                atr = geocode_attribute_with_geo_lookup_table(
                    h5[k][ifgram].attrs, atr_lut, print_message=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0].flatten()
        print 'geocoding'
        data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
        data_geo[idx] = griddata(yx_rdr, data, yx_geo, method=interp_method)
        print 'update attributes'
        atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
        print 'writing >>> ' + fname_out
        writefile.write(data_geo.reshape((len_geo, wid_geo)), atr, fname_out)

    return fname_out
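The resampling core of this variant is scipy.interpolate.griddata, which interpolates scattered known samples at new query points; a self-contained sketch with synthetic coordinates:

import numpy as np
from scipy.interpolate import griddata

yx_rdr = np.random.rand(500, 2)     # known (y, x) sample positions
data = np.random.randn(500)         # values at the known positions
yx_geo = np.random.rand(200, 2)     # query positions on the geo grid
data_geo = griddata(yx_rdr, data, yx_geo, method='nearest')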
Code Example #10
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print 'read option from template file: ' + inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print "loading time series: " + inps.timeseries_file
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print '\tconsider P_BASELINE variation in azimuth direction'
        else:
            pbase = inps.pbase
    except:
        print '\tCannot find P_BASELINE_TIMESERIES from timeseries file.'
        print '\tTrying to calculate it from interferograms file'
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Can not correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print 'read temporal baseline'
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print 'reading incidence angle from file: ' + inps.incidence_angle
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print 'use input incidence angle : ' + str(
                    inps.incidence_angle)
            except:
                raise ValueError('Can not read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print 'calculate incidence angle using attributes of time series file'
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print 'reading range distance from file: ' + inps.range_dis
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print 'use input range distance : ' + str(inps.range_dis)
            except:
                raise ValueError('Can not read input range distance: ' +
                                 str(inps.range_dis))
    else:
        print 'calculate range distance using attributes from time series file'
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print '-------------------------------------------------'
    if inps.phase_velocity:
        print 'using phase velocity history'
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print 'using phase history'
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0

    # Polynomial order of model
    print "temporal deformation model's polynomial order = " + str(
        inps.poly_order)
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print "temporal deformation model's step function step at " + inps.step_date
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print '-------------------------------------------------'

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width], dtype=np.float32)
    resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32)
    constC = np.zeros([length, width], dtype=np.float32)
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print 'inversing using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width)
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i / length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print 'inversing using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width)
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1].reshape((1, length))

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = delta_z
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print 'inversing using L2-norm minimization (unweighted least squares) for the whole area'

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print 'ERROR: Script only support same dimension for both incidence angle and range distance matrix.'
        print 'dimension of incidence angle: ' + str(inps.incidence_angle.ndim)
        print 'dimension of range distance: ' + str(inps.range_dis.ndim)
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in atr.keys():
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print 'writing >>> ' + dem_error_file
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print 'writing >>> ' + inps.outfile
        print 'number of dates: ' + str(len(date_list))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print 'writing >>> ' + outFile
    print 'number of dates: ' + str(A_def.shape[0])
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.iteritems():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
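The per-pixel system above augments a polynomial deformation model with a DEM-error column pbase/(r*sin(theta)); a standalone numpy sketch with toy geometry (all values synthetic, for illustration only):

import numpy as np

date_num = 6
tbase = np.linspace(0., 2.5, date_num).reshape(-1, 1)   # years
pbase = np.random.randn(date_num, 1) * 100.             # perp. baseline in meters
range_dis = 850e3                                       # slant range in meters
inc_angle = np.deg2rad(34.)                             # incidence angle
A_delta_z = pbase / (range_dis * np.sin(inc_angle))     # DEM-error column
A_def = np.hstack((np.ones((date_num, 1)), tbase))      # linear deformation model
A = np.hstack((A_delta_z, A_def))
ts_dis = np.random.randn(date_num)                      # one pixel's displacement
X = np.dot(np.linalg.pinv(A), ts_dis)                   # X[0] is the DEM error delta_z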
Code Example #11
File: tropcor_GACOS.py Project: louisemaubant/PySAR
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        if 'ref_y' not in atr.keys() and inps.ref_yx:
        print 'No reference info found in input file, using input ref_yx: ' + str(
                inps.ref_yx)
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    #****reading incidence angle file***/
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle,
                                       epoch='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        inps.inc_angle = float(inps.inc_angle)
        print 'incidence angle: ' + str(inps.inc_angle)
    cinc = np.cos(inps.inc_angle * np.pi / 180.0)

    #****look up file****/
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list(
            [inps.lookup_file])[0]  #'geomap_32rlks_tight.trans'

    #****GACOS****/
    delay_source = 'GACOS'
    # Get weather directory
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(
                inps.lookup_file)) + '/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())

    print 'Store weather data into directory: ' + inps.GACOS_dir

    #source_dir=os.path.dirname(os.path.abspath('timeseries_file'))+'/Agung/GACOS/data';print source_dir
    #os.makedirs(GACOS_dir)  -----------------------------------------------add part to copy/download weather data------#
    #----get date list-----#
    if not inps.date_list_file:
        print 'read date list info from: ' + inps.timeseries_file
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in h5.keys():
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            ifgram_list = sorted(h5[k].keys())
            date12_list = ptime.list_ifgram2date12(ifgram_list)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Unsupported input file type: ' + k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    #****checking availability of delays****/
    print 'checking availability of delays'
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir + '/' + d + '.ztd'
        delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print 'no missing files'
    else:
        print 'no. of date files found:', len(delay_file_existed)
        print 'no. of dates:', len(date_list)

    #*****Calculating delays***/
    print 'calculating delays'

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    #initialise delay files
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    #reading GACOS delay files for each epoch and getting delay
    for i in range(date_num):
        delay_file = delay_file_list[i]  # use the full list so files stay aligned with date_list
        date = date_list[i]
        print 'calculating delay for date', date
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)

    print 'Delays Calculated'
    # Convert relative phase delay on reference date
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    print 'convert to relative phase delay with reference date: ' + ref_date
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = 'GACOSdelays' + '.h5'
    print 'writing >>> %s' % (tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print 'number of acquisitions: ' + str(date_num)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    # Write Attributes
    for key, value in atr.iteritems():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(
                inps.timeseries_file)[0] + '_' + 'GACOS' + '.h5'
        print 'writing trop corrected timeseries file %s' % (inps.out_file)
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print 'number of acquisitions: ' + str(date_num)
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            print date
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date,
                                       data=ts - trop_ts[i],
                                       compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key, value in atr.iteritems():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print 'delays written to %s' % (inps.out_file)

    print 'finished'
    return inps.out_file
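get_delay() is defined elsewhere in the script; as a hedged sketch of the arithmetic the correction rests on, a zenith total delay map can be projected to slant range with 1/cos(incidence) and then referenced in time, as above. All names and sizes below are illustrative:

import numpy as np

date_num, length, width = 3, 2, 2
ztd = np.random.rand(date_num, length, width).astype(np.float32)  # zenith delay, m
inc_angle = 34.0                                  # incidence angle in degrees (assumed)
cinc = np.cos(np.deg2rad(inc_angle))
trop = ztd / cinc                                 # slant-range tropospheric delay
ref_idx = 0                                       # index of the reference date
trop -= np.tile(trop[ref_idx], (date_num, 1, 1))  # relative to reference date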
Code example #12
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0]+suffix+os.path.splitext(inps.timeseries_file)[1]
    if inps.template_file:
        print 'read option from template file: '+inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    ##### Read Data
    atr = readfile.read_attribute(inps.timeseries_file)
    coordType = 'radar'
    if 'Y_FIRST' in atr.keys():
        coordType = 'geo'

    # 1. Incidence angle
    try:
        inps.inc_angle_file = ut.get_file_list(inps.inc_angle_file, coord=coordType)[0]
    except ValueError:
        print 'No incidence angle file found!\nRun incidence_angle.py to generate it.'
    print 'read incidence angle from file: '+str(inps.inc_angle_file)
    inps.inc_angle = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0].flatten()
    inps.inc_angle *= np.pi/180.0

    # 2. Slant Range distance
    try:
        inps.range_dist_file = ut.get_file_list(inps.range_dist_file, coord=coordType)[0]
    except ValueError:
        print 'No range distance file found!\nRun range_distance.py to generate it.'
    print 'read slant range distance from file: '+str(inps.range_dist_file)
    inps.range_dist = readfile.read(inps.range_dist_file, epoch='slantRangeDistance')[0].flatten()

    # 3. Perp Baseline - 1D in time, 0D/1D in space (azimuth)
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=1)
        if inps.pbase.shape[1] > 1:
            print 'consider perp baseline variance in azimuth direction'
    except ValueError:
        print 'No P_BASELINE_TIMESERIES found in timeseries file.\n'+\
              'Cannot correct for DEM residual without it!'

    # 4. Time Series - 1D in time, 1D in space (flattened)
    print "read time series file: " + inps.timeseries_file
    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(-1,1)

    #Mark dates used in the estimation
    inps.ex_date = check_exclude_date(inps.ex_date, date_list)
    inps.date_flag = np.array([i not in inps.ex_date for i in date_list], dtype=np.bool_)
    if inps.poly_order > np.sum(inps.date_flag):
        raise ValueError("ERROR: input polynomial order=%d is larger than the number of acquisitions=%d used in estimation!" %\
                         (inps.poly_order, np.sum(inps.date_flag)))

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length*width
    timeseries = np.zeros((date_num, pixel_num),np.float32)
    for i in range(date_num):
        timeseries[i] = h5['timeseries'].get(date_list[i])[:].flatten()
        sys.stdout.write('\rreading acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
    h5.close()
    print ''


    ##### Design matrix - temporal deformation model
    print '-------------------------------------------------'
    print 'Correct topographic phase residual using Fattahi and Amelung (2013, IEEE-TGRS)'
    msg = 'minimum-norm constraint on: phase'
    if inps.phase_velocity:
        msg += ' velocity'
    print msg

    # Heresh's original code for phase history approach
    #A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
    #A2 = inps.tbase**2 / 2.0
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))

    # 1. Polynomial - 2D matrix in size of (date_num, polyOrder+1)
    print "temporal deformation model: polynomial order = "+str(inps.poly_order)
    A_def = np.ones((date_num, 1), np.float32)
    for i in range(inps.poly_order):
        Ai = inps.tbase**(i+1) / gamma(i+2)
        Ai = np.array(Ai, np.float32).reshape(-1,1)
        A_def = np.hstack((A_def, Ai))

    # 2. Step function - 2D matrix in size of (date_num, stepNum)
    if inps.step_date:
        print "temporal deformation model: step functions at "+str(inps.step_date)
        yySteps = ptime.yyyymmdd2years(inps.step_date)
        yyList = np.array(ptime.yyyymmdd2years(date_list)).reshape(-1,1)
        for yyStep in yySteps:
            Ai = yyList > yyStep
            Ai = np.array(Ai, np.float32).reshape(-1,1)
            A_def = np.hstack((A_def, Ai))
    inps.step_num = len(inps.step_date)

    print '-------------------------------------------------'


    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    ## Output estimated steps 
    print 'ordinary least squares (OLS) inversion using L2-norm minimization'
    timeseriesCor = np.zeros((date_num, pixel_num), dtype=np.float32)
    timeseriesRes = np.zeros((date_num, pixel_num), dtype=np.float32)
    topoRes = np.zeros(pixel_num, dtype=np.float32)
    constC  = np.zeros(pixel_num, dtype=np.float32)
    if inps.step_num > 0:
        stepModel = np.zeros((inps.step_num, pixel_num), dtype=np.float32)

    print 'skip pixels with zero/nan value in geometry files - incidence angle and range distance'
    mask = np.multiply(~np.isnan(inps.inc_angle), ~np.isnan(inps.range_dist))
    mask[inps.inc_angle == 0.] = 0
    mask[inps.range_dist == 0.] = 0
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels in the file: %d' % (pixel_num)
    print 'number of pixels to invert  : %d' % (pixel_num2inv)

    if inps.pbase.shape[1] == 1:
        pbase = inps.pbase
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for i in range(pixel_num2inv):
        prog_bar.update(i+1, every=1000, suffix='%s/%s pixels'%(str(i+1), str(pixel_num2inv)))
        idx = pixel_idx2inv[i]

        r = inps.range_dist[idx]
        inc_angle = inps.inc_angle[idx]
        if inps.pbase.shape[1] > 1:
            pbase = inps.pbase[:, int(idx/width)].reshape(-1,1)
        A_deltaZ = pbase / (r * np.sin(inc_angle))

        A = np.hstack((A_deltaZ, A_def))
        ts = timeseries[:,idx].reshape(date_num,-1)
        deltaZ, tsCor, tsRes, stepEst = topographic_residual_inversion(ts, A, inps)
        topoRes[idx:idx+1] = deltaZ
        timeseriesCor[:,idx:idx+1] = tsCor
        timeseriesRes[:,idx:idx+1] = tsRes
        if inps.step_num > 0:
            stepModel[:,idx:idx+1] = stepEst
    prog_bar.close()


    ##------------------------------------------------ Output  --------------------------------------------##
    # 1. DEM error file
    if 'Y_FIRST' in atr.keys():
        deltaZFile = 'demGeo_error.h5'
    else:
        deltaZFile = 'demRadar_error.h5'
    print 'writing >>> '+deltaZFile
    atrDeltaZ = atr.copy()
    atrDeltaZ['FILE_TYPE'] = 'dem'
    atrDeltaZ['UNIT'] = 'm'
    writefile.write(topoRes.reshape(length, width), atrDeltaZ, deltaZFile)

    # 2. Topo Residual Corrected Time Series
    print 'writing >>> '+inps.outfile
    h5 = h5py.File(inps.outfile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesCor[i].reshape(length, width), compression='gzip')
    print ''
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 3. Inversion residual Time Series
    tsResFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesResidual.h5')
    print 'writing >>> '+os.path.basename(tsResFile)
    h5 = h5py.File(tsResFile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesRes[i].reshape(length, width), compression='gzip')
    print ''
    # Attribute
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 4. Step temporal Model estimation
    if inps.step_num > 0:
        stepFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesStepModel.h5')
        print 'writing >>> '+os.path.basename(stepFile)
        h5 = h5py.File(stepFile,'w')
        group = h5.create_group('timeseries')
        for i in range(inps.step_num):
            sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, inps.step_num))
            sys.stdout.flush()
            dset = group.create_dataset(inps.step_date[i], data=stepModel[i].reshape(length, width), compression='gzip')
        print ''
        # Attribute
        for key,value in atr.iteritems():
            group.attrs[key] = value
        group.attrs.pop('ref_date')
        h5.close()

    print 'Done.'
    return
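As a compact sketch of the estimation above (not the full script): for one pixel, the design matrix stacks the DEM-error column pbase/(r*sin(inc)) with the temporal polynomial, and ordinary least squares gives the DEM error as the first unknown. Sizes and values are illustrative:

import numpy as np
from scipy.special import gamma

date_num, poly_order = 6, 2
tbase = (np.arange(date_num) * 12.0).reshape(-1, 1)   # days since first date
pbase = np.random.randn(date_num, 1)                  # perp baseline per date, m
r, inc = 850e3, np.deg2rad(34.0)                      # slant range, incidence (assumed)
A_def = np.ones((date_num, 1))
for i in range(poly_order):
    A_def = np.hstack((A_def, tbase**(i+1) / gamma(i+2)))
A = np.hstack((pbase / (r * np.sin(inc)), A_def))     # DEM-error column first
ts = np.random.randn(date_num, 1)                     # one pixel's phase history
X = np.dot(np.linalg.pinv(A), ts)                     # X[0] = DEM error deltaZ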
Code example #13
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:

                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1)
                                       reference pixel coordinate in row/column number
                        ref_y/x      - int, reference pixel coordinate in row/column number

                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline

                        #Inversion
                        weight_function   - no, fim, var, coh
    Outputs:
        ts       - 3D np.array in size of (date_num, row_num, col_num)
        temp_coh - 2D np.array in size of (row_num, col_num)
        tsStd    - 3D np.array in size of (date_num, row_num, col_num)
    '''

    ##### Get patch size/index
    if not box:
        box = (0,0,meta['width'],meta['length'])
    c0,r0,c1,r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initiate output data matrices
    row_num = r1-r0
    col_num = c1-c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    tsStd = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Mask for pixels to invert
    mask = np.ones(pixel_num, np.bool_)
    ## 1 - Water Mask
    if meta['water_mask_file']:
        print 'skip pixels on water with mask from file: %s' % (os.path.basename(meta['water_mask_file']))
        try:    waterMask = readfile.read(meta['water_mask_file'], epoch='waterMask')[0][r0:r1,c0:c1].flatten()
        except: waterMask = readfile.read(meta['water_mask_file'], epoch='mask')[0][r0:r1,c0:c1].flatten()
        mask *= np.array(waterMask, np.bool_)

    ## 2 - Mask for Zero Phase in ALL ifgrams
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1,c0:c1].flatten()
    mask *= ~np.isnan(ifgram_stack)
    mask *= ifgram_stack != 0.

    ## Invert pixels on mask 1+2
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to invert: %s out of %s' % (pixel_num2inv, pixel_num)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        tsStd = tsStd.reshape(date_num, row_num, col_num)
        return ts, temp_coh, tsStd

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    date12_list = meta['date12_list']

    if meta['skip_zero_phase']:
        print 'skip zero phase value (masked out and filled during phase unwrapping)'
    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile,'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1,c0:c1].flatten()
        if meta['skip_zero_phase']:
            d[d != 0.] -= meta['ref_value'][j]
        else:
            d -= meta['ref_value'][j]
        ifgram_data[j] = d
        sys.stdout.write('\rreading interferograms %s/%s ...' % (j+1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
    #ifgram_data -= meta['ref_value']

    ## 3 - Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
    maskAllNet = np.all(ifgram_data, axis=0)
    maskAllNet *= mask
    maskPartNet = mask ^ maskAllNet

    ##### Design matrix
    A,B = ut.design_matrix(ifgramFile, date12_list)
    try:    ref_date = str(np.loadtxt('reference_date.txt', dtype=str))
    except: ref_date = meta['date8_list'][0]
    #print 'calculate decorrelation noise covariance with reference date = %s' % (ref_date)
    refIdx = meta['date8_list'].index(ref_date)
    timeIdx = [i for i in range(date_num)]
    timeIdx.remove(refIdx)
    Astd = ut.design_matrix(ifgramFile, date12_list, referenceDate=ref_date)[0]

    ##### Inversion
    if meta['weight_function'] in ['no','uniform']:
        if np.sum(maskAllNet) > 0:
            print 'inverting pixels with valid phase in all     ifgrams with OLS (%.0f pixels) ...' % (np.sum(maskAllNet))
            ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,maskAllNet], meta['tbase_diff'], skipZeroPhase=False)
            ts[1:,maskAllNet] = ts1
            temp_coh[maskAllNet] = tempCoh1

        if np.sum(maskPartNet) > 0:
            print 'inverting pixels with valid phase in part of ifgrams with SVD ...'
            pixel_num2inv = np.sum(maskPartNet)
            pixel_idx2inv = np.where(maskPartNet)[0]
            prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
            for i in range(pixel_num2inv):
                idx = pixel_idx2inv[i]
                ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,idx], meta['tbase_diff'], meta['skip_zero_phase'])
                ts[1:, idx] = ts1.flatten()
                temp_coh[idx] = tempCoh1
                prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
            prog_bar.close()

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        h5coh = h5py.File(coherenceFile,'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1,c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()
            sys.stdout.write('\rreading coherence %s/%s ...' % (j+1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = np.array(coh_data, np.float64)
        L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
        epsilon = 1e-4
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            weight = 1.0 / coherence2phase_variance_ds(weight, L, print_msg=True)

        elif meta['weight_function'].startswith(('lin','coh','cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            weight[weight < epsilon] = epsilon

        elif meta['weight_function'].startswith(('fim','fisher')):
            print 'convert coherence to weight using Fisher Information Index (Seymour & Cumming, 1994)'
            weight = coherence2fisher_info_index(weight, L)

        else:
        print 'Unrecognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            idx = pixel_idx2inv[i]
            ts1, tempCoh1, tsStd1 = network_inversion_wls(A, ifgram_data[:,idx], weight[:,idx], Astd=Astd,\
                                                          skipZeroPhase=meta['skip_zero_phase'])
            ts[1:, idx] = ts1.flatten()
            temp_coh[idx] = tempCoh1
            tsStd[timeIdx, idx] = tsStd1.flatten()
            prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
        prog_bar.close()

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)
    tsStd = tsStd.reshape(date_num, row_num, col_num)


    ##Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base']+str(int(r0/meta['row_step']))+'.h5'
        print 'writing >>> '+fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries', shape=(date_num+1, row_num, col_num), dtype=np.float32)
        dset[0:-1,:,:] = ts
        dset[-1,:,:] = temp_coh  # temporal coherence stored as the last layer
        h5temp.close()
        return
    else:
        return ts, temp_coh, tsStd
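network_inversion_sbas() itself is not shown above; a hedged sketch of the unweighted step it implements is the classic SBAS form: solve the inter-epoch rates from the design matrix B, then integrate with the differential temporal baselines. Toy sizes only:

import numpy as np

ifgram_num, date_num = 5, 4
B = np.random.randn(ifgram_num, date_num - 1)           # stand-in design matrix
dphi = np.random.randn(ifgram_num, 1)                   # one pixel's ifgram phases
tbase_diff = np.full((date_num - 1, 1), 12.0 / 365.25)  # years between dates
v = np.dot(np.linalg.pinv(B), dphi)                     # rate between epochs
ts = np.cumsum(tbase_diff * v, axis=0)                  # phase history, dates 2..N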
Code example #14
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in atr.keys():
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print 'reading mask from file: ' + inps.mask_file
    mask = readfile.read(inps.mask_file, epoch='mask')[0].flatten(1)
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print 'total            pixel number: %d' % pix_num
    print 'estimating using pixel number: %d' % msk_num

    ##### Read DEM
    print 'read DEM from file: ' + inps.dem_file
    dem = readfile.read(inps.dem_file, epoch='height')[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print 'considering the incidence angle of each pixel ...'
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten(1)
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print 'polynomial order: %d' % inps.poly_order

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print 'Estimating the stratified tropospheric effect by correlating each time-series epoch (and differences of subsequent epochs) with the DEM'

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]

    print '----------------------------------------------------------'
    print 'correlation of DEM with each time-series epoch:'
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten(1)

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print '%s: %.2f' % (date, cc)
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print '----------------------------------------------------------'
    print 'Average Correlation of DEM with time-series epochs: %.2f' % average_phase_height_corr

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten(1)
        data2 = h5[k].get(date2)[:].flatten(1)
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print '----------------------------------------------------------'
    print 'removing the stratified tropospheric delay from each epoch'
    print 'writing >>> ' + inps.outfile
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.iteritems():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print 'Done.'
    return inps.outfile
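The core of the correction above is a polynomial fit of phase against elevation, estimated on masked pixels and evaluated everywhere. A minimal first-order sketch with synthetic values:

import numpy as np

pix_num = 100
dem = np.random.rand(pix_num) * 2000.0            # elevation in metres (synthetic)
phase = 1e-4 * dem + np.random.randn(pix_num) * 0.01
ndx = np.random.rand(pix_num) > 0.2               # stand-in reliable-pixel mask
A = np.vstack((dem[ndx], np.ones(ndx.sum()))).T   # fit on masked pixels
B = np.vstack((dem, np.ones(pix_num))).T          # evaluate on all pixels
par = np.dot(np.linalg.pinv(A), phase[ndx])
corrected = phase - np.dot(B, par)                # stratified delay removed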
Code example #15
def main(argv):

    ##### Inputs
    try:
        ifgram_file = argv[0]
        timeseries_file = argv[1]
    except:
        usage(); sys.exit(1)
  
    try:    outfile = argv[2]
    except: outfile = 'reconstructed_'+ifgram_file

    atr = readfile.read_attribute(timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series file
    print 'loading timeseries ...'
    h5ts = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5ts['timeseries'].keys())
    date_num = len(date_list)
    timeseries = np.zeros((date_num, length*width))

    print 'number of acquisitions: '+str(date_num)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5ts['timeseries'].get(date)[:]
        timeseries[i,:] = d.flatten(0)
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5ts.close()
    del d

    range2phase = -4*np.pi/float(atr['WAVELENGTH'])
    timeseries = range2phase*timeseries

    #####  Estimate interferograms from timeseries
    print 'estimating interferograms from timeseries using design matrix from input interferograms'
    A,B = ut.design_matrix(ifgram_file)
    p = -1*np.ones([A.shape[0],1])
    Ap = np.hstack((p,A))
    estData = np.dot(Ap, timeseries)
    del timeseries

    ##### Write interferograms file
    print 'writing >>> '+outfile
    h5 = h5py.File(ifgram_file,'r')
    ifgram_list = sorted(h5['interferograms'].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    
    h5out = h5py.File(outfile,'w')
    group = h5out.create_group('interferograms')

    print 'number of interferograms: '+str(ifgram_num)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        data = np.reshape(estData[i,:],(length, width))

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=data, compression='gzip')
        for key, value in h5['interferograms'][ifgram].attrs.iteritems():
            gg.attrs[key] = value
        prog_bar.update(i+1, suffix=date12_list[i])
    prog_bar.close()
    h5.close()
    h5out.close()
    print 'Done.'
    return outfile
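The prepended -1 column works because the design matrix A spans dates 2..N with date 1 held at zero; each reconstructed interferogram is then exactly the slave-date phase minus the master-date phase. A three-date toy check (values illustrative):

import numpy as np

ts = np.array([[0.0], [0.2], [0.5]])        # phase history, ts[0] = 0 at reference
A = np.array([[ 1., 0.],                    # ifgram dates 1-2
              [-1., 1.],                    # ifgram dates 2-3
              [ 0., 1.]])                   # ifgram dates 1-3
Ap = np.hstack((-1 * np.ones((A.shape[0], 1)), A))
print(np.dot(Ap, ts))                       # [[0.2], [0.3], [0.5]]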
Code example #16
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'file type: ' + k

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has different epoch number '+\
                  'from input file.'
            print 'Reference List epoch number: ' + str(len(refList))
            print 'Input file     epoch number: ' + str(epochNum)
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print 'number of acquisitions: ' + str(epochNum)
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(epochNum)
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print 'writing >>> ' + outName
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
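Seeding amounts to subtracting each epoch's value at the reference pixel so that the pixel reads zero in every epoch. A standalone sketch on a toy cube (names illustrative):

import numpy as np

cube = np.random.rand(4, 5, 6).astype(np.float32)   # (epoch, y, x)
ref_y, ref_x = 2, 3
refList = cube[:, ref_y, ref_x].copy()              # one reference value per epoch
cube -= refList[:, None, None]
assert np.allclose(cube[:, ref_y, ref_x], 0.0)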
Code example #17
File: add.py  Project: louisemaubant/PySAR
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        'mask_all.h5' = add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print 'First input file is ' + atr['PROCESSOR'] + ' ' + k

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file
                    and ki in multi_group_hdf5_file):
                pass
            else:
                print 'Input file structures are not the same: ' + k + ' vs. ' + ki
                sys.exit(1)

        print 'writing >>> ' + fname_out
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print 'number of acquisitions: %d' % epoch_num
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                h5file.close()
                data = add_matrix(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print 'number of interferograms: %d' % epoch_num
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = h5file.keys()[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(
                    temp_epoch_list[i])[:]
                h5file.close()
                data = add_matrix(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print 'loading ' + fname
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print 'writing >>> ' + fname_out
        writefile.write(data, atr, fname_out)

    return fname_out
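add_matrix() is defined elsewhere in the module; a plausible NaN-aware version (an assumption about its behaviour, not PySAR's exact code) sums where both inputs are valid and keeps the valid one elsewhere:

import numpy as np

def add_matrix_sketch(data1, data2):
    '''Sum two matrices, treating NaN as missing rather than propagating it.'''
    data = data1 + data2                      # NaN wherever either input is NaN
    only1 = ~np.isnan(data1) & np.isnan(data2)
    only2 = np.isnan(data1) & ~np.isnan(data2)
    data[only1] = data1[only1]
    data[only2] = data2[only2]
    return data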
Code example #18
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. filled value for area outside of data coverage. default=None
                                   None or absent to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print 'subset ' + k + ' file: ' + File + ' ...'

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = False
    if 'fill_value' in subset_dict.keys() and subset_dict['fill_value']:
        outfill = True
    else:
        outfill = False
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print 'data   range in y/x: ' + str(data_box)
    print 'subset range in y/x: ' + str(pix_box)
    print 'data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict))
    print 'subset range in lat/lon: ' + str(geo_box)

    if pix_box == data_box:
        print 'Subset range == data coverage, no need to subset. Skip.'
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                outFile = os.path.splitext(
                    File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: ' + str(epochNum)
        else:
            print 'number of interferograms: ' + str(epochNum)

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
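get_box_overlap_index() is not shown above; a hedged sketch of the intersection logic it needs: clip the subset box to the data box, then express the overlap in each box's own coordinates so it can index both the input dataset and the padded output array:

def get_box_overlap_index_sketch(data_box, pix_box):
    '''Boxes are (x0, y0, x1, y1); returns the overlap in data and subset coords.'''
    x0 = max(data_box[0], pix_box[0]);  y0 = max(data_box[1], pix_box[1])
    x1 = min(data_box[2], pix_box[2]);  y1 = min(data_box[3], pix_box[3])
    box4data = (x0 - data_box[0], y0 - data_box[1],
                x1 - data_box[0], y1 - data_box[1])
    box4subset = (x0 - pix_box[0], y0 - pix_box[1],
                  x1 - pix_box[0], y1 - pix_box[1])
    return box4data, box4subset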
Code example #19
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROIPAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print 'reading lookup table file: ' + lut_file
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.full((len_geo, wid_geo), fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print 'update attributes'
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> ' + fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
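The same RegularGridInterpolator call pattern, stripped to a standalone toy so the geocoding step is easy to test in isolation (grid and query points are illustrative):

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

data = np.arange(12, dtype=float).reshape(3, 4)   # radar-coord raster
pts_rdr = (np.arange(3), np.arange(4))            # row and column axes
func = RGI(pts_rdr, data, method='nearest', bounds_error=False, fill_value=np.nan)
pts_geo = np.array([[0.2, 1.7],                   # (az, rg) query points
                    [1.9, 0.1]])
print(func(pts_geo))                              # -> [2.  8.]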
Code example #20
File: spatial_filter.py  Project: zyh900908/PySAR
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    '''Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, optional, output file name/path
    '''

    # Basic info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    try:    ref_yx = [int(atr['ref_y']), int(atr['ref_x'])]
    except: ref_yx = None

    filter_type = filter_type.lower()
    MSG = 'filtering '+k+' file: '+fname+' using '+filter_type+' filter'
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        MSG += ' with kernel size of %d' % int(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        MSG += ' with sigma of %.1f' % filter_par
    print MSG

    if not fname_out:
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+filter_type+ext

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k == 'timeseries':
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                dset = group.create_dataset(date, data=data_filt, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx and k in ['interferograms']:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_filt, compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_filt = filter_data(data, filter_type, filter_par)
        if ref_yx and k in ['.unw','velocity']:
            data_filt -= data_filt[ref_yx[0], ref_yx[1]]
        print 'writing >>> '+fname_out
        writefile.write(data_filt, atr, fname_out)

    return fname_out
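filter_data() lives elsewhere in the module; a minimal sketch of the two filter families named above (an assumption about its body, using scipy.ndimage):

import numpy as np
from scipy import ndimage

def filter_data_sketch(data, filter_type, filter_par):
    if filter_type == 'lowpass_gaussian':
        return ndimage.gaussian_filter(data, sigma=filter_par)
    if filter_type == 'highpass_gaussian':
        return data - ndimage.gaussian_filter(data, sigma=filter_par)
    kernel = np.ones((int(filter_par),) * 2) / float(int(filter_par)**2)
    if filter_type == 'lowpass_avg':
        return ndimage.convolve(data, kernel)
    if filter_type == 'highpass_avg':
        return data - ndimage.convolve(data, kernel)
    raise ValueError('unknown filter type: ' + filter_type)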
Code example #21
def main(argv):
    try:
        timeseriesFile = argv[0]
    except:
        usage()
        sys.exit(1)

    try:
        outname = argv[1]
    except:
        outname = 'sum_' + timeseriesFile

    ##### Read Timeseries
    atr = readfile.read_attribute(timeseriesFile)
    k = atr['FILE_TYPE']
    print "loading time series: " + timeseriesFile
    h5timeseries = h5py.File(timeseriesFile)
    dateList = sorted(h5timeseries['timeseries'].keys())
    date_num = len(dateList)
    print 'number of acquisitions: %d' % date_num

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    D = np.zeros((date_num, length * width), np.float32)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = dateList[i]
        d = h5timeseries['timeseries'].get(date)[:]
        D[i][:] = d.flatten(0)
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5timeseries.close()

    ##### Calculate Sum
    print 'calculating epochs sum ...'
    sumD = np.zeros(D.shape)
    prog_bar.reset()
    for i in range(date_num):
        sumD[i, :] = np.sum(np.abs(D - D[i, :]), 0) / date_num
        prog_bar.update(i + 1)
    prog_bar.close()

    ## Normalize to 0 and 1
    ## with high atmosphere equal to 0 and no atmosphere equal to 1
    sumD -= np.max(sumD, 0)
    sumD *= -1
    sumD /= np.max(sumD, 0)
    sumD[np.isnan(sumD)] = 1

    ##### Write sum epochs file
    print 'writing to >>> ' + outname
    h5sum = h5py.File(outname, 'w')
    group = h5sum.create_group('timeseries')
    prog_bar.reset()
    for i in range(date_num):
        date = dateList[i]
        d = np.reshape(sumD[i][:], [length, width])
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5sum.close()
    print 'Done.'
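A tiny numeric check of the normalisation above: each pixel's column is shifted so its maximum over epochs is 0, flipped in sign, then scaled so the epoch with the least atmosphere reads 1 and the worst reads 0:

import numpy as np

sumD = np.array([[0.2, 1.0],
                 [0.6, 3.0]])      # (epoch, pixel), synthetic
sumD -= np.max(sumD, 0)
sumD *= -1
sumD /= np.max(sumD, 0)
print(sumD)                        # [[1. 1.]
                                   #  [0. 0.]]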
Code example #22
def main(argv):
    inps = cmdLineParse()

    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print 'input ' + k + ' file: ' + inps.timeseries_file
    if not k == 'timeseries':
        sys.exit('ERROR: input file is not timeseries!')
    h5file = h5py.File(inps.timeseries_file)

    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    print '--------------------------------------------'
    print 'Dates from input file: ' + str(len(dateListAll))
    print dateListAll

    inps.ex_date = get_exclude_date(inps, dateListAll)

    dateList = sorted(list(set(dateListAll) - set(inps.ex_date)))
    print '--------------------------------------------'
    if len(dateList) == len(dateListAll):
        print 'using all dates to calculate the velocity'
    else:
        print 'Dates used to estimate the velocity: ' + str(len(dateList))
        print dateList
    print '--------------------------------------------'

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix
    B = np.ones([len(datevector), 2])
    B[:, 0] = datevector
    #B_inv = np.linalg.pinv(B)
    B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
    B_inv = np.array(B_inv, np.float32)

    # Loading timeseries
    print "Loading time series file: " + inps.timeseries_file + ' ...'
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum, length * width], np.float32)
    prog_bar = ptime.progress_bar(maxValue=dateNum, prefix='loading: ')
    for i in range(dateNum):
        date = dateList[i]
        timeseries[i, :] = h5file[k].get(date)[:].flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5file.close()

    # Velocity Inversion
    print 'Calculating velocity ...'
    X = np.dot(B_inv, timeseries)
    velocity = np.reshape(X[0, :], [length, width])

    print 'Calculating rmse ...'
    timeseries_linear = np.dot(B, X)
    timeseries_residual = timeseries - timeseries_linear
    rmse = np.reshape(np.sqrt((np.sum((timeseries_residual)**2, 0)) / dateNum),
                      [length, width])

    print 'Calculating the standard deviation of the estimated velocity ...'
    s1 = np.sqrt(np.sum(timeseries_residual**2, 0) / (dateNum - 2))
    s2 = np.sqrt(np.sum((datevector - np.mean(datevector))**2))
    std = np.reshape(s1 / s2, [length, width])

    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'

    inps.outfile_rmse = os.path.splitext(
        inps.outfile)[0] + 'Rmse' + os.path.splitext(inps.outfile)[1]
    inps.outfile_std = os.path.splitext(
        inps.outfile)[0] + 'Std' + os.path.splitext(inps.outfile)[1]
    inps.outfile_r2 = os.path.splitext(
        inps.outfile)[0] + 'R2' + os.path.splitext(inps.outfile)[1]

    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum - 1]

    # File Writing
    print '--------------------------------------'
    atr['FILE_TYPE'] = 'velocity'
    print 'writing >>> ' + inps.outfile
    writefile.write(velocity, atr, inps.outfile)

    #atr['FILE_TYPE'] = 'rmse'
    print 'writing >>> ' + inps.outfile_rmse
    writefile.write(rmse, atr, inps.outfile_rmse)

    #atr['FILE_TYPE'] = 'std'
    print 'writing >>> ' + inps.outfile_std
    writefile.write(std, atr, inps.outfile_std)

    print 'Done.\n'
    return inps.outfile
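
A self-contained sketch of the linear fit performed above, on synthetic noise-free numbers (no PySAR dependencies): the model is d(t) = v*t + c, solved with the pseudo-inverse of the design matrix B = [t, 1], and the velocity standard deviation follows the same s1/s2 formula as the code.

import numpy as np

t = np.array([0.0, 0.2, 0.5, 0.9, 1.4, 2.0])   # acquisition times [years]
d = 0.02 * t + 0.003                            # displacement [m], v = 2 cm/yr

B = np.ones((t.size, 2))
B[:, 0] = t
X = np.dot(np.linalg.pinv(B), d)                # X = [velocity, offset]
residual = d - np.dot(B, X)
s1 = np.sqrt(np.sum(residual**2) / (t.size - 2))
s2 = np.sqrt(np.sum((t - np.mean(t))**2))
print(X[0])       # -> 0.02, the recovered velocity [m/yr]
print(s1 / s2)    # its standard deviation; 0 here, the data are noise-free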
Code example #23
File: unwrap_error.py Project: ilearnProgramme/PySAR
def unwrap_error_correction_phase_closure(ifgram_file,
                                          mask_file,
                                          ifgram_cor_file=None):
    '''Correct unwrapping errors in a network of interferograms using phase closure.
    Inputs:
        ifgram_file     - string, name/path of interferograms file
        mask_file       - string, name/path of mask file to mask the pixels to be corrected
        ifgram_cor_file - string, optional, name/path of corrected interferograms file
    Output:
        ifgram_cor_file
    Example:
        'unwrapIfgram_unwCor.h5' = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5','mask.h5')
    '''
    print 'read mask from file: ' + mask_file
    mask = readfile.read(mask_file)[0].flatten('F')

    atr = readfile.read_attribute(ifgram_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    pixel_num = length * width

    # Check reference pixel
    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        print 'reference pixel in y/x: %d/%d' % (ref_y, ref_x)
    except:
        sys.exit(
            'ERROR: Can not find ref_y/x value, input file is not referenced in space!'
        )

    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)

    ##### Prepare curls
    curls, Triangles, C = ut.get_triangles(h5)
    curl_num = np.shape(curls)[0]
    print 'Number of      triangles: ' + str(curl_num)

    curl_file = 'curls.h5'
    if not os.path.isfile(curl_file):
        print 'writing >>> ' + curl_file
        ut.generate_curls(curl_file, h5, Triangles, curls)

    thr = 0.50
    curls = np.array(curls)
    n1 = curls[:, 0]
    n2 = curls[:, 1]
    n3 = curls[:, 2]

    print 'reading interferograms...'
    print 'Number of interferograms: ' + str(ifgram_num)
    data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for ni in range(ifgram_num):
        ifgram = ifgram_list[ni]
        d = h5[k][ifgram].get(ifgram)[:].flatten('F')
        data[ni, :] = d
        prog_bar.update(ni + 1)
    prog_bar.close()

    print 'reading curls ...'
    print 'number of curls: ' + str(curl_num)
    h5curl = h5py.File(curl_file, 'r')
    curl_list = sorted(h5curl[k].keys())
    curl_data = np.zeros((curl_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=curl_num)
    for ni in range(curl_num):
        d = h5curl[k][curl_list[ni]].get(curl_list[ni])[:].flatten('F')
        curl_data[ni, :] = d
        prog_bar.update(ni + 1)
    prog_bar.close()
    h5curl.close()

    print 'estimating unwrapping error pixel by pixel ...'
    EstUnwrap = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for ni in range(pixel_num):
        if mask[ni] == 1:
            dU = data[:, ni]
            unwCurl = np.array(curl_data[:, ni])

            ind = np.abs(unwCurl) >= thr
            N1 = n1[ind]
            N2 = n2[ind]
            N3 = n3[ind]
            indC = np.abs(unwCurl) < thr
            Nc1 = n1[indC]
            Nc2 = n2[indC]
            Nc3 = n3[indC]

            N = np.hstack([N1, N2, N3])
            UniN = np.unique(N)
            Nc = np.hstack([Nc1, Nc2, Nc3])
            UniNc = np.unique(Nc)

            inter = list(set(UniNc) & set(UniN))  # intersection
            UniNc = list(UniNc)
            for x in inter:
                UniNc.remove(x)

            D = np.zeros([len(UniNc), ifgram_num])
            for i in range(len(UniNc)):
                D[i, UniNc[i]] = 1

            AAA = np.vstack([-2 * np.pi * C, D])
            AAAA = np.vstack([AAA, 0.25 * np.eye(ifgram_num)])

            ##########
            # with Tikhonov regularization:
            LLL = list(np.dot(C, dU)) + list(np.zeros(
                np.shape(UniNc)[0])) + list(np.zeros(ifgram_num))
            ind = np.isnan(AAAA)
            M1 = pinv(AAAA)
            M = np.dot(M1, LLL)
            EstUnwrap[:, ni] = np.round(M[0:ifgram_num]) * 2.0 * np.pi
        prog_bar.update(ni + 1, suffix='%s/%d' % (ni, pixel_num))
    prog_bar.close()

    dataCor = data + EstUnwrap

    ##### Output
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0] + '_unwCor.h5'
    print 'writing >>> ' + ifgram_cor_file
    h5unwCor = h5py.File(ifgram_cor_file, 'w')
    gg = h5unwCor.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram,
                                    data=np.reshape(dataCor[i, :],
                                                    [width, length]).T,
                                    compression='gzip')
        for key, value in h5[k][ifgram].attrs.iteritems():
            group.attrs[key] = value
        prog_bar.update(i + 1)
    prog_bar.close()
    h5unwCor.close()
    h5.close()
    return ifgram_cor_file
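
A toy illustration of the phase-closure principle the function exploits (invented values): for a triplet of interferograms ij, jk, ik, the unwrapped phases should close, and an unwrapping error on one leg leaves a residual near an integer multiple of 2*pi, which is what the curl threshold thr detects.

import numpy as np

phi_ij, phi_jk, phi_ik = 1.2, 2.3, 3.5          # a consistent triplet [rad]
print(phi_ij + phi_jk - phi_ik)                  # ~0 -> no unwrapping error

phi_ij_bad = phi_ij + 2 * np.pi                  # one leg off by one cycle
closure = phi_ij_bad + phi_jk - phi_ik
print(np.round(closure / (2 * np.pi)))           # -> 1.0, one cycle detected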
Code example #24
File: diff.py Project: jthetzel/PySAR
def diff_file(file1, file2, outName=None, force=False):
    '''Subtraction/difference of two input files'''
    if not outName:
        outName = os.path.splitext(file1)[0]+'_diff_'+os.path.splitext(os.path.basename(file2))[0]+\
                  os.path.splitext(file1)[1]

    print file1 + ' - ' + file2
    # Read basic info
    atr = readfile.read_attribute(file1)
    print 'Input first file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    k = atr['FILE_TYPE']

    # Multi-dataset/group file
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        # Check input files type for multi_dataset/group files
        atr2 = readfile.read_attribute(file2)
        k2 = atr2['FILE_TYPE']

        h5_1 = h5py.File(file1, 'r')
        h5_2 = h5py.File(file2, 'r')
        epochList = sorted(h5_1[k].keys())
        epochList2 = sorted(h5_2[k2].keys())
        if not all(i in epochList2 for i in epochList):
            print 'ERROR: ' + file2 + ' does not contain all groups of ' + file1
            if force and k in ['timeseries']:
                print 'Continuing: differencing will be done for the shared dates only!'
            else:
                sys.exit(1)

        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName

        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        # check reference date
        if atr['ref_date'] == atr2['ref_date']:
            ref_date = None
        else:
            ref_date = atr['ref_date']
            data2_ref = h5_2[k2].get(ref_date)[:]
            print 'consider different reference date'
        # check reference pixel
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        if ref_y == int(atr2['ref_y']) and ref_x == int(atr2['ref_x']):
            ref_y = None
            ref_x = None
        else:
            print 'consider different reference pixel'

        # calculate difference in loop
        for i in range(epoch_num):
            date = epochList[i]
            data1 = h5_1[k].get(date)[:]
            try:
                data2 = h5_2[k2].get(date)[:]
                if ref_date:
                    data2 -= data2_ref
                if ref_x is not None and ref_y is not None:
                    data2 -= data2[ref_y, ref_x]
                data = diff_data(data1, data2)
            except:
                data = data1
            dset = group.create_dataset(date, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        for key, value in atr.iteritems():
            group.attrs[key] = value

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    elif k in ['interferograms', 'coherence', 'wrapped']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch1 = epochList[i]
            epoch2 = epochList2[i]
            data1 = h5_1[k][epoch1].get(epoch1)[:]
            data2 = h5_2[k2][epoch2].get(epoch2)[:]
            data = diff_data(data1, data2)
            gg = group.create_group(epoch1)
            dset = gg.create_dataset(epoch1, data=data, compression='gzip')
            for key, value in h5_1[k][epoch1].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    # Single dataset file
    else:
        data1, atr1 = readfile.read(file1)
        data2, atr2 = readfile.read(file2)
        data = diff_data(data1, data2)
        print 'writing >>> ' + outName
        writefile.write(data, atr1, outName)

    return outName
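
A compact sketch of the reference alignment done in the timeseries branch above (toy numbers): before differencing, file2 is shifted to file1's reference date and reference pixel so both series share the same datum.

import numpy as np

data2_ref = np.array([[0.0, 0.1],
                      [0.2, 0.3]])   # file2 at file1's reference date
data2 = np.array([[0.5, 0.7],
                  [0.9, 1.1]])       # file2 at the current date
ref_y, ref_x = 0, 0                  # file1's reference pixel

data2 = data2 - data2_ref            # re-reference in time
data2 = data2 - data2[ref_y, ref_x]  # re-reference in space
print(data2)                         # now directly comparable to file1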
Code example #25
def temporal_coherence(timeseriesFile, ifgramFile):
    '''Calculate temporal coherence based on input timeseries file and interferograms file
    Inputs:
        timeseriesFile - string, path of time series file
        ifgramFile     - string, path of interferograms file
    Output:
        temp_coh - 2D np.array, temporal coherence in float32
    '''

    # Basic Info
    atr_ts = readfile.read_attribute(timeseriesFile)
    length = int(atr_ts['FILE_LENGTH'])
    width = int(atr_ts['WIDTH'])
    pixel_num = length * width

    # Read time series data
    h5timeseries = h5py.File(timeseriesFile, 'r')
    date_list = sorted(h5timeseries['timeseries'].keys())
    date_num = len(date_list)

    print "load time series: " + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    timeseries = np.zeros((date_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5timeseries['timeseries'].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5timeseries.close()

    # Convert displacement from meter to radian
    range2phase = -4 * np.pi / float(atr_ts['WAVELENGTH'])
    timeseries *= range2phase

    # interferograms data
    print "interferograms file: " + ifgramFile
    atr_ifgram = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    ifgram_list = ut.check_drop_ifgram(h5ifgram, atr_ifgram, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Design matrix
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    A1, B = ut.design_matrix(ifgramFile, date12_list)
    A0 = -1 * np.ones([ifgram_num, 1])
    A = np.hstack((A0, A1))

    # Get reference pixel
    try:
        ref_x = int(atr_ts['ref_x'])
        ref_y = int(atr_ts['ref_y'])
        print 'find reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except (KeyError, ValueError):
        sys.exit('No ref_x/y found! Cannot calculate temporal coherence without it.')

    print 'calculating temporal coherence interferogram by interferogram ...'
    print 'number of interferograms: ' + str(ifgram_num)
    temp_coh = np.zeros(pixel_num, dtype=np.complex64)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num, prefix='calculating: ')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # read interferogram
        data = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        data -= data[ref_y, ref_x]
        data = data.flatten()

        # calculate difference between observed and estimated data
        dataEst = np.dot(A[i, :], timeseries)
        dataDiff = data - dataEst
        temp_coh += np.exp(1j * dataDiff)
        prog_bar.update(i + 1, suffix=date12_list[i])
    prog_bar.close()
    del timeseries, data, dataEst, dataDiff
    h5ifgram.close()

    temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
        (length, width)),
                        dtype=np.float32)
    return temp_coh
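
A minimal numeric check of the temporal coherence measure computed above: gamma = |sum_i exp(j * (phi_obs_i - phi_est_i))| / N, which equals 1 when the inverted time series reproduces every interferogram exactly and tends toward 0 for random misfits.

import numpy as np

N = 50
perfect = np.zeros(N)                               # zero misfit everywhere
print(np.abs(np.sum(np.exp(1j * perfect))) / N)     # -> 1.0

np.random.seed(0)
noisy = np.random.uniform(-np.pi, np.pi, N)         # random misfit
print(np.abs(np.sum(np.exp(1j * noisy))) / N)       # -> close to 0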
Code example #26
File: modify_network.py Project: Chenjiajun01/PySAR
def modify_file_date12_list(File,
                            date12_to_rmv,
                            mark_attribute=False,
                            outFile=None):
    '''Update multiple group hdf5 file using date12 to remove
    Inputs:
        File          - multi_group HDF5 file, i.e. unwrapIfgram.h5, coherence.h5
        date12_to_rmv - list of string indicating interferograms in YYMMDD-YYMMDD format
        mark_attribute- bool, if True, change 'drop_ifgram' attribute only; otherwise, write
                        the result to a new file
        outFile       - string, output file name
    Output:
        outFile       - string, output file name, if mark_attribute=True, outFile = File
    '''
    k = readfile.read_attribute(File)['FILE_TYPE']
    print '----------------------------------------------------------------------------'
    print 'file: ' + File

    if mark_attribute:
        print "set drop_ifgram to 'yes' for all interferograms to remove, and 'no' for all the others."
        h5 = h5py.File(File, 'r+')
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            if h5[k][ifgram].attrs['DATE12'] in date12_to_rmv:
                h5[k][ifgram].attrs['drop_ifgram'] = 'yes'
            else:
                h5[k][ifgram].attrs['drop_ifgram'] = 'no'
        h5.close()
        outFile = File

    else:
        date12_orig = pnet.get_date12_list(File)
        date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
        print 'number of interferograms in file      : ' + str(
            len(date12_orig))
        print 'number of interferograms to keep/write: ' + str(
            len(date12_to_write))
        print 'list   of interferograms to keep/write: '
        print date12_to_write
        date12Num = len(date12_to_write)

        if not outFile:
            outFile = 'Modified_' + os.path.basename(File)
        print 'writing >>> ' + outFile
        h5out = h5py.File(outFile, 'w')
        gg = h5out.create_group(k)

        h5 = h5py.File(File, 'r')
        igramList = sorted(h5[k].keys())
        date12_list = ptime.list_ifgram2date12(igramList)
        prog_bar = ptime.progress_bar(maxValue=date12Num, prefix='writing: ')
        for i in range(date12Num):
            date12 = date12_to_write[i]
            idx = date12_orig.index(date12)
            igram = igramList[idx]

            data = h5[k][igram].get(igram)[:]
            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=data, compression='gzip')
            for key, value in h5[k][igram].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['drop_ifgram'] = 'no'
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        h5.close()
        h5out.close()
        print 'finished writing >>> ' + outFile

    return outFile
Code example #27
File: geocode.py Project: louisemaubant/PySAR
def geocode_file_radar_lut(fname, lookup_file, fname_out=None, inps=None):
    '''Geocode file using lookup table file in radar coordinates (isce).
    Two solutions:
    1) scipy.interpolate.griddata, with a speed up solution from Jaime and Jeff (Stack Overflow)
        https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    2) matplotlib.tri, interpolation from triangular grid to quad grid, which is much slower than 1).

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file, geometryRadar.h5
        fname_out   : string, optional, output geocoded filename
        inps        : namespace, object with the following items:
                      interp_method : string, interpolation/resampling method, supporting linear
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out  : string, optional, output geocoded filename
    '''
    start = time.time()
    ## Default Inputs and outputs
    if not inps:
        inps = cmdLineParse()

    if inps.interp_method != 'linear':
        print 'ERROR: only linear interpolation is supported'
        print 'input method is '+inps.interp_method
        sys.exit(-1)

    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ## Read lookup table file
    atr_rdr = readfile.read_attribute(fname)
    length = int(atr_rdr['FILE_LENGTH'])
    width = int(atr_rdr['WIDTH'])
    print 'reading lookup table file '+lookup_file
    lat = readfile.read(lookup_file, epoch='latitude')[0]
    lon = readfile.read(lookup_file, epoch='longitude')[0]

    #####Prepare output pixel grid: lat/lon range and step
    if os.path.isfile(inps.lalo_step):
        print 'use file %s as reference for output grid lat/lon range and step' % (inps.lalo_step)
        atr_ref = readfile.read_attribute(inps.lalo_step)
        inps.lat_step = float(atr_ref['Y_STEP'])
        inps.lon_step = float(atr_ref['X_STEP'])
        inps.lat_num = int(atr_ref['FILE_LENGTH'])
        inps.lon_num = int(atr_ref['WIDTH'])
        inps.lat0 = float(atr_ref['Y_FIRST'])
        inps.lon0 = float(atr_ref['X_FIRST'])
        inps.lat1 = inps.lat0 + inps.lat_step*inps.lat_num
        inps.lon1 = inps.lon0 + inps.lon_step*inps.lon_num
    else:
        try:
            inps.lat_step = -1*abs(float(inps.lalo_step))
            inps.lon_step = abs(float(inps.lalo_step))
            inps.lat0 = np.nanmax(lat)
            inps.lat1 = np.nanmin(lat)
            inps.lon0 = np.nanmin(lon)
            inps.lon1 = np.nanmax(lon)
            inps.lat_num = int((inps.lat1-inps.lat0)/inps.lat_step)
            inps.lon_num = int((inps.lon1-inps.lon0)/inps.lon_step)
            inps.lat_step = (inps.lat1 - inps.lat0)/inps.lat_num
            inps.lon_step = (inps.lon1 - inps.lon0)/inps.lon_num
        except ValueError:
            print 'Input lat/lon step is neither a float number nor a file in geo-coordinates, please try again.'
            sys.exit(1)

    print 'output lat range: %f - %f' % (inps.lat0, inps.lat1)
    print 'output lon range: %f - %f' % (inps.lon0, inps.lon1)
    print 'output lat_step : %f' % (inps.lat_step)
    print 'output lon_step : %f' % (inps.lon_step)
    print 'input  file size in   y/x  : %d/%d' % (length, width)
    print 'output file size in lat/lon: %d/%d' % (inps.lat_num, inps.lon_num)

    grid_lat, grid_lon = np.mgrid[inps.lat0:inps.lat1:inps.lat_num*1j,\
                                  inps.lon0:inps.lon1:inps.lon_num*1j]


    ##### Interpolate value on regular geo coordinates (from lookup table file attributes, 2D ndarray)
    ##### with known value on irregular geo coordinates (from lookup table file value, tuple of ndarray of float)

    ##Solution 1 - qhull
    print 'calculate triangulation and coordinates transformation using scipy.spatial.qhull.Delaunay ...'
    pts_old = np.hstack((lat.reshape(-1,1), lon.reshape(-1,1)))
    pts_new = np.hstack((grid_lat.reshape(-1,1), grid_lon.reshape(-1,1)))
    vtx, wts = interp_weights(pts_old, pts_new)
    del pts_old, pts_new, grid_lat, grid_lon

    ##Solution 2 - matplotlib.tri
    #triang = mtri.Triangulation(lat.flatten(),lon.flatten())

    data_geo = np.empty((inps.lat_num, inps.lon_num)).flatten()
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_radar_lut(h5[k][ifgram].attrs, inps, lat, lon, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]

        ##Solution 1 - qhull
        data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

        ###Solution 2 - matplotlib.tri
        #interp_lin = mtri.LinearTriInterpolator(triang, data.flatten())
        #data_geo = interp_lin(grid_lat.flatten(), grid_lon.flatten())
        #interp_cubic = mtri.CubicTriInterpolator(triang, data, kind='geom')
        #data_geo = interp_cubic(grid_lat, grid_lon)

        print 'update attributes'
        atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo, vtx, wts
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
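
The function above calls interp_weights and interpolate without showing them. A minimal sketch, under assumptions, of what these helpers could look like, following the Stack Overflow speed-up cited in the docstring: triangulate the irregular source grid once, then reuse the simplex vertices and barycentric weights for every epoch. The names and signatures here are assumptions, not necessarily PySAR's exact code.

import numpy as np
from scipy.spatial import Delaunay

def interp_weights(xy, uv):
    '''For each target point in uv, return the vertices of the enclosing
    simplex in the Delaunay triangulation of xy and its barycentric weights.'''
    tri = Delaunay(xy)
    simplex = tri.find_simplex(uv)
    vertices = np.take(tri.simplices, simplex, axis=0)
    temp = np.take(tri.transform, simplex, axis=0)
    delta = uv - temp[:, 2]
    bary = np.einsum('njk,nk->nj', temp[:, :2, :], delta)
    return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))

def interpolate(values, vtx, wts, fill_value=np.nan):
    '''Linear interpolation from precomputed vertices/weights; points outside
    the convex hull typically carry a negative weight and get fill_value.'''
    ret = np.einsum('nj,nj->n', np.take(values, vtx), wts)
    ret[np.any(wts < 0, axis=1)] = fill_value
    return ret

The payoff is that the expensive Delaunay triangulation runs once, while each epoch's resampling reduces to a cheap weighted sum.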
Code example #28
File: multilook.py Project: louisemaubant/PySAR
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print 'multilooking ' + k + ' file ' + infile
    print 'number of looks in y / azimuth direction: %d' % lks_y
    print 'number of looks in x / range   direction: %d' % lks_x

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print 'writing >>> ' + outfile

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print 'number of interferograms: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.iteritems():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k in ['.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
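
A minimal sketch of what the multilook_matrix helper is assumed to do: average lks_y x lks_x blocks of pixels (spatial downsampling). PySAR's actual implementation may handle trailing edges differently.

import numpy as np

def multilook_matrix_sketch(data, lks_y, lks_x):
    '''Block-average a 2D array; rows/columns that do not fill a complete
    block are discarded.'''
    length_mli = data.shape[0] // lks_y
    width_mli = data.shape[1] // lks_x
    data = data[:length_mli * lks_y, :width_mli * lks_x]
    return data.reshape(length_mli, lks_y, width_mli, lks_x).mean(axis=(1, 3))

print(multilook_matrix_sketch(np.arange(16.).reshape(4, 4), 2, 2))
# -> [[ 2.5  4.5]
#     [10.5 12.5]]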
Code example #29
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print 'input is '+k+' file: '+fname
    print 'operation: file %s %f' % (operator, operand)

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        else:  raise ValueError('un-recognized operator: '+str(operator))
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k == 'timeseries':
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(data_out, atr, fname_out)

    return fname_out
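
A hedged sketch of the data_operation helper this function depends on but does not show: apply the scalar operand element-wise, with the alias spellings mirroring the suffix table above. This is an assumption of the helper's behavior, not PySAR's exact code.

import numpy as np

def data_operation(data, operator, operand):
    '''Element-wise scalar arithmetic on a data array.'''
    if operator in ['+', 'plus', 'add', 'addition']:
        return data + operand
    elif operator in ['-', 'minus', 'substract', 'substraction']:
        return data - operand
    elif operator in ['*', 'times', 'multiply', 'multiplication']:
        return data * operand
    elif operator in ['/', 'obelus', 'divide', 'division']:
        return data * (1.0 / operand)
    elif operator in ['^', 'pow', 'power']:
        return data ** operand
    raise ValueError('unsupported operator: ' + str(operator))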
Code example #30
def correct_lod_file(File, rangeDistFile=None, outFile=None):
    # Check Sensor Type
    print 'correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)'
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    if not rangeDistFile:
        print 'calculate range distance from input file attributes'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        range_resolution = float(atr['RANGE_PIXEL_SIZE'])
        rangeDist1D = range_resolution * np.linspace(0, width - 1, width)
        rangeDist = np.tile(rangeDist1D, (length, 1))
    else:
        print 'read range distance from file: %s' % (rangeDistFile)
        rangeDist = readfile.read(rangeDistFile, epoch='slantRangeDistance')[0]

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    rangeDist -= rangeDist[yref][xref]
    Ramp = np.array(rangeDist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        epochNum = len(epochList)

        print 'writing >>> %s' % (outFile)
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        prog_bar = ptime.progress_bar(maxValue=epochNum)
        if k in ['interferograms', 'wrapped']:
            Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
            print 'number of interferograms: ' + str(epochNum)
            date12List = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12List[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)
        prog_bar.close()
        h5.close()
        h5out.close()

    elif k in ['.unw']:
        data, atr = readfile.read(File)
        Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
        dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
        dates = ptime.yyyymmdd2years(dates)
        dt = dates[1] - dates[0]
        data -= Ramp * dt
        print 'writing >>> %s' % (outFile)
        writefile.write(data, atr, outFile)
    else:
        print 'No need to correct for LOD for %s file' % (k)

    return outFile
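
A worked example of the empirical Envisat LOD model applied above (Marinkovic and Larsen, 2013): the drift grows linearly with slant-range distance from the reference pixel at the 3.87e-7 rate used in the code, scaled by the temporal baseline; the wavelength below is an assumed Envisat C-band value, not read from file attributes.

import numpy as np

wavelength = 0.0562                               # [m], assumed Envisat C-band
range_dist = np.linspace(0, 50e3, 5)              # slant-range offset from ref pixel [m]
dt = 1.5                                          # temporal baseline [years]
ramp_m = 3.87e-7 * range_dist * dt                # LOD range change [m]
ramp_rad = ramp_m * (-4 * np.pi / wavelength)     # as interferometric phase [rad]
print(ramp_rad)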