Example #1
def ref_date_file(inFile, ref_date, outFile=None):
    '''Change input file reference date to a different one.'''
    if not outFile:
        outFile = os.path.splitext(inFile)[0]+'_refDate.h5'

    # Input file type 
    atr = readfile.read_attribute(inFile)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        print('Input file is '+k+', only timeseries is supported.')
        return None

    # Input reference date
    h5 = h5py.File(inFile, 'r')
    date_list = sorted(h5[k].keys())
    h5.close()
    date_num = len(date_list)
    try:    ref_date_orig = atr['ref_date']
    except: ref_date_orig = date_list[0]

    ref_date = ptime.yyyymmdd(ref_date)
    print('input reference date: '+ref_date)
    if ref_date not in date_list:
        print('Input reference date was not found!\nAll dates available: '+str(date_list))
        return None
    if ref_date == ref_date_orig:
        print('Same reference date chosen as existing reference date.')
        print('Copy %s to %s' % (inFile, outFile))
        shutil.copy2(inFile, outFile)
        return outFile

    # Referencing in time
    h5 = h5py.File(inFile, 'r')
    ref_data = h5[k].get(ref_date)[:]

    print('writing >>> '+outFile)
    h5out = h5py.File(outFile,'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]
        dset = group.create_dataset(date, data=data-ref_data, compression='gzip')
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5.close()

    ## Update attributes
    atr = ref_date_attribute(atr, ref_date, date_list)
    for key,value in atr.items():
        group.attrs[key] = value
    h5out.close()

    return outFile
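
The re-referencing step above boils down to subtracting the displacement map of the chosen reference date from every acquisition. A minimal sketch of that idea with synthetic NumPy arrays (no PySAR helpers assumed):

import numpy as np

ts = np.random.rand(5, 3, 4).astype(np.float32)   # 5 dates x 3x4 pixels, hypothetical data
ref_idx = 2                                       # index of the new reference date

ts_ref = ts - ts[ref_idx]                         # referencing in time
assert np.allclose(ts_ref[ref_idx], 0.0)          # the reference date becomes all zeros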
Example #2
def main(argv):
    try:
        timeseries_file = argv[0]
    except:
        usage() ; sys.exit(1)

    # Basic info
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series
    print("loading time series: " + timeseries_file)
    h5 = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    pixel_num = length*width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    timeseries = np.zeros((date_num, pixel_num),np.float32)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i,:] = d.flatten()
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    del d
    h5.close()

    ##### Calculate 1st and 2nd temporal derivatives
    print("calculating temporal 1st derivative ... ")
    timeseries_1st = np.zeros((date_num-1,pixel_num),np.float32)
    for i in range(date_num-1):
        timeseries_1st[i][:] = timeseries[i+1][:] - timeseries[i][:]

    print("calculating temporal 2nd derivative")
    timeseries_2nd = np.zeros((date_num-2,pixel_num),np.float32)
    for i in range(date_num-2):
        timeseries_2nd[i][:] = timeseries_1st[i+1][:] - timeseries_1st[i][:]

    ##### Write 1st and 2nd temporal derivatives
    outfile1 = os.path.splitext(timeseries_file)[0]+'_1stDerivative.h5'
    print('writing >>> '+outfile1)
    h5out = h5py.File(outfile1, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num-1)
    for i in range(date_num-1):
        date = date_list[i+1]
        dset = group.create_dataset(date, data=np.reshape(timeseries_1st[i][:],[length,width]), compression='gzip')
        prog_bar.update(i+1, suffix=date)
    for key,value in atr.items():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    outfile2 = os.path.splitext(timeseries_file)[0]+'_2ndDerivative.h5'
    print('writing >>> '+outfile2)
    h5out = h5py.File(outfile2, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num-2)
    for i in range(date_num-2):
        date = date_list[i+2]
        dset = group.create_dataset(date, data=np.reshape(timeseries_2nd[i][:],[length,width]), compression='gzip')
        prog_bar.update(i+1, suffix=date)
    for key,value in atr.items():
        group.attrs[key] = value
    prog_bar.close()
    h5out.close()

    print('Done.')
    return outfile1, outfile2
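
The two loops above are plain forward differences; np.diff expresses the same operation directly. A small sketch with synthetic data:

import numpy as np

ts = np.random.rand(6, 100).astype(np.float32)   # 6 dates x 100 pixels, hypothetical
ts_1st = np.diff(ts, n=1, axis=0)                # shape (5, 100), matches timeseries_1st
ts_2nd = np.diff(ts, n=2, axis=0)                # shape (4, 100), matches timeseries_2nd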
Example #3
def modify_file_date12_list(File,
                            date12_to_rmv,
                            mark_attribute=False,
                            outFile=None):
    '''Update multiple group hdf5 file using date12 to remove
    Inputs:
        File          - multi_group HDF5 file, i.e. unwrapIfgram.h5, coherence.h5
        date12_to_rmv - list of strings indicating interferograms in YYMMDD-YYMMDD format
        mark_attribute- bool, if True, change the 'drop_ifgram' attribute only; otherwise, write
                        the result to a new file
        outFile       - string, output file name
    Output:
        outFile       - string, output file name, if mark_attribute=True, outFile = File
    '''
    k = readfile.read_attribute(File)['FILE_TYPE']
    print(
        '----------------------------------------------------------------------------'
    )
    print('file: ' + File)

    if mark_attribute:
        print(
            "set drop_ifgram to 'yes' for all interferograms to remove, and 'no' for all the others."
        )
        h5 = h5py.File(File, 'r+')
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            if h5[k][ifgram].attrs['DATE12'] in date12_to_rmv:
                h5[k][ifgram].attrs['drop_ifgram'] = 'yes'
            else:
                h5[k][ifgram].attrs['drop_ifgram'] = 'no'
        h5.close()
        outFile = File

    else:
        date12_orig = pnet.get_date12_list(File)
        date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
        print('number of interferograms in file      : ' +
              str(len(date12_orig)))
        print('number of interferograms to keep/write: ' +
              str(len(date12_to_write)))
        print('list   of interferograms to keep/write: ')
        print(date12_to_write)
        date12Num = len(date12_to_write)

        if not outFile:
            outFile = 'Modified_' + os.path.basename(File)
        print('writing >>> ' + outFile)
        h5out = h5py.File(outFile, 'w')
        gg = h5out.create_group(k)

        h5 = h5py.File(File, 'r')
        igramList = sorted(h5[k].keys())
        date12_list = ptime.list_ifgram2date12(igramList)
        prog_bar = ptime.progress_bar(maxValue=date12Num, prefix='writing: ')
        for i in range(date12Num):
            date12 = date12_to_write[i]
            idx = date12_orig.index(date12)
            igram = igramList[idx]

            data = h5[k][igram].get(igram)[:]
            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=data, compression='gzip')
            for key, value in h5[k][igram].attrs.items():
                group.attrs[key] = value
            group.attrs['drop_ifgram'] = 'no'
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        h5.close()
        h5out.close()
        print('finished writing >>> ' + outFile)

    return outFile
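
A hedged usage sketch, assuming PySAR is importable and an unwrapIfgram.h5 file with matching DATE12 attributes exists (the file name and date pairs below are hypothetical):

# mark two interferograms as dropped without rewriting the file
date12_to_rmv = ['101120-110220', '110220-110417']
out_file = modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv, mark_attribute=True)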
Example #4
File: subset.py  Project: Ovec8hkin/PySAR
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. fill value for the area outside of data coverage. default=None
                                   None/not existing to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print('subset ' + k + ' file: ' + File + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = False
    if 'fill_value' in list(subset_dict.keys()) and subset_dict['fill_value']:
        outfill = True
    else:
        outfill = False
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print('data   range in y/x: ' + str(data_box))
    print('subset range in y/x: ' + str(pix_box))
    print('data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict)))
    print('subset range in lat/lon: ' + str(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if 'tight' in list(subset_dict.keys()) and subset_dict['tight']:
                outFile = os.path.splitext(
                    File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print('number of acquisitions: ' + str(epochNum))
        else:
            print('number of interferograms: ' + str(epochNum))

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
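
The overlap/fill logic in subset_file can be illustrated without PySAR: allocate an output array of the requested box size, fill it with fill_value, then copy in the portion of the data that overlaps the box. A synthetic sketch:

import numpy as np

data = np.arange(20, dtype=float).reshape(4, 5)   # 4x5 "image", hypothetical
pix_box = (3, 1, 8, 4)                            # requested (x0, y0, x1, y1), partly outside
data_box = (0, 0, 5, 4)                           # full data coverage
fill_value = np.nan

# overlap of the two boxes
x0, y0 = max(pix_box[0], data_box[0]), max(pix_box[1], data_box[1])
x1, y1 = min(pix_box[2], data_box[2]), min(pix_box[3], data_box[3])

out = np.full((pix_box[3]-pix_box[1], pix_box[2]-pix_box[0]), fill_value)
out[y0-pix_box[1]:y1-pix_box[1], x0-pix_box[0]:x1-pix_box[0]] = data[y0:y1, x0:x1]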
Example #5
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print('reading mask from file: ' + inps.mask_file)
    mask = readfile.read(inps.mask_file)[0].flatten('F')
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print('total            pixel number: %d' % pix_num)
    print('estimating using pixel number: %d' % msk_num)

    ##### Read DEM
    print('read DEM from file: ' + inps.dem_file)
    dem = readfile.read(inps.dem_file)[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print('considering the incidence angle of each pixel ...')
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten('F')
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print('polynomial order: %d' % inps.poly_order)

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print(
        'Estimating the tropospheric effect between the differences of the subsequent epochs and DEM'
    )

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print('number of acquisitions: ' + str(date_num))
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]

    print('----------------------------------------------------------')
    print('correlation of DEM with each time-series epoch:')
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten('F')

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print('%s: %.2f' % (date, cc))
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print('----------------------------------------------------------')
    print('Average Correlation of DEM with time-series epochs: %.2f' %
          average_phase_height_corr)

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten('F')
        data2 = h5[k].get(date2)[:].flatten('F')
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print('----------------------------------------------------------')
    print('removing the stratified tropospheric delay from each epoch')
    print('writing >>> ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.items():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print('Done.')
    return inps.outfile
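
The correction above fits a low-order polynomial of elevation to each epoch's phase (over the masked pixels) and subtracts the fitted surface. A minimal first-order sketch with synthetic values:

import numpy as np

dem = np.random.rand(1000) * 2000.0                    # pixel heights [m], hypothetical
phase = 1e-4 * dem + 0.02 * np.random.randn(1000)      # phase with a linear height term

A = np.vstack((dem, np.ones(dem.size))).T              # design matrix [h, 1]
par = np.dot(np.linalg.pinv(A), phase)                 # [slope, offset]
phase_corrected = phase - np.dot(A, par)               # remove the stratified delay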
Example #6
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print('read mask file: ' + maskFile)
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    ##### Input File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('Input file is ' + k)
    print('remove ramp type: ' + surf_type)

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print('writing >>> ' + outFile)

    if k in ['timeseries']:
        print('number of acquisitions: ' + str(len(epochList)))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print('Removing ' + surf_type + ' from ' + k)

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print('writing >>> ' + outFile)
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print('Remove ' + surf_type + ' took ' + str(time.time() - start) +
          ' secs')
    return outFile
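
remove_data_surface is a PySAR helper; the idea behind the 'plane' ramp type can be sketched with plain NumPy: fit z = a*x + b*y + c over the masked pixels, then subtract the fitted plane everywhere. A synthetic sketch:

import numpy as np

length, width = 50, 60
y, x = np.mgrid[0:length, 0:width]
data = 0.01*x + 0.02*y + 0.1*np.random.randn(length, width)   # ramp + noise, hypothetical
mask = np.ones((length, width), dtype=bool)

G = np.vstack((x[mask], y[mask], np.ones(mask.sum()))).T
coef = np.linalg.lstsq(G, data[mask], rcond=None)[0]
data_flat = data - (coef[0]*x + coef[1]*y + coef[2])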
Example #7
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROI_PAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print('------------------------------------------------------')
    print('geocoding file: ' + fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print('reading lookup table file: ' + lut_file)
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in list(atr_rdr.keys()):
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print(
            '\tinput radar coord file has been subsetted, adjust lookup table value'
        )

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print('geocoding using scipy.interpolate.RegularGridInterpolator ...')
    data_geo = np.empty((len_geo, wid_geo)) * fill_value
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print('update attributes')
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print('reading ' + fname)
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print('update attributes')
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print('writing >>> ' + fname_out)
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print('Time used: %02d hours %02d mins %02d secs' % (h, m, s))
    return fname_out
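
The geocoding kernel above resamples data from radar row/column coordinates to the geometry given by the lookup table, using scipy's RegularGridInterpolator. A stripped-down sketch with synthetic arrays:

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

len_rdr, wid_rdr = 100, 120
data = np.random.rand(len_rdr, wid_rdr)                  # radar-coordinate data, hypothetical
pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

# lookup table: for each geocoded pixel, the (az, rg) radar coordinate to sample
az = np.random.uniform(0, len_rdr - 1, (80, 90))
rg = np.random.uniform(0, wid_rdr - 1, (80, 90))
pts_geo = np.hstack((az.reshape(-1, 1), rg.reshape(-1, 1)))

RGI_func = RGI(pts_rdr, data, method='nearest', bounds_error=False, fill_value=np.nan)
data_geo = RGI_func(pts_geo).reshape(az.shape)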
Example #8
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print('estimate phase ramp during the correction')
    print('ramp type: '+ramp_type)

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i],x_list[i]] == 0:
            print('\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n' % (y_list[i],x_list[i]))
            sys.exit(1)
    print('Number of bridges: '+str(len(x_list)//2))
    print('Bonding points coordinates:\nx: '+str(x_list)+'\ny: '+str(y_list))

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx  = ''
        n_bridge = len(x_list)//2
        for i in range(n_bridge):
            pair_yx = str(y_list[2*i])+','+str(x_list[2*i])+','+str(y_list[2*i+1])+','+str(x_list[2*i+1])
            if not i == n_bridge-1:
                point_yx += pair_yx+','
                line_yx  += pair_yx+';'
            else:
                point_yx += pair_yx
                line_yx  += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print(plot_cmd)
            os.system(plot_cmd)
        except: pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor'+ext
    ifgram_cor_deramp_file = os.path.splitext(ifgram_cor_file)[0]+'_'+ramp_type+ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file,'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file,'w')
        group = h5out.create_group(k)
        print('writing >>> '+ifgram_cor_file)

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file,'w')
            group_deramp = h5out_deramp.create_group(k)
            print('writing >>> '+ifgram_cor_deramp_file)

        ##### Loop
        print('Number of interferograms: '+str(ifgram_num))
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_derampCor-ramp, compression='gzip')
            for key, value in h5[k][ifgram].attrs.items():
                gg.attrs[key]=value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram, data=data_derampCor, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg_deramp.attrs[key]=value
            prog_bar.update(i+1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try: h5out_deramp.close()
        except: pass

    #### .unw file
    elif ext == '.unw':
        print('read '+ifgram_file)
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
        data_derampCor = bridging_data(data_deramp,mask,x_list,y_list)
        data_cor = data_derampCor - ramp

        print('writing >>> '+ifgram_cor_file)
        ifgram_cor_file        = writefile.write(data_cor,       atr, ifgram_cor_file)
        if save_cor_deramp_file:
            print('writing >>> '+ifgram_cor_deramp_file)
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr, ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: '+ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
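
bridging_data is a PySAR helper; a hedged sketch of the general bridging idea (not PySAR's exact implementation): estimate the integer number of 2*pi jumps between the two bonding points of a bridge and remove it from the patch containing the second point.

import numpy as np

def bridge_two_patches(data, patch_label, y0, x0, y1, x1):
    """Hypothetical sketch: shift the patch containing (y1, x1) by an integer
    number of 2*pi so that it matches the patch containing (y0, x0)."""
    num_jump = np.rint((data[y1, x1] - data[y0, x0]) / (2*np.pi))
    data = data.copy()
    data[patch_label == patch_label[y1, x1]] -= 2*np.pi*num_jump
    return data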
Example #9
def main(argv):

    ##### Inputs
    try:
        ifgram_file = argv[0]
        timeseries_file = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        outfile = argv[2]
    except:
        outfile = 'reconstructed_' + ifgram_file

    atr = readfile.read_attribute(timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series file
    print('loading timeseries ...')
    h5ts = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5ts['timeseries'].keys())
    date_num = len(date_list)
    timeseries = np.zeros((date_num, length * width))

    print('number of acquisitions: ' + str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5ts['timeseries'].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5ts.close()
    del d

    range2phase = -4 * np.pi / float(atr['WAVELENGTH'])
    timeseries = range2phase * timeseries

    #####  Estimate interferograms from timeseries
    print(
        'estimating interferograms from timeseries using design matrix from input interferograms'
    )
    A, B = ut.design_matrix(ifgram_file)
    p = -1 * np.ones([A.shape[0], 1])
    Ap = np.hstack((p, A))
    estData = np.dot(Ap, timeseries)
    del timeseries

    ##### Write interferograms file
    print('writing >>> ' + outfile)
    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5['interferograms'].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)

    h5out = h5py.File(outfile, 'w')
    group = h5out.create_group('interferograms')

    print('number of interferograms: ' + str(ifgram_num))
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        data = np.reshape(estData[i, :], (length, width))

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=data, compression='gzip')
        for key, value in h5['interferograms'][ifgram].attrs.items():
            gg.attrs[key] = value
        prog_bar.update(i + 1, suffix=date12_list[i])
    prog_bar.close()
    h5.close()
    h5out.close()
    print('Done.')
    return outfile
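
The reconstruction itself is one matrix product: each interferogram is the phase difference of its two acquisitions, encoded by the design matrix. A synthetic sketch of the same algebra (full design matrix, not PySAR's ut.design_matrix):

import numpy as np

# 3 dates, 3 interferograms (1-2, 2-3, 1-3)
A = np.array([[-1,  1,  0],
              [ 0, -1,  1],
              [-1,  0,  1]], dtype=np.float32)

ts = np.random.rand(3, 100).astype(np.float32)   # 3 dates x 100 pixels, hypothetical phase
ifgrams = np.dot(A, ts)                          # 3 interferograms x 100 pixels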
Example #10
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking ' + k + ' file ' + infile)
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print('writing >>> ' + outfile)

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k == '.trans':
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
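
multilook_matrix is a PySAR helper; the operation itself is block averaging. A minimal sketch of an azimuth x range multilook using NumPy reshaping (assuming nothing about PySAR's implementation):

import numpy as np

def multilook(data, lks_y, lks_x):
    # crop to a multiple of the look numbers, then average over lks_y x lks_x blocks
    rows = (data.shape[0] // lks_y) * lks_y
    cols = (data.shape[1] // lks_x) * lks_x
    d = data[:rows, :cols].reshape(rows // lks_y, lks_y, cols // lks_x, lks_x)
    return d.mean(axis=(1, 3))

data_mli = multilook(np.random.rand(100, 200), 5, 10)   # -> shape (20, 20)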
Example #11
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    '''Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, optional, output file name/path
    '''

    # Basic info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    try:
        ref_yx = [int(atr['ref_y']), int(atr['ref_x'])]
    except:
        ref_yx = None

    filter_type = filter_type.lower()
    MSG = 'filtering ' + k + ' file: ' + fname + ' using ' + filter_type + ' filter'
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        MSG += ' with kernel size of %d' % int(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        MSG += ' with sigma of %.1f' % filter_par
    print(MSG)

    if not fname_out:
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0] + '_' + filter_type + ext

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                dset = group.create_dataset(date,
                                            data=data_filt,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx and k in ['interferograms']:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_filt,
                                         compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_filt = filter_data(data, filter_type, filter_par)
        if ref_yx and k in ['.unw', 'velocity']:
            data_filt -= data_filt[ref_yx[0], ref_yx[1]]
        print('writing >>> ' + fname_out)
        writefile.write(data_filt, atr, fname_out)

    return fname_out
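
filter_data is a PySAR helper; the low/highpass variants it names can be sketched with scipy.ndimage, where a highpass result is simply the data minus its lowpass version:

import numpy as np
from scipy import ndimage

data = np.random.rand(200, 300).astype(np.float32)        # hypothetical input

lowpass_avg = ndimage.uniform_filter(data, size=5)         # moving-average lowpass, kernel size 5
lowpass_gau = ndimage.gaussian_filter(data, sigma=3.0)     # Gaussian lowpass, sigma 3.0
highpass_gau = data - lowpass_gau                          # corresponding highpass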
Example #12
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print('read option from template file: ' + inps.template_file)
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print("loading time series: " + inps.timeseries_file)
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print('number of acquisitions: ' + str(date_num))

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print('read perpendicular baseline')
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print('\tconsider P_BASELINE variation in azimuth direction')
        else:
            pbase = inps.pbase
    except:
        print('\tCannot find P_BASELINE_TIMESERIES from timeseries file.')
        print('\tTrying to calculate it from interferograms file')
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Cannot correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print('read temporal baseline')
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print('reading incidence angle from file: ' + inps.incidence_angle)
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print('use input incidence angle : ' +
                      str(inps.incidence_angle))
            except:
                raise ValueError('Can not read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print('calculate incidence angle using attributes of time series file')
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print('reading range distance from file: ' + inps.range_dis)
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print('use input range distance : ' + str(inps.range_dis))
            except:
                raise ValueError('Cannot read input range distance: ' +
                                 str(inps.range_dis))
    else:
        print(
            'calculate range distance using attributes from time series file')
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print('-------------------------------------------------')
    if inps.phase_velocity:
        print('using phase velocity history')
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print('using phase history')
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0

    # Polynomial order of model
    print("temporal deformation model's polynomial order = " +
          str(inps.poly_order))
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print("temporal deformation model's step function step at " +
              inps.step_date)
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print('-------------------------------------------------')

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width], dtype=np.float32)
    resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32)
    constC = np.zeros([length, width], dtype=np.float32)
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print('inverting using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width))
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i // length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print('inverting using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width))
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1].reshape((1, length))

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = delta_z
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print(
            'inverting using L2-norm minimization (unweighted least squares) for the whole area'
        )

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print(
            'ERROR: Script only support same dimension for both incidence angle and range distance matrix.'
        )
        print('dimension of incidence angle: ' +
              str(inps.incidence_angle.ndim))
        print('dimension of range distance: ' + str(inps.range_dis.ndim))
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in list(atr.keys()):
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print('writing >>> ' + dem_error_file)
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print('writing >>> ' + inps.outfile)
        print('number of dates: ' + str(len(date_list)))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print('writing >>> ' + outFile)
    print('number of dates: ' + str(A_def.shape[0]))
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.items():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
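# --- Usage sketch (not part of the original script) ----------------------------
# A minimal, self-contained illustration of the model inverted above: each date's
# displacement is a DEM-error term pbase/(range_dis*sin(inc_angle))*delta_z plus a
# temporal-polynomial term A_def*[vel, acc, ...]. All numbers below are synthetic
# and only meant to show how A is assembled and solved with np.linalg.pinv.
import numpy as np

date_num = 10
tbase = np.linspace(0, 3, date_num).reshape(-1, 1)            # years since first date
pbase = np.random.randn(date_num, 1) * 100.0                  # perpendicular baseline [m]
range_dis, inc_angle = 850e3, np.deg2rad(34.0)                 # slant range [m], incidence [rad]

A_delta_z = pbase / (range_dis * np.sin(inc_angle))           # DEM-error column
A_def = np.hstack((tbase, 0.5 * tbase**2))                     # linear + quadratic deformation
A = np.hstack((A_delta_z, A_def))

truth = np.array([[30.0], [0.02], [0.001]])                    # [delta_z (m), vel (m/yr), acc (m/yr^2)]
ts = np.dot(A, truth) + np.random.randn(date_num, 1) * 1e-4    # simulated time series [m]
X = np.dot(np.linalg.pinv(A), ts)                              # recovers ~[delta_z, vel, acc]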
示例#13
0
def main(argv):
    try:
        timeseriesFile = argv[0]
    except:
        usage()
        sys.exit(1)

    try:
        outname = argv[1]
    except:
        outname = 'sum_' + timeseriesFile

    ##### Read Timeseries
    atr = readfile.read_attribute(timeseriesFile)
    k = atr['FILE_TYPE']
    print("loading time series: " + timeseriesFile)
    h5timeseries = h5py.File(timeseriesFile)
    dateList = sorted(h5timeseries['timeseries'].keys())
    date_num = len(dateList)
    print('number of acquisitions: %d' % date_num)

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    D = np.zeros((date_num, length * width), np.float32)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = dateList[i]
        d = h5timeseries['timeseries'].get(date)[:]
        D[i][:] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5timeseries.close()

    ##### Calculate Sum
    print('calculating sum of epochs ...')
    sumD = np.zeros(D.shape)
    prog_bar.reset()
    for i in range(date_num):
        sumD[i, :] = np.sum(np.abs(D - D[i, :]), 0) / date_num
        prog_bar.update(i + 1)
    prog_bar.close()

    ## Normalize to 0 and 1
    ## with high atmosphere equal to 0 and no atmosphere equal to 1
    sumD -= np.max(sumD, 0)
    sumD *= -1
    sumD /= np.max(sumD, 0)
    sumD[np.isnan(sumD)] = 1

    ##### Write sum epochs file
    print('writing to >>> ' + outname)
    h5sum = h5py.File(outname, 'w')
    group = h5sum.create_group('timeseries')
    prog_bar.reset()
    for i in range(date_num):
        date = dateList[i]
        d = np.reshape(sumD[i][:], [length, width])
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    for key, value in atr.items():
        group.attrs[key] = value
    h5sum.close()
    print('Done.')
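# --- Illustration (not part of the original script) -----------------------------
# The normalization above maps each date's mean absolute deviation so that the
# noisiest epoch (largest sum) becomes 0 and the cleanest becomes 1. A tiny,
# synthetic example of the same arithmetic:
import numpy as np

D = np.array([[0.0, 1.0],
              [0.1, 1.2],
              [2.0, 5.0]], np.float32)              # 3 dates x 2 pixels (synthetic)
sumD = np.zeros(D.shape, np.float32)
for i in range(D.shape[0]):
    sumD[i, :] = np.sum(np.abs(D - D[i, :]), 0) / D.shape[0]
sumD -= np.max(sumD, 0)
sumD *= -1
sumD /= np.max(sumD, 0)
sumD[np.isnan(sumD)] = 1
# each column now lies in [0, 1], with the most deviating date at 0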
示例#14
0
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file.'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'subtract', 'subtraction',
                          'substract', 'substraction']:                suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        else:
            raise ValueError('unsupported operator: '+str(operator))
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print('writing >>> '+fname_out)

        if k == 'timeseries':
            print('number of acquisitions: '+str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print('number of interferograms: '+str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(data_out, atr, fname_out)

    return fname_out
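# --- Usage sketch (not part of the original function) ---------------------------
# Hypothetical calls; the file names are illustrative and data_operation() plus the
# readfile/writefile modules are assumed to be importable from the same package:
# file_operation('velocity.h5', '*', 100)                     # scale, e.g. m/yr -> cm/yr
# file_operation('timeseries.h5', '-', 0.005, 'timeseries_shifted.h5')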
示例#15
0
def main(argv):
    inps = cmdLineParse()

    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input ' + k + ' file: ' + inps.timeseries_file)
    if not k == 'timeseries':
        sys.exit('ERROR: input file is not timeseries!')
    h5file = h5py.File(inps.timeseries_file)

    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    print('--------------------------------------------')
    print('Dates from input file: ' + str(len(dateListAll)))
    print(dateListAll)

    inps.ex_date = get_exclude_date(inps, dateListAll)

    dateList = sorted(list(set(dateListAll) - set(inps.ex_date)))
    print('--------------------------------------------')
    if len(dateList) == len(dateListAll):
        print('using all dates to calculate the velocity')
    else:
        print('Dates used to estimate the velocity: ' + str(len(dateList)))
        print(dateList)
    print('--------------------------------------------')

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix
    B = np.ones([len(datevector), 2])
    B[:, 0] = datevector
    #B_inv = np.linalg.pinv(B)
    B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
    B_inv = np.array(B_inv, np.float32)

    # Loading timeseries
    print("Loading time series file: " + inps.timeseries_file + ' ...')
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum, length * width], np.float32)
    prog_bar = ptime.progress_bar(maxValue=dateNum, prefix='loading: ')
    for i in range(dateNum):
        date = dateList[i]
        timeseries[i, :] = h5file[k].get(date)[:].flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5file.close()

    # Velocity Inversion
    print('Calculating velocity ...')
    X = np.dot(B_inv, timeseries)
    velocity = np.reshape(X[0, :], [length, width])

    print('Calculating rmse ...')
    timeseries_linear = np.dot(B, X)
    timeseries_residual = timeseries - timeseries_linear
    rmse = np.reshape(np.sqrt((np.sum((timeseries_residual)**2, 0)) / dateNum),
                      [length, width])

    print('Calculating the standard deviation of the estimated velocity ...')
    s1 = np.sqrt(np.sum(timeseries_residual**2, 0) / (dateNum - 2))
    s2 = np.sqrt(np.sum((datevector - np.mean(datevector))**2))
    std = np.reshape(s1 / s2, [length, width])

    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'

    inps.outfile_rmse = os.path.splitext(
        inps.outfile)[0] + 'Rmse' + os.path.splitext(inps.outfile)[1]
    inps.outfile_std = os.path.splitext(
        inps.outfile)[0] + 'Std' + os.path.splitext(inps.outfile)[1]
    inps.outfile_r2 = os.path.splitext(
        inps.outfile)[0] + 'R2' + os.path.splitext(inps.outfile)[1]

    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum - 1]

    # File Writing
    print('--------------------------------------')
    atr['FILE_TYPE'] = 'velocity'
    print('writing >>> ' + inps.outfile)
    writefile.write(velocity, atr, inps.outfile)

    #atr['FILE_TYPE'] = 'rmse'
    print('writing >>> ' + inps.outfile_rmse)
    writefile.write(rmse, atr, inps.outfile_rmse)

    #atr['FILE_TYPE'] = 'std'
    print('writing >>> ' + inps.outfile_std)
    writefile.write(std, atr, inps.outfile_std)

    print('Done.\n')
    return inps.outfile
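# --- Illustration (not part of the original script) -----------------------------
# The velocity above is an ordinary least-squares line fit d(t) = v*t + c per pixel,
# with design matrix B = [t, 1] and X = (B^T B)^-1 B^T d. A synthetic one-pixel example:
import numpy as np

t = np.array([2015.1, 2015.4, 2015.9, 2016.3, 2016.8])    # decimal years (synthetic)
d = 0.03 * (t - t[0]) + np.random.randn(t.size) * 1e-3    # ~3 cm/yr of motion plus noise [m]

B = np.ones((t.size, 2))
B[:, 0] = t
B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
vel, offset = np.dot(B_inv, d)                             # vel ~= 0.03 m/yr
resid = d - np.dot(B, np.dot(B_inv, d))
rmse = np.sqrt(np.sum(resid**2) / t.size)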
示例#16
0
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print('Reference value: ')
    print(refList)

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('file type: ' + k)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print('\nERROR: Reference value has a different epoch number '
                  'from the input file.')
            print('Reference list epoch number: ' + str(len(refList)))
            print('Input file     epoch number: ' + str(epochNum))
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + outName)
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print('number of acquisitions: ' + str(epochNum))
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(epochNum))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.items():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print('writing >>> ' + outName)
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
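# --- Usage sketch (not part of the original function) ---------------------------
# Hypothetical call: re-reference a time series so that pixel (y=300, x=500) is zero
# in every acquisition. ref_list must hold one value per epoch, in the same order as
# the dates in the file; the file names and pixel location are illustrative.
# ref_list = [0.0, 0.002, -0.001]        # value at the reference pixel for each date
# seed_file_reference_value('timeseries.h5', 'timeseries_seeded.h5',
#                           ref_list, ref_y=300, ref_x=500)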
示例#17
0
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        'mask_all.h5' = add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print('First input file is ' + atr['PROCESSOR'] + ' ' + k)

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file
                    and ki in multi_group_hdf5_file):
                pass
            else:
                print('Input file structures are not the same: ' + k +
                      ' vs. ' + ki)
                sys.exit(1)

        print('writing >>> ' + fname_out)
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print('number of acquisitions: %d' % epoch_num)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                data = add_matrix(data, d)
                h5file.close()

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print('number of interferograms: %d' % epoch_num)
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = list(h5file.keys())[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(
                    temp_epoch_list[i])[:]
                data = add_matrix(data, d)
                h5file.close()

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print('loading ' + fname)
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print('writing >>> ' + fname_out)
        writefile.write(data, atr, fname_out)

    return fname_out
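# --- Usage note (not part of the original function) -----------------------------
# When fname_out is omitted, the output name is built from the inputs, e.g.
# add_files(['mask_1.h5', 'mask_2.h5'])  ->  'mask_1_plus_mask_2.h5'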
示例#18
0
def unwrap_error_correction_phase_closure(ifgram_file, mask_file, ifgram_cor_file=None):
    '''Correct unwrapping errors in network of interferograms using phase closure.
    Inputs:
        ifgram_file     - string, name/path of interferograms file
        mask_file       - string, name/path of mask file to mask the pixels to be corrected
        ifgram_cor_file - string, optional, name/path of corrected interferograms file
    Output:
        ifgram_cor_file
    Example:
        'unwrapIfgram_unwCor.h5' = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5','mask.h5')
    '''
    print('read mask from file: '+mask_file)
    mask = readfile.read(mask_file)[0].flatten('F')

    atr = readfile.read_attribute(ifgram_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    pixel_num = length*width

    # Check reference pixel
    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    h5 = h5py.File(ifgram_file,'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)

    ##### Prepare curls
    curls, Triangles, C = ut.get_triangles(h5)
    curl_num = np.shape(curls)[0]
    print('Number of      triangles: '+  str(curl_num))

    curl_file='curls.h5'
    if not os.path.isfile(curl_file):
        print('writing >>> '+curl_file)
        ut.generate_curls(curl_file, h5, Triangles, curls)

    thr=0.50
    curls = np.array(curls);   n1=curls[:,0];   n2=curls[:,1];   n3=curls[:,2]

    print('reading interferograms...')
    print('Number of interferograms: '+ str(ifgram_num))
    data = np.zeros((ifgram_num,pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for ni in range(ifgram_num):
        ifgram = ifgram_list[ni]
        d = h5[k][ifgram].get(ifgram)[:].flatten('F')
        data[ni,:] = d
        prog_bar.update(ni+1)
    prog_bar.close()

    print('reading curls ...') 
    print('number of curls: '+str(curl_num))
    h5curl = h5py.File(curl_file,'r')
    curl_list = sorted(h5curl[k].keys())
    curl_data = np.zeros((curl_num, pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=curl_num)
    for ni in range(curl_num):
        d = h5curl[k][curl_list[ni]].get(curl_list[ni])[:].flatten('F')
        curl_data[ni,:] = d
        prog_bar.update(ni+1)
    prog_bar.close()
    h5curl.close() 

    print('estimating unwrapping error pixel by pixel ...')
    EstUnwrap = np.zeros((ifgram_num,pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for ni in range(pixel_num):
        if mask[ni]==1:
            dU = data[:,ni]
            unwCurl = np.array(curl_data[:,ni])

            # Split triangles into inconsistent (|closure| >= thr) and consistent ones
            ind  = np.abs(unwCurl)>=thr;      N1 =n1[ind];      N2 =n2[ind];      N3 =n3[ind]
            indC = np.abs(unwCurl)< thr;      Nc1=n1[indC];     Nc2=n2[indC];     Nc3=n3[indC]

            N =np.hstack([N1, N2, N3]);       UniN =np.unique(N)
            Nc=np.hstack([Nc1,Nc2,Nc3]);      UniNc=np.unique(Nc)

            # Keep only interferograms that appear exclusively in consistent triangles
            inter = list(set(UniNc) & set(UniN)) # intersection
            UniNc = list(UniNc)
            for x in inter:
                UniNc.remove(x)

            # Constrain those interferograms to zero correction
            D = np.zeros([len(UniNc),ifgram_num])
            for i in range(len(UniNc)):
                D[i,UniNc[i]]=1

            # Stack closure equations (-2*pi*C*M ~ C*dU), zero constraints and damping
            AAA  = np.vstack([-2*np.pi*C,D])
            AAAA = np.vstack([AAA,0.25*np.eye(ifgram_num)])

            ##########
            # with Tikhonov regularization:
            LLL = list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ifgram_num))
            ind = np.isnan(AAAA)
            M1 = pinv(AAAA)
            M = np.dot(M1,LLL)
            EstUnwrap[:,ni] = np.round(M[0:ifgram_num])*2.0*np.pi
        prog_bar.update(ni+1, suffix='%s/%d' % (ni,pixel_num))
    prog_bar.close()

    dataCor = data + EstUnwrap

    ##### Output
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor.h5'
    print('writing >>> '+ifgram_cor_file)
    h5unwCor = h5py.File(ifgram_cor_file,'w') 
    gg = h5unwCor.create_group(k) 

    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram, data=np.reshape(dataCor[i,:],[width,length]).T, compression='gzip')
        for key, value in h5[k][ifgram].attrs.items():
            group.attrs[key] = value
        prog_bar.update(i+1)
    prog_bar.close()
    h5unwCor.close()
    h5.close()
    return ifgram_cor_file
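# --- Illustration (not part of the original function) ---------------------------
# The inversion above looks for integer-cycle offsets M (one per interferogram) so
# that the triangle closure C*(phase + 2*pi*M) vanishes. A one-triangle, one-pixel
# toy case with a one-cycle unwrapping error injected into the second interferogram;
# here the error is attributed by hand, whereas the function distributes it with a
# regularized least-squares fit:
import numpy as np

C = np.array([[1.0, 1.0, -1.0]])                 # closure: phi_12 + phi_23 - phi_13
phase = np.array([0.3, 0.5 + 2*np.pi, 0.8])      # phi_23 carries a one-cycle error
closure = np.dot(C, phase)                       # ~ 2*pi, flags the inconsistency
M = np.round(-closure / (2*np.pi))               # integer cycles to add (here -1)
phase_cor = phase.copy()
phase_cor[1] += 2*np.pi * M[0]                   # corrected; np.dot(C, phase_cor) ~ 0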
示例#19
0
def main(argv):
    inps = cmdLineParse()

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    if k not in ['timeseries']:
        sys.exit('ERROR: only timeseries file supported, input is ' + k +
                 ' file!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32).reshape(
        (date_num, 1))
    tbase /= 365.25

    # Read timeseries
    print('loading time-series ...')
    timeseries = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5[k].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Smooth timeseries with moving window in time
    print(
        'smoothing time-series using moving gaussian window with size of %.1f years'
        % inps.time_win)
    timeseries_filt = np.zeros((date_num, pixel_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        # Weight from Gaussian (normal) distribution in time
        t_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (t_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        weightMat = np.tile(weight, (1, pixel_num))
        # Smooth the current acquisition - moving window in time one by one
        timeseries_filt[i, :] = np.sum(timeseries * weightMat, 0)
        prog_bar.update(i + 1, suffix=date)
    del weightMat
    del timeseries
    prog_bar.close()

    # Write smoothed timeseries file
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    ref_date_idx = date_list.index(ref_date)
    print('reference date: ' + ref_date)
    print('reference date index: ' + str(ref_date_idx))
    ref_data = np.reshape(timeseries_filt[ref_date_idx, :], [length, width])

    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0] + '_smooth.h5'
    print('writing >>> ' + inps.outfile)
    print('number of acquisitions: ' + str(date_num))

    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = np.reshape(timeseries_filt[i, :], [length, width])
        dset = group.create_dataset(date,
                                    data=data - ref_data,
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    for key, value in atr.items():
        group.attrs[key] = value
    h5out.close()
    prog_bar.close()

    print('Done.')
    return inps.outfile
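# --- Illustration (not part of the original script) -----------------------------
# The filtering above is a normalized Gaussian-weighted average in time, with
# inps.time_win acting as the standard deviation sigma (in years). A one-pixel,
# synthetic version of the same weighting:
import numpy as np

tbase = np.array([0.0, 0.1, 0.25, 0.5, 0.9, 1.3])      # years since the first date
ts = np.array([0.0, 0.4, 0.1, 0.5, 0.2, 0.6])          # noisy one-pixel series [m]
sigma = 0.5                                            # plays the role of inps.time_win

ts_filt = np.zeros_like(ts)
for i in range(tbase.size):
    weight = np.exp(-0.5 * ((tbase[i] - tbase) / sigma)**2)
    weight /= np.sum(weight)
    ts_filt[i] = np.sum(ts * weight)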