Code Example #1
File: transect.py Project: Ovec8hkin/PySAR
def manual_select_start_end_point(File):
    '''Manually select the start/end points in the displayed figure.'''
    print('reading ' + File + ' ...')
    data, atr = readfile.read(File)
    print('displaying ' + File + ' ...')
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.imshow(data)

    xc = []
    yc = []
    print('please click on start and end point of the desired profile')
    print('then close the figure to continue')

    def onclick(event):
        # ignore clicks outside the axes (event.xdata/ydata would be None there)
        if event.button == 1 and event.inaxes:
            xcc, ycc = int(event.xdata), int(event.ydata)
            xc.append(xcc)
            yc.append(ycc)
            print('x = ' + str(xcc) + '\ny = ' + str(ycc))
            ax.plot(xcc, ycc, 'ro')
            fig.canvas.draw()

    cid = fig.canvas.mpl_connect('button_release_event', onclick)
    plt.show()

    start_yx = [yc[0], xc[0]]
    end_yx = [yc[1], xc[1]]
    return start_yx, end_yx
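
A minimal usage sketch of the function above; the file name is hypothetical, and readfile/matplotlib are assumed to be imported as in the source module:

# Hypothetical call: click twice on the displayed image, then close it.
start_yx, end_yx = manual_select_start_end_point('velocity.h5')
print('profile start (y, x): ' + str(start_yx))
print('profile end   (y, x): ' + str(end_yx))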
Code Example #2
File: transect.py Project: Ovec8hkin/PySAR
def transect_list(fileList, inps):
    '''Get transect along the input line from a list of files
    Inputs:
        fileList : list of str, path of files to get transect
        inps     : Namespace including the following items:
                   start/end_lalo
                   start/end_yx
                   interpolation
    Outputs:
        transectList : list of N*2 matrices containing distance and value
        atrList      : list of attribute dictionary, for each input file
    '''

    transectList = []
    atrList = []
    for File in fileList:
        print('reading ' + File)
        data, atr = readfile.read(File)
        if inps.start_lalo and inps.end_lalo:
            transect = transect_lalo(data, atr, inps.start_lalo, inps.end_lalo,
                                     inps.interpolation)
        else:
            transect = transect_yx(data, atr, inps.start_yx, inps.end_yx,
                                   inps.interpolation)
        transectList.append(transect)
        atrList.append(atr)
    return transectList, atrList
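
A sketch of driving transect_list directly with an argparse.Namespace; the field values, interpolation choice, and file names here are hypothetical:

from argparse import Namespace

# Hypothetical inputs: a profile defined in y/x pixel coordinates.
inps = Namespace(start_yx=[100, 50], end_yx=[400, 300],
                 start_lalo=None, end_lalo=None, interpolation='nearest')
transectList, atrList = transect_list(['velocity.h5', 'temporalCoherence.h5'], inps)
print('%d transects extracted' % len(transectList))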
Code Example #3
File: view_gui.py Project: Ovec8hkin/PySAR
def pick_file():
    global attributes, starting_upper_lim

    if h5_file.get() == "":
        filename = filedialog.askopenfilename(initialdir="/", title="Select file",
                                              filetypes=(("HDF5 files", "*.h5"), ("all files", "*.*")))
        frame.filename = filename
        h5_file.set(frame.filename)
        h5_file_short.set(filename.split("/")[-1])
        pick_h5_file_button.config(text="Cancel")

        atr = readfile.read_attribute(h5_file.get())

        file_type = atr['FILE_TYPE']

        if file_type not in readfile.multi_group_hdf5_file + readfile.multi_dataset_hdf5_file + ['HDFEOS']:
            data, attributes = readfile.read(h5_file.get())
            data_max = numpy.amax(data)  # avoid shadowing the builtin max()
            starting_upper_lim = data_max * 2
            update_sliders("m")
            y_lim_upper.set(data_max)

        set_variables_from_attributes()

        return frame.filename
    else:
        h5_file.set("")
        h5_file_short.set("No File Selected")
        pick_h5_file_button.config(text="Select .h5 File")
Code Example #4
def set_dem_file():
    global ax_v, inps, img

    if inps.dem_file:
        dem = readfile.read(inps.dem_file)[0]
        ax_v = view.plot_dem_yx(ax_v, dem)

    img = ax_v.imshow(d_v,
                      cmap=inps.colormap,
                      clim=inps.ylim,
                      interpolation='nearest')
Code Example #5
def main(argv):
    try:
        demFile = argv[0]
        File = argv[1]
    except IndexError:
        usage()
        sys.exit(1)

    dem, demRsc = readfile.read(demFile)
    data, atr = readfile.read(File)
    print('Input file is ' + atr['FILE_TYPE'])

    # Subset
    try:
        y0, y1 = [int(i) for i in argv[2].split(':')]
        x0, x1 = [int(i) for i in argv[3].split(':')]
        data = data[y0:y1, x0:x1]
        dem = dem[y0:y1, x0:x1]
    except (IndexError, ValueError):
        pass

    # Calculation
    dem = dem.flatten('F')
    data = data.flatten('F')
    ndx = ~np.isnan(data)
    C1 = np.zeros([2, len(dem[ndx])])
    C1[0][:] = dem[ndx]
    C1[1][:] = data[ndx]

    # Display
    print('-------------------------------------------')
    print('Correlation with the DEM:  %.2f' % np.corrcoef(C1)[0][1])
    print('-------------------------------------------')
    print('DEM info:')
    print('    Max height difference: %.2f m' %
          (np.max(dem[ndx]) - np.min(dem[ndx])))
    print('    Average        height: %.2f m' % np.mean(dem[ndx]))
    print('    Height            Std: %.2f m' % np.std(dem[ndx]))
    return
Code Example #6
def load_single_dataset_hdf5(file_type, infile, outfile, extra_meta_dict=dict()):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli
    Inputs:
        file_type : string, group name of hdf5 file, i.e. dem, mask
        infile    : string, input ROI_PAC file name
        outfile   : string, output hdf5 file name
        extra_meta_dict : dict, extra attributes to output file
    Output:
        outfile   : string, output hdf5 file name
    '''
    atr = readfile.read_attribute(infile)

    if ut.update_file(outfile, infile):
        if (os.path.dirname(infile) == os.path.dirname(outfile) and \
            os.path.splitext(infile)[1] == os.path.splitext(outfile)[1]):
            print(infile+' already in working directory with recommended format, no need to re-load.')
            outfile = infile

        else:
            # Read input file
            print('loading file: '+infile)
            data = readfile.read(infile)[0]

            # Write output file - data
            print('writing >>> '+outfile)
            h5 = h5py.File(outfile, 'w')
            group = h5.create_group(file_type)
            dset = group.create_dataset(file_type, data=data, compression='gzip')

            # Write output file - attributes
            # (resolve INSAR_PROCESSOR before the attrs loop, so it actually ends up in the file)
            if 'INSAR_PROCESSOR' not in atr and 'insar_processor' in extra_meta_dict:
                atr['INSAR_PROCESSOR'] = extra_meta_dict['insar_processor']
            for key, value in atr.items():
                group.attrs[key] = value
            if 'project_name' in extra_meta_dict:
                group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
            h5.close()

    #if (os.path.abspath(infile) != os.path.abspath(outfile) and \
    #    os.path.dirname(infile) == os.path.dirname(outfile)):
    #    print 'remove the duplicated, obsolete '+atr['FILE_TYPE']+' file in the same directory'
    #    rmCmd = 'rm '+infile
    #    print rmCmd
    #    os.system(rmCmd)

    return outfile
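
A hypothetical call, loading a ROI_PAC DEM in radar coordinates into an HDF5 file (the input file and project name are placeholders):

outfile = load_single_dataset_hdf5('dem', 'radar_4rlks.hgt', 'demRadar.h5',
                                   extra_meta_dict={'project_name': 'TestProject'})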
Code Example #7
File: seed_data.py Project: Ovec8hkin/PySAR
def select_max_coherence_yx(cohFile, mask=None, min_coh=0.85):
    '''Randomly select a pixel with coherence > min_coh.'''
    print('\n---------------------------------------------------------')
    print('randomly select a pixel with coherence > ' + str(min_coh))
    print('use coherence file: ' + cohFile)
    coh, coh_atr = readfile.read(cohFile)
    if mask is not None:
        coh[mask == 0] = 0.0
    coh_mask = coh >= min_coh
    y, x = random_select_reference_yx(coh_mask, print_msg=False)
    #y, x = np.unravel_index(np.argmax(coh), coh.shape)
    print('y/x: ' + str([y, x]))
    print('---------------------------------------------------------')

    return y, x
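
A usage sketch with an optional mask; both file names are hypothetical, and readfile is assumed to be imported:

mask = readfile.read('maskTempCoh.h5')[0]
ref_y, ref_x = select_max_coherence_yx('averageSpatialCoherence.h5',
                                       mask=mask, min_coh=0.85)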
Code Example #8
def set_mask():
    global mask, inps, atr

    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']

        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except IndexError:
            inps.mask_file = None

    try:
        mask = readfile.read(inps.mask_file)[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except:
        mask = None
        print('No mask used.')
Code Example #9
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print('Input file is ' + k)

    # Check: file in geo coord
    if 'X_FIRST' not in list(atr.keys()):
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch and k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print("No date/date12 input.\nIt's required for " + k + " file")
        sys.exit(1)

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch,
                                               vars(inps))

    # Data Operation - Display Unit & Rewrapping
    data, inps.disp_unit, inps.wrap = pview.scale_data4disp_unit_and_rewrap(
        data, atr, inps.disp_unit, inps.wrap)
    if inps.wrap:
        inps.ylim = [-np.pi, np.pi]

    ##### 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data, atr, inps.outfile, inps)

    print('Done.')
    return
Code Example #10
File: save_gmt.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print('Input file is '+k)

    # Check: file in geo coord
    if 'X_FIRST' not in list(atr.keys()):
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch:
        if k in multi_group_hdf5_file:
            print("No date/date12 input.\nIt's required for "+k+" file")
            sys.exit(1)
        elif k in multi_dataset_hdf5_file:
            print('No input date given, continue to convert the last date of the time-series.')
            h5 = h5py.File(inps.file, 'r')
            date_list = sorted(h5[k].keys())
            h5.close()
            inps.epoch = date_list[-1]

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch, vars(inps))
    inps.outfile = os.path.splitext(inps.outfile)[0]+'.grd'

    ##### 2. Write GMT .grd file
    inps.outfile = write_grd_file(data, atr, inps.outfile)
    print('Done.')
    return inps.outfile
Code Example #11
File: subset.py Project: Ovec8hkin/PySAR
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional, fill value for area outside of data coverage, default=None
                                   None/absent to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file;
                   outFile = 'subset_'+File, if File is in the current directory;
                   outFile = os.path.basename(File), if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print('subset ' + k + ' file: ' + File + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # If fill_value exists and is not None, subset data and fill the assigned value for areas outside its coverage;
    # otherwise, re-check the subset box to make sure it is within data coverage and initialize the matrix with np.nan.
    outfill = bool(subset_dict.get('fill_value', None))
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print('data   range in y/x: ' + str(data_box))
    print('subset range in y/x: ' + str(pix_box))
    print('data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict)))
    print('subset range in lat/lon: ' + str(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if subset_dict.get('tight', False):
                outFile = os.path.splitext(
                    File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print('number of acquisitions: ' + str(epochNum))
        else:
            print('number of interferograms: ' + str(epochNum))

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
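
A sketch of calling subset_file programmatically; the pixel ranges and file names are hypothetical:

subset_dict = {'subset_y': [200, 800], 'subset_x': [100, 600]}
outFile = subset_file('velocity.h5', subset_dict, outFile='subset_velocity.h5')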
Code Example #12
File: seed_data.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    if inps.reset:
        print('----------------------------------------------------------------------------')
        for file in inps.file:
            remove_reference_pixel(file)
        return

    ##### Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print('reading reference info from template: ' + inps.template_file)
        inps = read_seed_template2inps(inps.template_file, inps)
    if inps.reference_file:
        print('reading reference info from reference: ' + inps.reference_file)
        inps = read_seed_reference2inps(inps.reference_file, inps)

    ## Do not use ref_lat/lon input for file in radar-coord
    #if not 'X_FIRST' in atr.keys() and (inps.ref_lat or inps.ref_lon):
    #    print 'Lat/lon reference input is disabled for file in radar coord.'
    #    inps.ref_lat = None
    #    inps.ref_lon = None

    # Convert ref_lat/lon to ref_y/x
    if inps.ref_lat and inps.ref_lon:
        if 'X_FIRST' in list(atr.keys()):
            inps.ref_y = subset.coord_geo2radar(inps.ref_lat, atr, 'lat')
            inps.ref_x = subset.coord_geo2radar(inps.ref_lon, atr, 'lon')
        else:
            # Convert lat/lon to az/rg for radar coord file using geomap*.trans file
            inps.ref_y, inps.ref_x = ut.glob2radar(np.array(inps.ref_lat), np.array(inps.ref_lon),\
                                                   inps.trans_file, atr)[0:2]
        print('Input reference point in lat/lon: ' +
              str([inps.ref_lat, inps.ref_lon]))
    print('Input reference point in   y/x  : ' + str([inps.ref_y, inps.ref_x]))

    # Do not use ref_y/x outside of data coverage (0 is a valid index, so test against None)
    if (inps.ref_y is not None and inps.ref_x is not None
            and not (0 <= inps.ref_y < length and 0 <= inps.ref_x < width)):
        inps.ref_y = None
        inps.ref_x = None
        print('WARNING: input reference point is OUT of data coverage!')
        print('Continue with other method to select reference point.')

    # Do not use ref_y/x in masked out area
    if inps.ref_y is not None and inps.ref_x is not None and inps.mask_file:
        print('mask: ' + inps.mask_file)
        mask = readfile.read(inps.mask_file)[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            print('WARNING: input reference point is in masked OUT area!')
            print('Continue with other method to select reference point.')

    ##### Select method
    if inps.ref_y is not None and inps.ref_x is not None:
        inps.method = 'input-coord'
    elif inps.coherence_file:
        if os.path.isfile(inps.coherence_file):
            inps.method = 'max-coherence'
        else:
            inps.coherence_file = None

    if inps.method == 'manual':
        inps.parallel = False
        print('Parallel processing is disabled for manual seeding method.')

    ##### Seeding file by file
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    if len(inps.file) == 1:
        seed_file_inps(inps.file[0], inps, inps.outfile)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(seed_file_inps)(file, inps)
                                   for file in inps.file)
    else:
        for File in inps.file:
            seed_file_inps(File, inps)

    print('Done.')
    return
Code Example #13
File: seed_data.py Project: Ovec8hkin/PySAR
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    '''Seed the input file with the reference values in refList.'''
    print('Reference value: ')
    print(refList)

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('file type: ' + k)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if epochNum != len(refList):
            print('\nERROR: Reference value has different epoch number '
                  'from input file.')
            print('Reference List epoch number: ' + str(len(refList)))
            print('Input file     epoch number: ' + str(epochNum))
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + outName)
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print('number of acquisitions: ' + str(epochNum))
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(epochNum))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.items():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print('writing >>> ' + outName)
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
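
A sketch for the single-dataset case: re-reference a file to its value at a chosen pixel. The file name and coordinates are hypothetical, and refList is a scalar here:

ref_y, ref_x = 300, 150
data, atr = readfile.read('velocity.h5')
seed_file_reference_value('velocity.h5', 'Seeded_velocity.h5',
                          refList=data[ref_y, ref_x], ref_y=ref_y, ref_x=ref_x)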
Code Example #14
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print('read mask file: ' + maskFile)
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    ##### Input File Info
    k = atr['FILE_TYPE']
    print('Input file is ' + k)
    print('remove ramp type: ' + surf_type)

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print('writing >>> ' + outFile)

    if k in ['timeseries']:
        print('number of acquisitions: ' + str(len(epochList)))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print('Removing ' + surf_type + ' from ' + k)

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print('writing >>> ' + outFile)
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print('Remove %s took %.2f secs' % (surf_type, time.time() - start))
    return outFile
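
A hypothetical call, removing a quadratic ramp from a time-series file under a temporal-coherence mask (file names and ramp type are placeholders):

outFile = remove_surface('timeseries.h5', 'quadratic',
                         maskFile='maskTempCoh.h5')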
Code Example #15
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        else:  raise ValueError('Unrecognized operator: '+operator)
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print('writing >>> '+fname_out)

        if k == 'timeseries':
            print('number of acquisitions: '+str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print('number of interferograms: '+str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(data_out, atr, fname_out)

    return fname_out
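
A usage sketch, scaling every dataset in a file by a constant; the file name is hypothetical:

# e.g. convert velocity from m/yr to cm/yr
fname_out = file_operation('velocity.h5', '*', 100.0)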
Code Example #16
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        'mask_all.h5' = add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print('First input file is ' + atr['PROCESSOR'] + ' ' + k)

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if not (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file and ki in multi_group_hdf5_file):
                print('Input file structures are not the same: ' + k +
                      ' v.s. ' + ki)
                sys.exit(1)

        print('writing >>> ' + fname_out)
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print('number of acquisitions: %d' % epoch_num)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                data = add_matrix(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print('number of interferograms: %d' % epoch_num)
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = list(h5file.keys())[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(
                    temp_epoch_list[i])[:]
                data = add_matrix(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print('loading ' + fname)
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print('writing >>> ' + fname_out)
        writefile.write(data, atr, fname_out)

    return fname_out
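
Usage mirrors the docstring example above; the file names are placeholders:

fname_out = add_files(['mask_1.h5', 'mask_2.h5', 'mask_3.h5'], 'mask_all.h5')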
Code Example #17
def unwrap_error_correction_phase_closure(ifgram_file, mask_file, ifgram_cor_file=None):
    '''Correct unwrapping errors in network of interferograms using phase closure.
    Inputs:
        ifgram_file     - string, name/path of interferograms file
        mask_file       - string, name/path of mask file to mask the pixels to be corrected
        ifgram_cor_file - string, optional, name/path of corrected interferograms file
    Output:
        ifgram_cor_file
    Example:
        'unwrapIfgram_unwCor.h5' = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5','mask.h5')
    '''
    print('read mask from file: '+mask_file)
    mask = readfile.read(mask_file)[0].flatten('F')

    atr = readfile.read_attribute(ifgram_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    pixel_num = length*width

    # Check reference pixel
    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    h5 = h5py.File(ifgram_file,'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)

    ##### Prepare curls
    curls, Triangles, C = ut.get_triangles(h5)
    curl_num = np.shape(curls)[0]
    print('Number of      triangles: '+  str(curl_num))

    curl_file = 'curls.h5'
    if not os.path.isfile(curl_file):
        print('writing >>> '+curl_file)
        ut.generate_curls(curl_file, h5, Triangles, curls)

    thr = 0.50
    curls = np.array(curls)
    n1 = curls[:, 0]
    n2 = curls[:, 1]
    n3 = curls[:, 2]

    print('reading interferograms...')
    print('Number of interferograms: '+ str(ifgram_num))
    data = np.zeros((ifgram_num,pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for ni in range(ifgram_num):
        ifgram = ifgram_list[ni]
        d = h5[k][ifgram].get(ifgram)[:].flatten('F')
        data[ni,:] = d
        prog_bar.update(ni+1)
    prog_bar.close()

    print('reading curls ...')
    print('number of curls: '+str(curl_num))
    h5curl = h5py.File(curl_file,'r')
    curl_list = sorted(h5curl[k].keys())
    curl_data = np.zeros((curl_num, pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=curl_num)
    for ni in range(curl_num):
        d = h5curl[k][curl_list[ni]].get(curl_list[ni])[:].flatten('F')
        curl_data[ni,:] = d
        prog_bar.update(ni+1)
    prog_bar.close()
    h5curl.close() 

    print('estimating unwrapping error pixel by pixel ...')
    EstUnwrap = np.zeros((ifgram_num,pixel_num),np.float32)
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for ni in range(pixel_num):
        if mask[ni]==1:
            dU = data[:,ni]
            unwCurl = np.array(curl_data[:,ni])

            ind = np.abs(unwCurl) >= thr
            N1, N2, N3 = n1[ind], n2[ind], n3[ind]
            indC = np.abs(unwCurl) < thr
            Nc1, Nc2, Nc3 = n1[indC], n2[indC], n3[indC]

            N = np.hstack([N1, N2, N3])
            UniN = np.unique(N)
            Nc = np.hstack([Nc1, Nc2, Nc3])
            UniNc = np.unique(Nc)

            inter = list(set(UniNc) & set(UniN))  # intersection
            UniNc = list(UniNc)
            for x in inter:
                UniNc.remove(x)

            D = np.zeros([len(UniNc),ifgram_num])
            for i in range(len(UniNc)):
                D[i,UniNc[i]]=1

            AAA  = np.vstack([-2*np.pi*C,D])
            AAAA = np.vstack([AAA,0.25*np.eye(ifgram_num)])

            ##########
            # with Tikhonov regularization:
            LLL = list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ifgram_num))
            ind = np.isnan(AAAA)
            M1 = pinv(AAAA)
            M = np.dot(M1,LLL)
            EstUnwrap[:,ni] = np.round(M[0:ifgram_num])*2.0*np.pi
        prog_bar.update(ni+1, suffix='%s/%d' % (ni,pixel_num))
    prog_bar.close()

    dataCor = data + EstUnwrap

    ##### Output
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor.h5'
    print('writing >>> '+ifgram_cor_file)
    h5unwCor = h5py.File(ifgram_cor_file,'w') 
    gg = h5unwCor.create_group(k) 

    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram, data=np.reshape(dataCor[i,:],[width,length]).T, compression='gzip')
        for key, value in h5[k][ifgram].attrs.items():
            group.attrs[key] = value
        prog_bar.update(i+1)
    prog_bar.close()
    h5unwCor.close()
    h5.close()
    return ifgram_cor_file
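
Usage mirrors the docstring example above; the input stack must be spatially referenced (seeded) first:

ifgram_cor_file = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5',
                                                        'mask.h5')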
Code Example #18
File: match.py Project: Ovec8hkin/PySAR
def match_two_files(File1, File2, outName=None, manual_match=False, disp_fig=False):
    '''Match two geocoded files by estimating their offset.
    Works best for two files with an overlapping common area.
    '''
    
    # Read Input Files
    V1, atr1 = readfile.read(File1)
    V2, atr2 = readfile.read(File2)
    k = atr1['FILE_TYPE']
    print('---------------------------')
    print('matching 2 '+k+' files:\n'+File1+'\n'+File2)
    
    # Get Coverage Info 
    # Boundary Info - 2 Input Files
    West1,East1,North1,South1,width1,length1 = corners(atr1)
    West2,East2,North2,South2,width2,length2 = corners(atr2)
    # Boundary Info - Output File
    print('finding the corners of the whole area')
    West  = min(West1, West2)
    East  = max(East1, East2)
    North = max(North1,North2)
    South = min(South1,South2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width  = int(round((East - West )/lon_step + 1.0))
    length = int(round((South - North)/lat_step + 1.0))

    # Get Index of Input Files in Output Files
    lon_seq = np.arange(West, West +width *lon_step, lon_step) 
    lat_seq = np.arange(North, North+length*lat_step, lat_step)
    indx1 = nearest(West1,  lon_seq)[0]
    indy1 = nearest(North1, lat_seq)[0]
    indx2 = nearest(West2,  lon_seq)[0]
    indy2 = nearest(North2, lat_seq)[0]

    # Estimate Offset of overlaping area
    VV1 = np.zeros([length,width])
    VV2 = np.zeros([length,width])
    VV1[:,:] = np.nan
    VV2[:,:] = np.nan
    VV1[indy1:indy1+length1, indx1:indx1+width1] = V1
    VV2[indy2:indy2+length2, indx2:indx2+width2] = V2

    VV_diff = VV2 - VV1
    if not manual_match:
        offset = np.nansum(VV_diff) / np.sum(np.isfinite(VV_diff))

        if np.isnan(offset):
            print('**************************************************')
            print('WARNING:')
            print('')
            print('No common area found between two velocity maps')
            print('At least one common pixel is required.')
            print('No matching applied.')
            print('Continue with manual matching ...')
            print('    by selecting a line from each dataset to calculate the offset')
            print('**************************************************')
            manual_match = True
    if manual_match:
        offset = manual_offset_estimate(V1, V2)

    # Adjust File2 value using offset
    if np.isnan(offset):
        print('**************************************************')
        print('WARNING:')
        print('')
        print('No offset is estimated and no matching applied.')
        print('Continue to merge two input files without any adjustment.')
        print('**************************************************')   
    else:
        print('Average offset between the two velocities in the common area is: ' + str(offset))
        V2 = V2 - offset

    # Get merged data matrix value
    indv2 = np.isfinite(V2)
    VV = np.zeros([length,width])
    VV[:,:] = np.nan
    VV[indy1:indy1+length1, indx1:indx1+width1] = V1
    VV[indy2:indy2+length2, indx2:indx2+width2][indv2] = V2[indv2]
    
    # Write Output File
    if not outName:
        ext = os.path.splitext(File1)[1]
        outName = os.path.splitext(os.path.basename(File1))[0]+'_'+\
                  os.path.splitext(os.path.basename(File2))[0]+ext
    print('writing >>> '+outName)
    atr = atr1.copy()
    atr['WIDTH'] = width
    atr['FILE_LENGTH'] = length
    atr['X_FIRST'] = West
    atr['Y_FIRST'] = North
    writefile.write(VV, atr, outName)

    # Display
    fig_size = [16.0,16.0]
    fig = plt.figure(figsize=fig_size)
    print('plotting result ...')
    plt.subplot(2, 2, 1);  plt.imshow(VV1);      plt.title(File1);     plt.colorbar()
    plt.subplot(2, 2, 2);  plt.imshow(VV2);      plt.title(File2);     plt.colorbar()
    plt.subplot(2, 2, 3);  plt.imshow(VV);       plt.title(outName);   plt.colorbar()
    plt.subplot(2, 2, 4);  plt.imshow(VV_diff);  plt.title('Offset');  plt.colorbar()
    plt.tight_layout()
    plt.savefig(outName+'.png', bbox_inches='tight', transparent=True, dpi=150)
    print('save figure to '+outName+'.png')

    if disp_fig:
        print('showing ...')
        plt.show()

    return outName
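
A hypothetical call, merging two adjacent geocoded velocity maps:

outName = match_two_files('velocity_track1.h5', 'velocity_track2.h5',
                          outName='velocity_merged.h5', disp_fig=False)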
Code Example #19
File: baseline_error.py Project: Ovec8hkin/PySAR
def main(argv):
    try:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit(1)
        else:
            File = argv[0]
    except:
        usage()
        sys.exit(1)

    try:
        maskFile = argv[1]
    except IndexError:
        maskFile = None

    ##################################
    h5file = h5py.File(File, 'r')
    dateList = list(h5file['timeseries'].keys())
    ##################################

    ##### Read Mask File
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    if not maskFile:
        if os.path.isfile('Modified_Mask.h5'): maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'): maskFile = 'Mask.h5'
        else:
            print('No mask found!')
            sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print('mask: ' + maskFile)
    except:
        print('Can not open mask file: ' + maskFile)
        sys.exit(1)

    ##################################
    Mask = Mask.flatten('F')
    ndx = Mask != 0
    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    # read the first acquisition to get the data dimensions
    sy, sx = h5file['timeseries'].get(dateList[0]).shape
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten('F') * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print('''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        ''')
        sys.exit(1)
    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten('F')
    rs = lines * daz

    A = np.zeros([npixel, 4])

    A[:, 0] = Fh
    A[:, 1] = Fh * rs
    A[:, 2] = Fv
    A[:, 3] = Fv * rs

    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), 4])
    try:
        excludedDates = argv[2]
    except IndexError:
        excludedDates = []

    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    for i in range(1, len(dateList)):
        if dateList[i] not in excludedDates:
            dset = h5file['timeseries'].get(dateList[i])
            data = dset[0:dset.shape[0], 0:dset.shape[1]]
            L = data.flatten('F')
            Berror = np.dot(np.linalg.pinv(A[ndx]), L[ndx])
            Bh.append(Berror[0])
            Bhrate.append(Berror[1])
            Bv.append(Berror[2])
            Bvrate.append(Berror[3])
            Be[i, :] = Berror
        else:
            print(str(dateList[i]) + ' is not considered for Baseline Error estimation')

    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    print('baseline error           mean                          std')
    print('       bh     :  ' + str(np.mean(Bh)) + '     ,  ' + str(np.std(Bh)))
    print('     bh rate  :  ' + str(np.mean(Bhrate)) + '     ,  ' + str(np.std(Bhrate)))
    print('       bv     :  ' + str(np.mean(Bv)) + '     ,  ' + str(np.std(Bv)))
    print('     bv rate  :  ' + str(np.mean(Bvrate)) + '     ,  ' + str(np.std(Bvrate)))
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    print('bh error of each epoch:')
    print(Bh)
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    print('bv error of each epoch:')
    print(Bv)
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    print('Estimating Baseline error from each differences ...')

    Bedif = np.zeros([len(dateList), 4])
    for i in range(1, len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i - 1])
        data1 = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
        dset2 = h5file['timeseries'].get(dateList[i])
        data2 = dset2[0:dset2.shape[0], 0:dset2.shape[1]]
        data = data2 - data1
        L = data.flatten('F')
        Berrord = np.dot(np.linalg.pinv(A[ndx]), L[ndx])
        Bedif[i, :] = Berrord

    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')

    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])

    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(A, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print('Correcting the time series ...')
    outName = File.replace('.h5', '') + '_baselineCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')

    for key, value in h5file['timeseries'].attrs.items():
        group.attrs[key] = value

    try:
        dset1 = h5file['mask'].get('mask')
        group = h5orbCor.create_group('mask')
        dset = group.create_dataset('mask', data=dset1, compression='gzip')
    except:
        pass

    h5file.close()
    h5orbCor.close()
Code Example #20
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print('reading mask from file: ' + inps.mask_file)
    mask = readfile.read(inps.mask_file)[0].flatten('F')
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print('total            pixel number: %d' % pix_num)
    print('estimating using pixel number: %d' % msk_num)

    ##### Read DEM
    print('read DEM from file: ' + inps.dem_file)
    dem = readfile.read(inps.dem_file)[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print('considering the incidence angle of each pixel ...')
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten('F')
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print('polynomial order: %d' % inps.poly_order)

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print('Estimating the tropospheric effect between the differences of the subsequent epochs and DEM')

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print('number of acquisitions: ' + str(date_num))
    try:
        ref_date = atr['ref_date']
    except KeyError:
        ref_date = date_list[0]

    print('----------------------------------------------------------')
    print('correlation of DEM with each time-series epoch:')
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten('F')

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print('%s: %.2f' % (date, cc))
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print('----------------------------------------------------------')
    print('Average Correlation of DEM with time-series epochs: %.2f' %
          average_phase_height_corr)

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten('F')
        data2 = h5[k].get(date2)[:].flatten('F')
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print('----------------------------------------------------------')
    print('removing the stratified tropospheric delay from each epoch')
    print('writing >>> ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.items():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print('Done.')
    return inps.outfile
Code Example #21
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print('convert DEM file to ROIPAC format')
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in list(atr_dem.keys()):
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print('*******************************************************************************')
    print('Downloading weather model data ...')

    ## Get Grib Source
    if   inps.weather_model in ['ECMWF','ERA-Interim']:   inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA'  :                   inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':                   inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR' :                   inps.grib_source = 'NARR'
    else: raise Exception('Unrecognized weather model: '+inps.weather_model)
    print('grib source: '+inps.grib_source)

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print('Store weather data into directory: '+inps.weather_dir)

    # Get date list to download
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print('read date list info from: '+inps.timeseries_file)
    else:
        dateList = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print('read date list info from: '+inps.date_list_file)

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print('Time of closest available product: '+inps.hour)

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(dateList, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print('Download completed, exit as planned.')
        return

    print('*******************************************************************************')
    print('Calculating delay for each epoch.')

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print('incidence angle: '+str(inps.incidence_angle))
    else:
        print('calculating incidence angle ...')
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle*np.pi/180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source+'.h5'
    print('writing >>> '+tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
    print('writing >>> '+inps.out_file)
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    try:    ref_date = atr['ref_date']
    except: ref_date = dateList[0]
    print('calculating phase delay on reference date: '+ref_date)
    ref_date_grib_file = None
    for fname in inps.grib_file_list:
        if ref_date in fname:
            ref_date_grib_file = fname
    phs_ref = get_delay(ref_date_grib_file, atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        grib_file = inps.grib_file_list[i] 
        date = re.findall(r'\d{8}', grib_file)[0]

        # Get phase delay
        if date != ref_date:
            print('calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file)))
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print('writing to HDF5 files ...')
        data = h5timeseries['timeseries'].get(date)[:]
        dset  = group_tropCor.create_dataset(date, data=data-phs, compression='gzip')
        dset  = group_trop.create_dataset(date, data=phs, compression='gzip')

    ## Write Attributes
    for key,value in atr.items():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value
    
    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm '+inps.dem_file+' '+inps.dem_file+'.rsc '
        print(rmCmd)
        os.system(rmCmd)
    
    print('Done.')

    return
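
The incidence-angle handling above exists to project a zenith delay onto the radar line of sight. A minimal sketch of that projection (a constant angle is assumed here; the code above also accepts a per-pixel angle file):

import numpy as np

inc_angle = 34.0 * np.pi / 180.0                 # hypothetical incidence angle, radians
zenith_delay = np.full((100, 100), 2.4)          # hypothetical zenith delay in meters
slant_delay = zenith_delay / np.cos(inc_angle)   # delay along the line of sight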
Code example #22
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking ' + k + ' file ' + infile)
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print('writing >>> ' + outfile)

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k == '.trans':
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
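
A usage sketch for the function above, plus a block-averaging reimplementation of what multilook_matrix presumably does (my assumption, not the library code):

import numpy as np

def multilook_simple(data, lks_y, lks_x):
    # crop to a multiple of the look window, then average each block
    length = data.shape[0] - data.shape[0] % lks_y
    width = data.shape[1] - data.shape[1] % lks_x
    tmp = data[:length, :width].reshape(length // lks_y, lks_y,
                                        width // lks_x, lks_x)
    return tmp.mean(axis=(1, 3))

# e.g. 10 looks in azimuth, 10 in range:
# outfile = multilook_file('timeseries.h5', 10, 10)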
Code example #23
File: transect.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()
    print('\n**************** Transect *********************')
    print('number of file: ' + str(len(inps.file)))
    print(inps.file)

    ##### Start / End Point Input
    # 1. lonlat file
    if inps.lola_file:
        inps.start_lalo, inps.end_lalo = read_lonlat_file(inps.lola_file)
    # 2. Manually select in window display
    if (not inps.start_yx or not inps.end_yx) and (not inps.start_lalo
                                                   or not inps.end_lalo):
        print('No input yx/lalo found.')
        print('Continuing with manual selection of the start and end points.')
        inps.start_yx, inps.end_yx = manual_select_start_end_point(
            inps.file[0])
    # Message
    if inps.start_lalo and inps.end_lalo:
        print('Start point:  lat = ' + str(inps.start_lalo[0]) + ', lon = ' +
              str(inps.start_lalo[1]))
        print('End   point:  lat = ' + str(inps.end_lalo[0]) + ', lon = ' +
              str(inps.end_lalo[1]))
    else:
        print('Start point:  y = ' + str(inps.start_yx[0]) + ', x = ' +
              str(inps.start_yx[1]))
        print('End   point:  y = ' + str(inps.end_yx[0]) + ', x = ' +
              str(inps.end_yx[1]))

    ##### Get Transection/Profiles Data
    print('extract transect from input files ...')
    transectList, atrList = transect_list(inps.file, inps)
    if inps.dem:
        demTransectList, demAtrList = transect_list([inps.dem], inps)
    print('profile length: ' + str(transectList[0][-1, 0] / 1000.0) + ' km')

    ##### Plot
    # Figure 1 - Profile line in the 1st input file
    print('plot profile line in the 1st input file')
    fig0 = plt.figure()
    ax0 = fig0.add_axes([0.1, 0.1, 0.8, 0.8])
    data0, atr0 = readfile.read(inps.file[0])
    ax0.imshow(data0)
    if inps.start_lalo and inps.end_lalo:
        [y0, y1] = sub.coord_geo2radar([inps.start_lalo[0], inps.end_lalo[0]],
                                       atr0, 'lat')
        [x0, x1] = sub.coord_geo2radar([inps.start_lalo[1], inps.end_lalo[1]],
                                       atr0, 'lon')
    else:
        [y0, y1] = [inps.start_yx[0], inps.end_yx[0]]
        [x0, x1] = [inps.start_yx[1], inps.end_yx[1]]
    ax0.plot([x0, x1], [y0, y1], 'ro-')
    ax0.set_xlim(0, np.shape(data0)[1])
    ax0.set_ylim(np.shape(data0)[0], 0)
    ax0.set_title('Transect Line in ' + inps.file[0])

    # Status bar
    def format_coord(x, y):
        col = int(x)
        row = int(y)
        if 0 <= col < data0.shape[1] and 0 <= row < data0.shape[0]:
            z = data0[row, col]
            if 'X_FIRST' in list(atr0.keys()):
                lat = sub.coord_radar2geo(row, atr0, 'row')
                lon = sub.coord_radar2geo(col, atr0, 'col')
                return 'lon=%.4f, lat=%.4f, x=%.0f,  y=%.0f,  value=%.4f' % (
                    lon, lat, x, y, z)
            else:
                return 'x=%.0f,  y=%.0f,  value=%.4f' % (x, y, z)
        else:
            return 'x=%.0f,  y=%.0f' % (x, y)

    ax0.format_coord = format_coord

    # Figure 2 - Transections/Profiles
    print('plot profiles')
    fig, ax = plt.subplots(figsize=inps.fig_size)
    # Plot 2.1 - Input Files
    if not inps.disp_unit:
        inps.disp_unit = atrList[0]['UNIT']
    inps.disp_scale, inps.disp_unit = get_scale_from_disp_unit(
        inps.disp_unit, atrList[0]['UNIT'])

    value_min = 0
    value_max = 0
    for i in range(len(inps.file)):
        # Profile Color based on Asc/Desc direction
        if atrList[i]['ORBIT_DIRECTION'][0].upper() == 'A':
            p_color = 'crimson'
        else:
            p_color = 'royalblue'
        # Plot
        distance = transectList[i][:, 0] / 1000.0  # km
        value = transectList[i][:, 1] * inps.disp_scale - inps.disp_offset * i
        ax.plot(distance,
                value,
                '.',
                color=p_color,
                markersize=inps.marker_size)
        # Y Stat
        value_min = np.nanmin([value_min, np.nanmin(value)])
        value_max = np.nanmax([value_max, np.nanmax(value)])

    # Y axis
    if not inps.disp_min:
        inps.disp_min = np.floor(value_min - (value_max - value_min) * 1.2 /
                                 len(inps.file))
    if not inps.disp_max:
        inps.disp_max = np.ceil(value_max)
    ax.set_ylim(inps.disp_min, inps.disp_max)
    ax.set_ylabel('Mean LOS Velocity (' + inps.disp_unit + ')',
                  fontsize=inps.font_size)
    # X axis
    ax.set_xlabel('Distance (km)', fontsize=inps.font_size)
    ax.tick_params(which='both', direction='out', labelsize=inps.font_size)

    # Plot 2.2 - DEM
    if inps.dem:
        ax2 = ax.twinx()
        distance = demTransectList[0][:, 0] / 1000.0  # km
        value = demTransectList[0][:, 1] / 1000.0  # km
        ax2.fill_between(distance, 0, value, facecolor='gray')

        # Y axis - display DEM in the bottom
        value_min = np.nanmin(value)
        value_max = np.nanmax(value)
        if not inps.dem_disp_min:
            inps.dem_disp_min = np.floor(value_min * 2.0) / 2.0
        if not inps.dem_disp_max:
            inps.dem_disp_max = np.ceil((value_max + (value_max - value_min) *
                                         (len(inps.file) + 0.0)) * 2.0) / 2.0
        ax2.set_ylim(inps.dem_disp_min, inps.dem_disp_max)
        ## Show lower part of yaxis
        #dem_tick = ax2.yaxis.get_majorticklocs()
        #dem_tick = dem_tick[:len(dem_tick)/2]
        #ax2.set_yticks(dem_tick)
        ax2.set_ylabel('Elevation (km)', fontsize=inps.font_size)
        ax2.tick_params(which='both',
                        direction='out',
                        labelsize=inps.font_size)

    ## X axis - Shared
    distanceMax = np.nanmax(transectList[0][:, 0] / 1000.0)  # in km
    plt.xlim(0, distanceMax)
    plt.tight_layout()

    ##### Output
    if not inps.outfile:
        figBase = 'transect_x' + str(x0) + 'y' + str(y0) + '_x' + str(
            x1) + 'y' + str(y1)
    else:
        figBase, inps.outfile_ext = os.path.splitext(inps.outfile)
        if not inps.outfile_ext:
            inps.outfile_ext = '.png'
    if inps.save_fig:
        print('writing >>> ' + figBase + inps.outfile_ext)
        fig0.savefig(inps.file[-1] + inps.outfile_ext,
                     bbox_inches='tight',
                     transparent=True,
                     dpi=inps.fig_dpi)
        fig.savefig(figBase + inps.outfile_ext,
                    bbox_inches='tight',
                    transparent=True,
                    dpi=inps.fig_dpi)

        print('writing >>> ' + figBase + '.mat')
        transect_mat = {}
        for i in range(len(inps.file)):
            project = atrList[i]['PROJECT_NAME']
            transect_mat[project] = transectList[i]
        if inps.dem:
            transect_mat['elevation'] = demTransectList[0]
        sio.savemat(figBase + '.mat', {'transection': transect_mat})

    # Display
    if inps.disp_fig:
        print('showing ...')
        plt.show()
    return
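
The profile data plotted above comes from transect_list/transect_yx; a minimal sketch of sampling a raster along a straight line, using scipy.ndimage.map_coordinates for interpolation (an assumption about the approach, not the library code):

import numpy as np
from scipy.ndimage import map_coordinates

def simple_transect(data, start_yx, end_yx, num=200):
    ys = np.linspace(start_yx[0], end_yx[0], num)
    xs = np.linspace(start_yx[1], end_yx[1], num)
    values = map_coordinates(data, np.vstack((ys, xs)), order=1)
    dist = np.hypot(ys - ys[0], xs - xs[0])   # distance in pixels
    return np.column_stack((dist, values))    # N*2 matrix: distance, value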
Code example #24
File: generate_mask.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()

    # Input File Info
    atr = readfile.read_attribute(inps.file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    print('Input file is '+k+': '+inps.file)

    # default output filename
    if not inps.outfile:
        if k == 'temporal_coherence':
            inps.outfile = 'maskTempCoh.h5'
        else:
            inps.outfile = 'mask.h5'
        if inps.file.startswith('geo_'):
            inps.outfile = 'geo_'+inps.outfile

    ##### Mask: Non-zero
    if inps.nonzero:
        print('generate mask for all pixels with non-zero value')
        inps.outfile = ut.nonzero_mask(inps.file, inps.outfile)
        return inps.outfile

    ##### Mask: Threshold 
    print('create initial mask with the same size as the input file and all = 1')
    mask = np.ones((length, width), dtype=np.float32)

    data, atr = readfile.read(inps.file, inps.epoch)

    # min threshold
    if inps.vmin is not None:
        mask[data<inps.vmin] = 0
        print('all pixels with value < %s = 0' % str(inps.vmin))

    # max threshold
    if inps.vmax is not None:
        mask[data>inps.vmax] = 0
        print('all pixels with value > %s = 0' % str(inps.vmax))

    # nan value
    mask[np.isnan(data)] = 0
    print('all pixels with nan value = 0')

    # subset in Y
    if inps.subset_y:
        y0,y1 = sorted(inps.subset_y)
        mask[0:y0,:] = 0
        mask[y1:length,:] = 0
        print('all pixels with y OUT of [%d, %d] = 0' % (y0,y1))

    # subset in x
    if inps.subset_x:
        x0,x1 = sorted(inps.subset_x)
        mask[:,0:x0] = 0
        mask[:,x1:width] = 0
        print('all pixels with x OUT of [%d, %d] = 0' % (x0,x1))
  
    ## Write mask file
    print('writing >>> '+inps.outfile)
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask, atr, inps.outfile)
    return inps.outfile
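
The thresholding above reduces to a few numpy operations; a minimal equivalent sketch (data and threshold values are hypothetical):

import numpy as np

data = np.random.rand(100, 100)        # e.g. temporal coherence
mask = np.ones(data.shape, np.float32)
mask[data < 0.7] = 0                   # min threshold, like inps.vmin
mask[np.isnan(data)] = 0               # drop invalid pixels
mask[:, :20] = 0                       # subset in x, like inps.subset_x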
Code example #25
def mask_file(File, maskFile, outFile=None, inps_dict=None):
    ''' Mask input File with maskFile
    Inputs:
        File/maskFile - string, 
        inps_dict - dictionary including the following options:
                    subset_x/y - list of 2 ints, subset in x/y direction
                    thr - float, threshold/minValue to generate mask
    Output:
        outFile - string
    '''

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('masking ' + k + ' file: ' + File + ' ...')

    # Read maskFile
    atrm = readfile.read_attribute(maskFile)
    km = atrm['FILE_TYPE']
    if km not in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print('reading mask file: ' + maskFile)
        mask = readfile.read(maskFile)[0]
        if inps_dict:
            mask = update_mask(mask, inps_dict)

    if not outFile:
        outFile = os.path.splitext(File)[0] + '_masked' + os.path.splitext(
            File)[1]

    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())

        h5out = h5py.File(outFile, 'w')
        print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k == 'timeseries':
        print('number of acquisitions: ' + str(len(epochList)))
        group = h5out.create_group(k)
        for d in epochList:
            print(d)
            unw = h5file[k].get(d)[:]

            unw = mask_matrix(unw, mask)

            dset = group.create_dataset(d, data=unw, compression='gzip')
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        gg = h5out.create_group(k)

        # Mask multi group file with multi group coherence file
        if km == 'coherence':
            h5mask = h5py.File(maskFile, 'r')
            cohList = sorted(h5mask[km].keys())
            if len(cohList) != len(epochList):
                sys.exit('ERROR: coherence mask file has a different number of '
                         'interferograms than the input file!')

        for i in range(len(epochList)):
            igram = epochList[i]
            print(igram)
            unw = h5file[k][igram].get(igram)[:]

            if km == 'coherence':
                coh = cohList[i]
                print(coh)
                mask = h5mask[km][coh].get(coh)[:]
                if inps_dict:
                    mask = update_mask(mask, inps_dict)

            unw = mask_matrix(unw, mask)

            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=unw, compression='gzip')
            for key, value in h5file[k][igram].attrs.items():
                group.attrs[key] = value

    ##### Single Dataset File
    else:
        unw, atr = readfile.read(File)
        unw = mask_matrix(unw, mask)
        print('writing >>> ' + outFile)
        writefile.write(unw, atr, outFile)

    try:
        h5file.close()
    except:
        pass
    try:
        h5out.close()
    except:
        pass
    try:
        h5mask.close()
    except:
        pass
    return outFile
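
A usage sketch for the function above (file names are hypothetical):

# mask a single-dataset file with a temporal-coherence mask
out = mask_file('velocity.h5', 'maskTempCoh.h5')

# mask a time series and name the output explicitly
out = mask_file('timeseries.h5', 'mask.h5', outFile='timeseries_masked.h5')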
Code example #26
File: subset.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print('number of input files: ' + str(len(inps.file)))
    print(inps.file)

    #print '\n**************** Subset *********************'
    atr = readfile.read_attribute(inps.file[0])

    ##### Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > tight
    if not inps.subset_x and not inps.subset_y and not inps.subset_lat and not inps.subset_lon:
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print('using subset info from ' + inps.reference)

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print('using subset info from ' + inps.template_file)

        # 3. Use subset from tight info
        elif inps.tight:
            if atr['FILE_TYPE'] == '.trans':
                # Non-zero area in geomap_*.trans file, accurate
                trans_rg, trans_atr = readfile.read(inps.file[0], (), 'range')
                idx_row, idx_col = np.nonzero(trans_rg)
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = box_pixel2geo(pix_box, trans_atr)
            else:
                print('ERROR: --tight option only works for geomap_*.trans file.\n')
                sys.exit(1)

            ## from LAT/LON_REF*, which is not accurate
            #lats = [atr['LAT_REF1'], atr['LAT_REF3'], atr['LAT_REF4'], atr['LAT_REF2']]
            #lons = [atr['LON_REF1'], atr['LON_REF3'], atr['LON_REF4'], atr['LON_REF2']]
            #lats = [float(i) for i in lats]
            #lons = [float(i) for i in lons]
            #lalo_buff = min([max(lats)-min(lats), max(lons)-min(lons)]) * 0.05
            #geo_box = (min(lons)-lalo_buff, max(lats)+lalo_buff, max(lons)+lalo_buff, min(lats)-lalo_buff)
            #pix_box = None
            #if not inps.fill_value: inps.fill_value = np.nan
            #print 'using subset info from scene footprint - LAT/LON_REF1/2/3/4'
        else:
            raise Exception('No subset inputs found!')
        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)

    ##### --bbox option
    if inps.trans_file:
        ## Separate files in radar and geo coord
        rdrFileList = []
        geoFileList = []
        for File in inps.file:
            atr = readfile.read_attribute(File)
            if 'X_FIRST' in list(atr.keys()):
                geoFileList.append(File)
            else:
                rdrFileList.append(File)

        ## Calculate bbox
        rdrFile = rdrFileList[0]
        atr_rdr = readfile.read_attribute(rdrFile)
        if inps.subset_lat and inps.subset_lon:
            print('use subset input in lat/lon')
            print('calculate corresponding bounding box in radar coordinate.')
            geo_box = (inps.subset_lon[0], inps.subset_lat[1],
                       inps.subset_lon[1], inps.subset_lat[0])
            pix_box = bbox_geo2radar(geo_box, atr_rdr, inps.trans_file)
        else:
            print('use subset input in y/x')
            print('calculate corresponding bounding box in geo coordinate.')
            pix_box = (inps.subset_x[0], inps.subset_y[0], inps.subset_x[1],
                       inps.subset_y[1])
            geo_box = bbox_radar2geo(pix_box, atr_rdr, inps.trans_file)
        print('geo   box: ' + str(geo_box))
        print('pixel box: ' + str(pix_box))

        ## Subset files
        inps.fill_value = 0
        print('--------------------------------------------')
        print('subsetting dataset in geo coord geo_box: ' + str(geo_box))
        inps = subset_box2inps(inps, None, geo_box)
        subset_file_list(geoFileList, inps)
        print('--------------------------------------------')
        print('subsetting dataset in radar coord pix_box: ' + str(pix_box))
        inps = subset_box2inps(inps, pix_box, None)
        subset_file_list(rdrFileList, inps)

    else:
        ##### Subset files
        subset_file_list(inps.file, inps)

    print('Done.')
    return
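
Every path above eventually reduces to slicing with a pixel box in the (x0, y0, x1, y1) convention; a minimal sketch of that convention (hypothetical sizes):

import numpy as np

data = np.random.rand(500, 400)   # hypothetical raster, shape (length, width)
pix_box = (100, 50, 300, 250)     # (x0, y0, x1, y1)
x0, y0, x1, y1 = pix_box
data_sub = data[y0:y1, x0:x1]     # rows are y/azimuth, columns are x/range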
Code example #27
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print('estimate phase ramp during the correction')
    print('ramp type: '+ramp_type)

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i],x_list[i]] == 0:
            print('\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n' % (y_list[i],x_list[i]))
            sys.exit(1)
    print('Number of bridges: '+str(len(x_list)//2))
    print('Bonding points coordinates:\nx: '+str(x_list)+'\ny: '+str(y_list))

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx  = ''
        n_bridge = len(x_list)//2
        for i in range(n_bridge):
            pair_yx = str(y_list[2*i])+','+str(x_list[2*i])+','+str(y_list[2*i+1])+','+str(x_list[2*i+1])
            if not i == n_bridge-1:
                point_yx += pair_yx+','
                line_yx  += pair_yx+';'
            else:
                point_yx += pair_yx
                line_yx  += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print(plot_cmd)
            os.system(plot_cmd)
        except: pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor'+ext
    ifgram_cor_deramp_file = os.path.splitext(ifgram_cor_file)[0]+'_'+ramp_type+ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file,'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file,'w')
        group = h5out.create_group(k)
        print('writing >>> '+ifgram_cor_file)

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file,'w')
            group_deramp = h5out_deramp.create_group(k)
            print('writing >>> '+ifgram_cor_deramp_file)

        ##### Loop
        print('Number of interferograms: '+str(ifgram_num))
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_derampCor-ramp, compression='gzip')
            for key, value in h5[k][ifgram].attrs.items():
                gg.attrs[key]=value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram, data=data_derampCor, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg_deramp.attrs[key]=value
            prog_bar.update(i+1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try: h5out_deramp.close()
        except: pass

    #### .unw file
    elif ext == '.unw':
        print('read '+ifgram_file)
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
        data_derampCor = bridging_data(data_deramp,mask,x_list,y_list)
        data_cor = data_derampCor - ramp

        print('writing >>> '+ifgram_cor_file)
        ifgram_cor_file        = writefile.write(data_cor,       atr, ifgram_cor_file)
        if save_cor_deramp_file:
            print('writing >>> '+ifgram_cor_deramp_file)
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr, ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: '+ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
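
For intuition, bridging shifts each disconnected patch by an integer number of 2*pi cycles so that the phase agrees across each bonding-point pair. A sketch of one such adjustment (my assumption of what bridging_data does, not the library code):

import numpy as np

def bridge_once(data, mask, y_ref, x_ref, y_cor, x_cor):
    # integer number of 2*pi cycles between the two bonding points
    n_cycle = np.rint((data[y_cor, x_cor] - data[y_ref, x_ref]) / (2.0 * np.pi))
    # shift the whole patch that contains the second point
    patch = mask == mask[y_cor, x_cor]
    data[patch] -= 2.0 * np.pi * n_cycle
    return data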
Code example #28
File: asc_desc.py Project: Ovec8hkin/PySAR
def main(argv):
    inps = cmdLineParse()

    ##### 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1,atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print('Input 1st file is '+k1)

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width  = int(round((east  - west )/lon_step))
    length = int(round((south - north)/lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incident angle
    u_los = np.zeros((2, width*length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print('---------------------')
        print('reading '+fname)
        atr = readfile.read_attribute(fname)

        [x0,x1] = subset.coord_geo2radar([west,east], atr, 'lon')
        [y0,y1] = subset.coord_geo2radar([north,south], atr, 'lat')
        V = readfile.read(fname, (x0,y0,x1,y1))[0]
        u_los[i,:] = V.flatten()

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print('heading angle: '+str(heading_angle))
        heading_angle *= np.pi/180.
        heading.append(heading_angle)
        
        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        #print 'incidence angle: '+str(inc_angle)
        inc_angle *= np.pi/180.
        incidence.append(inc_angle)


    ##### 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could be easily modified to support multiple view geometry (e.g. two adjcent tracks from asc & desc) to resolve 3D

    # Design matrix
    A = np.zeros((2,2))
    for i in range(len(inps.file)):
        A[i,0] = np.cos(incidence[i])
        A[i,1] = np.sin(incidence[i]) * np.sin(heading[i]-inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0,:], (length, width))
    u_h = np.reshape(u_vh[1,:], (length, width))

    ##### 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['FILE_LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print('---------------------')
    outname = inps.outfile[0]
    print('writing   vertical component to file: '+outname)
    writefile.write(u_v, atr, outname)

    outname = inps.outfile[1]
    print('writing horizontal component to file: '+outname)
    writefile.write(u_h, atr, outname)

    print('Done.')
    return
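
A worked numeric sketch of the 2D projection above, with hypothetical geometry for one ascending and one descending track:

import numpy as np

incidence = np.radians([34.0, 39.0])     # hypothetical incidence angles
heading = np.radians([350.0, 190.0])     # hypothetical heading angles
azimuth = 0.0                            # assumed azimuth of horizontal motion

A = np.zeros((2, 2))
for i in range(2):
    A[i, 0] = np.cos(incidence[i])
    A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - azimuth)

u_true = np.array([0.01, 0.005])         # vertical, horizontal (m/yr)
u_los = np.dot(A, u_true)                # forward-model two LOS observations
u_vh = np.dot(np.linalg.pinv(A), u_los)  # invert, as main() does per pixel
print(u_vh)                              # recovers ~[0.01, 0.005]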
Code example #29
def main(argv):

    try:
        File = argv[0]
        demFile = argv[1]
        p = int(argv[2])
    except:
        usage()
        sys.exit(1)

    try:
        baseline_error = argv[3]
    except:
        baseline_error = 'range_and_azimuth'
    print(baseline_error)
    ##################################
    h5file = h5py.File(File, 'r')
    dateList = list(h5file['timeseries'].keys())
    ##################################

    try:
        maskFile = argv[4]
    except:
        if os.path.isfile('Modified_Mask.h5'): maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'): maskFile = 'Mask.h5'
        else:
            print('No mask found!')
            sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print('mask: ' + maskFile)
    except:
        print('Can not open mask file: ' + maskFile)
        sys.exit(1)

    #try:
    #  maskFile=argv[4]
    #  h5Mask = h5py.File(maskFile,'r')
    #  kMask=h5Mask.keys()
    #  dset1 = h5Mask[kMask[0]].get(kMask[0])
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
    #except:
    #  dset1 = h5file['mask'].get('mask')
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]

    ##################################
    Mask = Mask.flatten('F')
    ndx = Mask != 0
    ##################################
    # h5file = h5py.File(File)
    # dateList = h5file['timeseries'].keys()
    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    dset1 = h5file['timeseries'].get(dateList[0])
    sy, sx = np.shape(dset1)
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten('F') * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    print('Looking for azimuth pixel size')
    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print('''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        ''')
        sys.exit(1)

    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten('F')
    rs = lines * daz

    if baseline_error == 'range_and_azimuth':
        A = np.zeros([npixel, 4])

        A[:, 0] = Fh
        A[:, 1] = Fh * rs
        A[:, 2] = Fv
        A[:, 3] = Fv * rs
        num_base_par = 4
    elif baseline_error == 'range':
        A = np.zeros([npixel, 2])

        A[:, 0] = Fh
        A[:, 1] = Fv
        num_base_par = 2

    ###########################################
    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])
    ###########################################
    if os.path.splitext(demFile)[1] == '.hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.splitext(demFile)[1] == '.dem':
        dem, demRsc = readfile.read_real_int16(demFile)

    dem = dem - dem[yref, xref]
    dem = dem.flatten('F')
    ###################################################
    if p == 1:
        # A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem, np.ones(len(dem)))).T
    elif p == 2:
        # A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2, dem, np.ones(len(dem)))).T
    elif p == 3:
        #  A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(len(dem)))).T
    print(np.shape(A))

    ###################################################

    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), num_base_par + p + 1])
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    for i in range(1, len(dateList)):
        dset = h5file['timeseries'].get(dateList[i])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        L = data.flatten('F')
        M = np.hstack((A, B))
        Berror = np.dot(np.linalg.pinv(M[ndx]), L[ndx])
        Bh.append(Berror[0])
        Bhrate.append(Berror[1])
        Bv.append(Berror[2])
        Bvrate.append(Berror[3])
        Be[i, :] = Berror
        print(Berror)
    print(
        '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    )
    print('baseline error           mean                          std')
    print('       bh     :  ' + str(np.mean(Bh)) + '     ,  ' +
          str(np.std(Bh)))
    print('     bh rate  :  ' + str(np.mean(Bhrate)) + '     ,  ' +
          str(np.std(Bhrate)))
    print('       bv     :  ' + str(np.mean(Bv)) + '     ,  ' +
          str(np.std(Bv)))
    print('     bv rate  :  ' + str(np.mean(Bvrate)) + '     ,  ' +
          str(np.std(Bvrate)))
    print(
        '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    )
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    # print 'Estimating Baseline error from each differences ...'

    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(M, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print('Correcting the time series ...')
    outName = File.replace('.h5', '') + '_baseTropCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')

    for key, value in h5file['timeseries'].attrs.items():
        group.attrs[key] = value

    dset1 = h5file['mask'].get('mask')
    group = h5orbCor.create_group('mask')
    dset = group.create_dataset('mask', data=dset1, compression='gzip')

    h5file.close()
    h5orbCor.close()
    return
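
The estimation above stacks the baseline columns (A) and a DEM polynomial (B) into one design matrix M and solves per epoch; a minimal sketch of that joint least-squares step with hypothetical sizes:

import numpy as np

npixel = 1000
dem = np.random.rand(npixel) * 2000.0        # hypothetical elevations
A = np.random.rand(npixel, 4)                # baseline columns: Fh, Fh*rs, Fv, Fv*rs
B = np.vstack((dem, np.ones(npixel))).T      # linear DEM polynomial (p == 1)

M = np.hstack((A, B))                        # joint design matrix
L = np.random.rand(npixel)                   # hypothetical phase observations
Berror = np.dot(np.linalg.pinv(M), L)        # 4 baseline + p + 1 DEM parameters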
Code example #30
def main(argv):
    inps = cmdLineParse()

    atr = readfile.read_attribute(inps.velocity_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Check subset input
    if inps.subset_y:
        inps.subset_y = sorted(inps.subset_y)
        print('subset in y/azimuth direction: ' + str(inps.subset_y))
    else:
        inps.subset_y = [0, length]

    if inps.subset_x:
        inps.subset_x = sorted(inps.subset_x)
        print('subset in x/range direction: ' + str(inps.subset_x))
    else:
        inps.subset_x = [0, width]
    y0, y1 = inps.subset_y
    x0, x1 = inps.subset_x

    # Read velocity/rate
    velocity = readfile.read(inps.velocity_file)[0]
    print('read velocity file: ' + inps.velocity_file)

    k = 'interferograms'
    h5 = h5py.File(inps.ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    print('number of interferograms: ' + str(ifgram_num))

    ##### Select interferograms with unwrapping error
    if inps.percentage > 0.0:
        mask = readfile.read(inps.mask_file)[0]
        print('read mask for pixels with unwrapping error from file: ' +
              inps.mask_file)

        unw_err_ifgram_num = int(np.rint(inps.percentage * ifgram_num))
        unw_err_ifgram_idx = random.sample(list(range(ifgram_num)),
                                           unw_err_ifgram_num)
        unw_err_ifgram_list = [ifgram_list[i] for i in unw_err_ifgram_idx]
        unw_err_date12_list = [date12_list[i] for i in unw_err_ifgram_idx]
        print(
            'randomly choose the following %d interferograms with unwrapping error'
            % unw_err_ifgram_num)
        print(unw_err_date12_list)

        unit_unw_err = 2.0 * np.pi * mask
    else:
        unw_err_ifgram_list = []

    ###### Generate simulated interferograms
    m_dates = ptime.yyyymmdd([i.split('-')[0] for i in date12_list])
    s_dates = ptime.yyyymmdd([i.split('-')[1] for i in date12_list])
    range2phase = -4.0 * np.pi / float(atr['WAVELENGTH'])

    print('writing simulated interferograms file: ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group('interferograms')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # Get temporal baseline in years
        t1 = datetime.datetime(*time.strptime(m_dates[i], "%Y%m%d")[0:5])
        t2 = datetime.datetime(*time.strptime(s_dates[i], "%Y%m%d")[0:5])
        dt = (t2 - t1)
        dt = float(dt.days) / 365.25

        # Simulated interferograms with unwrapping error
        unw = velocity * dt * range2phase
        if ifgram in unw_err_ifgram_list:
            rand_int = random.sample(list(range(1, 10)), 1)[0]
            unw += rand_int * unit_unw_err
            print(ifgram + '  - add unwrapping error of %d*2*pi' % rand_int)
        else:
            print(ifgram)

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram,
                                 data=unw[y0:y1, x0:x1],
                                 compression='gzip')

        for key, value in h5[k][ifgram].attrs.items():
            gg.attrs[key] = value
        if ifgram in unw_err_ifgram_list:
            gg.attrs['unwrap_error'] = 'yes'
        else:
            gg.attrs['unwrap_error'] = 'no'
        gg.attrs['FILE_LENGTH'] = y1 - y0
        gg.attrs['WIDTH'] = x1 - x0
    h5.close()
    h5out.close()
    print('Done.')
    return inps.outfile
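
The conversion used above, from LOS velocity and temporal baseline to unwrapped phase, in isolation (hypothetical numbers; the wavelength is in meters, as in atr['WAVELENGTH']):

import numpy as np

wavelength = 0.0562            # hypothetical C-band wavelength, meters
velocity = 0.01                # hypothetical LOS velocity, m/yr
dt = 351.0 / 365.25            # temporal baseline in years

range2phase = -4.0 * np.pi / wavelength
unw = velocity * dt * range2phase   # simulated unwrapped phase, radians
print('%.2f rad' % unw)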