Example #1
    def load_dataset(group, dsetList, fname, h5dsetList):
        ## Read input file
        if len(dsetList) == 2:
            d0, d1, atr = readfile.read(fname)
            dataList = [d0, d1]
        else:
            d0, atr = readfile.read(fname)
            dataList = [d0]

        ## Load into HDF5 file
        for dsetName in dsetList:
            idx = dsetList.index(dsetName)
            if dsetName not in h5dsetList:
                print 'Add %s from %s' % (dsetName, os.path.basename(fname))
                dset = group.create_dataset(dsetName,
                                            data=dataList[idx],
                                            compression='gzip')
            else:
                if exDict['force_load']:
                    print 'Update %s from %s' % (dsetName,
                                                 os.path.basename(fname))
                    dset = group.get(dsetName)
                    dset[:] = dataList[idx]
                else:
                    print '%s already exists, no need to re-load from %s.' % (
                        dsetName, os.path.basename(fname))
        return group, atr
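A minimal usage sketch for load_dataset above (hypothetical file and dataset names; note the function also reads a module-level exDict with a 'force_load' key):

import h5py

exDict = {'force_load': False}                        # referenced inside load_dataset
h5 = h5py.File('timeseries.h5', 'a')                  # example HDF5 file (assumption)
group = h5['timeseries']
group, atr = load_dataset(group,
                          dsetList=['height'],        # dataset name(s) to write
                          fname='radar.hgt',          # example input file (assumption)
                          h5dsetList=list(group.keys()))
h5.close()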
Example #2
def main(argv):

    inps = cmdLineParse()
    TXT = inps.gps_txt
    GEO = inps.geometryGeo

    GPS = np.loadtxt(TXT, dtype=str)
    GPS_Nm = GPS[:, 0]
    GPS_Nm = GPS_Nm.tolist()
    GPS_LAT = GPS[:, 1]
    GPS_LON = GPS[:, 2]
    N = len(GPS_Nm)

    data_inc, atr = readfile.read(GEO, epoch='incidenceAngle')
    data_head, atr = readfile.read(GEO, epoch='headingAngle')

    nWidthUTM = atr['WIDTH']
    nLineUTM = atr['FILE_LENGTH']

    Corner_LAT = atr['Y_FIRST']
    Corner_LON = atr['X_FIRST']

    post_Lat = atr['Y_STEP']
    post_Lon = atr['X_STEP']

    OUT = 'gps_geometry_par.txt'

    if os.path.isfile(OUT):
        os.remove(OUT)

    for i in range(N):
        LAT = GPS_LAT[i]
        LON = str(float(GPS_LON[i]) - 360)
        NM = GPS_Nm[i]
        XX = int((float(LAT) - float(Corner_LAT)) /
                 float(post_Lat))  # row index (line / azimuth)
        YY = int((float(LON) - float(Corner_LON)) /
                 float(post_Lon))  # column index (width / range)

        HEAD0 = data_head[XX][YY]
        INC0 = data_inc[XX][YY]

        STR = str(NM) + ' ' + str(float(LAT)) + ' ' + str(
            float(LON)) + ' ' + str(int(YY)) + ' ' + str(
                int(XX)) + ' ' + str(INC0) + ' ' + str(HEAD0)
        call_str = 'echo ' + STR + ' >> ' + OUT
        os.system(call_str)

        print(
            str(NM) + ' ' + str(float(LAT)) + ' ' + str(float(LON)) + ' ' +
            str(int(YY)) + ' ' + str(int(XX)) + ' ' + str(INC0) + ' ' +
            str(HEAD0))
Example #3
def transect_list(fileList, inps):
    '''Get transect along input line from file list
    Inputs:
        fileList : list of str, path of files to get transect
        inps     : Namespace including the following items:
                   start/end_lalo
                   start/end_yx
                   interpolation
    Outputs:
        transectList : list of N*2 matrix containing distance and its value
        atrList      : list of attribute dictionary, for each input file
    '''

    transectList = []
    atrList = []
    for File in fileList:
        print 'reading ' + File
        data, atr = readfile.read(File)
        if inps.start_lalo and inps.end_lalo:
            transect = transect_lalo(data, atr, inps.start_lalo, inps.end_lalo,
                                     inps.interpolation)
        else:
            transect = transect_yx(data, atr, inps.start_yx, inps.end_yx,
                                   inps.interpolation)
        transectList.append(transect)
        atrList.append(atr)
    return transectList, atrList
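A short usage sketch for transect_list (file names and coordinates are made up; transect_lalo/transect_yx are assumed to be defined in the same module, as above):

from argparse import Namespace

inps = Namespace(start_lalo=[31.10, 130.50],   # example start point (assumption)
                 end_lalo=[31.35, 130.95],     # example end point (assumption)
                 start_yx=None, end_yx=None,
                 interpolation='nearest')      # example method (assumption)
transectList, atrList = transect_list(['velocity.h5', 'dem.h5'], inps)
for transect in transectList:
    print(transect.shape)                      # N*2: distance and value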
Example #4
def seed_max_coherence(File,mask,outFile,corFile=''):
    print '\n---------------------------------------------------------'
    print   'Automatically select reference point ...'
    print   '    Based on maximum coherence.'
    print   '    Input coherence file or meanCoherence group within   '
    print   '    the file is needed.'
    print   '---------------------------------------------------------'

    SeedingDone = 'no'
    
    ##### Read Coherence
    try:
        h5file = h5py.File(File,'r')
        coh = h5file['meanCoherence'].get('meanCoherence')[:]
    except:
        try:  coh, coh_atr = readfile.read(corFile)
        except: print '\nERROR: No coherence data found!'

    try:
        coh *= mask
        print 'Searching the pixel with maximum average coherence'
        y,x = np.unravel_index(np.argmax(coh), coh.shape)
        seed_xy(File,x,y,outFile)
        SeedingDone = 'yes'
    except: pass

    return SeedingDone
Example #5
def main(argv):
    inps = cmdLineParse()
    print '\n*************** Spatial Average ******************'

    if inps.mask_file:
        mask, mask_atr = readfile.read(inps.mask_file)
    else:
        mask = None

    for File in inps.file:
        mean_list = ut.spatial_average(File, mask, saveList=True)
        atr = readfile.read_attribute(File)
        k = atr['FILE_TYPE']
        if inps.disp_fig and k == 'timeseries':
            # Get date list
            h5file = h5py.File(File)
            dateList = sorted(h5file[k].keys())
            h5file.close()
            dates, datevector = ptime.date_list2vector(dateList)

            # plot
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.plot(dates,
                    mean_list,
                    '-ko',
                    lw=2,
                    ms=16,
                    alpha=0.7,
                    mfc='crimson')
            ax.set_title('Spatial Average', fontsize=12)
            ax = ptime.auto_adjust_xaxis_date(ax, datevector)
            ax.set_xlabel('Time [years]', fontsize=12)
            ax.set_ylabel('Mean', fontsize=12)
            plt.show()
Example #6
def stacking(File):
    ## Stack multi-temporal dataset into one
    ##    equivalent to temporal sum

    ## File Info
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])

    ## Calculation
    stack  = np.zeros([length,width])
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        epochNum  = len(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            if k == 'timeseries':  data = h5file[k].get(epoch)[:]
            else:                  data = h5file[k][epoch].get(epoch)[:]
            stack += data
            printProgress(i+1,epochNum)
        h5file.close()

    else:
        try: stack, atrStack = readfile.read(File)
        except: print 'Cannot read file: '+File; sys.exit(1)

    return stack
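Since stacking() is described as a temporal sum, its core is equivalent to summing a 3-D array over the epoch axis; a tiny numpy check with made-up numbers:

import numpy as np

ts = np.array([[[0., 1.], [2., 3.]],    # epoch 1 of a 2x2 time-series
               [[1., 1.], [1., 1.]],    # epoch 2
               [[2., 0.], [0., 2.]]])   # epoch 3
print(ts.sum(axis=0))                   # [[3. 2.] [3. 6.]] -- what the loop accumulates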
Example #7
def load_single_dataset_hdf5(file_type, infile, outfile, extra_meta_dict=dict()):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli
    Inputs:
        file_type : string, group name of hdf5 file, i.e. dem, mask
        infile    : string, input ROI_PAC file name
        outfile   : string, output hdf5 file name
        extra_meta_dict : dict, extra attributes to output file
    Output:
        outfile   : string, output hdf5 file name
    '''
    if not ut.update_file(outfile, infile):
        return outfile

    # Read input file
    print 'loading file: '+infile
    data, atr = readfile.read(infile)

    # Write output file - data
    print 'writing >>> '+outfile
    h5 = h5py.File(outfile, 'w')
    group = h5.create_group(file_type)
    dset = group.create_dataset(file_type, data=data, compression='gzip')

    # Write output file - attributes
    for key, value in atr.iteritems():
        group.attrs[key] = value
    try: group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
    except: pass
    key = 'INSAR_PROCESSOR'
    if key not in atr.keys():
        try:  atr[key] = extra_meta_dict['insar_processor']
        except:  pass
    h5.close()
    return outfile
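A hypothetical call matching the docstring (the ROI_PAC file names are assumptions):

outfile = load_single_dataset_hdf5('dem', 'radar.dem', 'demRadar.h5',
                                   extra_meta_dict={'project_name': 'TestProject'})
print('wrote ' + outfile)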
Example #8
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print 'Input file is '+k

    # Check: file in geo coord
    if 'X_FIRST' not in atr.keys():
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch and k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        print "No date/date12 input.\nIt's required for "+k+" file"
        sys.exit(1)

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch, vars(inps))

    # Data Operation - Display Unit & Rewrapping
    data, inps.disp_unit, inps.wrap = pview.scale_data4disp_unit_and_rewrap(data, atr, inps.disp_unit, inps.wrap)
    if inps.wrap:
        inps.ylim = [-np.pi, np.pi]

    ##### 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data, atr, inps.outfile, inps)

    print 'Done.'
    return
Example #9
def manual_select_start_end_point(File):
    '''Manual Select Start/End Point in display figure.'''
    print 'reading ' + File + ' ...'
    data, atr = readfile.read(File)
    print 'displaying ' + File + ' ...'
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.imshow(data)

    xc = []
    yc = []
    print 'please click on start and end point of the desired profile'
    print 'then close the figure to continue'

    def onclick(event):
        if event.button == 1:
            xcc, ycc = int(event.xdata), int(event.ydata)
            xc.append(xcc)
            yc.append(ycc)
            print 'x = ' + str(xcc) + '\ny = ' + str(ycc)
            ax.plot(xcc, ycc, 'ro')

    cid = fig.canvas.mpl_connect('button_release_event', onclick)
    plt.show()

    start_yx = [yc[0], xc[0]]
    end_yx = [yc[1], xc[1]]
    return start_yx, end_yx
Example #10
def load_single_dataset_hdf5(file_type,
                             infile,
                             outfile,
                             extra_meta_dict=dict()):
    '''Convert ROI_PAC .dem / .hgt file to hdf5 file
    Based on load_dem.py written by Emre Havazli
    Inputs:
        file_type : string, group name of hdf5 file, i.e. dem, mask
        infile    : string, input ROI_PAC file name
        outfile   : string, output hdf5 file name
        extra_meta_dict : dict, extra attributes to output file
    Output:
        outfile   : string, output hdf5 file name
    '''
    atr = readfile.read_attribute(infile)

    if ut.update_file(outfile, infile):
        if (os.path.dirname(infile) == os.path.dirname(outfile) and \
            os.path.splitext(infile)[1] == os.path.splitext(outfile)[1]):
            print infile + ' already in working directory with recommended format, no need to re-load.'
            outfile = infile

        else:
            # Read input file
            print 'loading file: ' + infile
            data = readfile.read(infile)[0]

            # Write output file - data
            print 'writing >>> ' + outfile
            h5 = h5py.File(outfile, 'w')
            group = h5.create_group(file_type)
            dset = group.create_dataset(file_type,
                                        data=data,
                                        compression='gzip')

            # Write output file - attributes
            for key, value in atr.iteritems():
                group.attrs[key] = value
            try:
                group.attrs['PROJECT_NAME'] = extra_meta_dict['project_name']
            except:
                pass
            key = 'INSAR_PROCESSOR'
            if key not in atr.keys():
                try:
                    atr[key] = extra_meta_dict['insar_processor']
                except:
                    pass
            h5.close()

    #if (os.path.abspath(infile) != os.path.abspath(outfile) and \
    #    os.path.dirname(infile) == os.path.dirname(outfile)):
    #    print 'remove the duplicated, obsolete '+atr['FILE_TYPE']+' file in the same directory'
    #    rmCmd = 'rm '+infile
    #    print rmCmd
    #    os.system(rmCmd)

    return outfile
Example #11
def select_max_coherence_yx(corFile, mask=None):
    print '\n---------------------------------------------------------'
    print   'Searching pixel with max coherence ...'
    print 'use coherence file: '+corFile
    coh, coh_atr = readfile.read(corFile)
    if mask is not None:
        coh[mask==0] = 0.0
    y, x = np.unravel_index(np.argmax(coh), coh.shape)
    print   'y/x: '+str([y, x])
    print   '---------------------------------------------------------'

    return y, x
Example #12
def main(argv):
    try:
        demFile = argv[0]
        File = argv[1]
    except:
        usage()
        sys.exit(1)

    dem, demRsc = readfile.read(demFile)
    data, atr = readfile.read(File)
    print 'Input file is ' + atr['FILE_TYPE']

    # Subset
    try:
        y0, y1 = [int(i) for i in argv[2].split(':')]
        x0, x1 = [int(i) for i in argv[3].split(':')]
        data = data[y0:y1, x0:x1]
        dem = dem[y0:y1, x0:x1]
    except:
        pass

    # Calculation
    dem = dem.flatten(1)
    data = data.flatten(1)
    ndx = ~np.isnan(data)
    C1 = np.zeros([2, len(dem[ndx])])
    C1[0][:] = dem[ndx]
    C1[1][:] = data[ndx]

    # Display
    print '-------------------------------------------'
    print 'Correlation with the DEM:  %.2f' % np.corrcoef(C1)[0][1]
    print '-------------------------------------------'
    print 'DEM info:'
    print '    Max height difference: %.2f m' % (np.max(dem[ndx]) -
                                                 np.min(dem[ndx]))
    print '    Average        height: %.2f m' % np.mean(dem[ndx])
    print '    Height            Std: %.2f m' % np.std(dem[ndx])
    return
Example #13
def select_max_coherence_yx(cohFile, mask=None, min_coh=0.85):
    '''Select a pixel with coherence > min_coh at random'''
    print '\n---------------------------------------------------------'
    print 'select a pixel with coherence > ' + str(min_coh) + ' at random'
    print 'use coherence file: ' + cohFile
    coh, coh_atr = readfile.read(cohFile)
    if mask is not None:
        coh[mask == 0] = 0.0
    coh_mask = coh >= min_coh
    y, x = random_select_reference_yx(coh_mask, print_msg=False)
    #y, x = np.unravel_index(np.argmax(coh), coh.shape)
    print 'y/x: ' + str([y, x])
    print '---------------------------------------------------------'

    return y, x
Example #14
def remove_multiple_surface(File, surf_type, Mask, ysub, outName):
    start = time.time()
    ##### Output File Name
    if outName == "":
        ext = os.path.splitext(File)[1].lower()
        outName = os.path.basename(File).split(ext)[0] + "_" + surf_type + ext

    atr = readfile.read_attributes(File)
    k = atr["FILE_TYPE"]
    print "Input file is " + atr["PROCESSOR"] + " " + k

    if k == "timeseries":
        h5file = h5py.File(File, "r")
        ifgramList = h5file[k].keys()
        ifgramList = sorted(ifgramList)
        print "number of epochs: " + str(len(ifgramList))

        h5flat = h5py.File(outName, "w")
        group = h5flat.create_group(k)
        print "writing >>> " + outName

        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            dataIn = h5file[k].get(ifgram)[:]

            dataOut = remove_data_multiple_surface(dataIn, surf_type, Mask, ysub)

            dset = group.create_dataset(ifgram, data=dataOut, compression="gzip")
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    else:
        try:
            dataIn, atr = readfile.read(File)
        except:
            print "Input file type is not supported: " + atr["FILE_TYPE"]

        dataOut = remove_data_multiple_surface(dataIn, surf_type, Mask, ysub)
        ramp = dataIn - dataOut

        writefile.write(dataOut, atr, outName)
        # atr['FILE_TYPE']='mask'
        # writefile.write(ramp,atr,'2quadratic.h5')

    print "Remove " + surf_type + " took " + str(time.time() - start) + " secs"
Example #15
def roipac_nonzero_mask(unwFileList, maskFile='Mask.h5'):
    '''Generate mask for non-zero amplitude pixels of a ROI_PAC .unw file list.'''
    unwFileList, width, length = check_file_size(unwFileList)
    if unwFileList:
        # Initial mask value
        if os.path.isfile(maskFile):
            maskZero, atr = readfile.read(maskFile)
            print 'update existing mask file: '+maskFile
        else:
            maskZero = np.ones([int(length), int(width)])
            atr = None
            print 'create initial mask matrix'

        # Update mask from input .unw file list
        fileNum = len(unwFileList)
        for i in range(fileNum):
            file = unwFileList[i]
            amp, unw, rsc = readfile.read_float32(file)
            
            maskZero *= amp
            ut.print_progress(i+1, fileNum, prefix='loading', suffix=os.path.basename(file))
        mask = np.ones([int(length), int(width)])
        mask[maskZero==0] = 0
        
        # write mask hdf5 file
        print 'writing >>> '+maskFile
        h5 = h5py.File(maskFile,'w')
        group = h5.create_group('mask')
        dset = group.create_dataset('mask', data=mask, compression='gzip')
        # Attribute - *.unw.rsc
        for key,value in rsc.iteritems():
            group.attrs[key] = value
        # Attribute - *baseline.rsc
        d1, d2 = rsc['DATE12'].split('-')
        baseline_file = os.path.dirname(file)+'/'+d1+'_'+d2+'_baseline.rsc'
        baseline_rsc = readfile.read_roipac_rsc(baseline_file)
        for key,value in baseline_rsc.iteritems():
            group.attrs[key] = value
        # Attribute - existed file
        if atr:
            for key, value in atr.iteritems():
                group.attrs[key] = value

    return maskFile, unwFileList
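A hedged usage sketch (the .unw file pattern is an assumption):

import glob

unw_list = sorted(glob.glob('filt_*.unw'))
maskFile, used_list = roipac_nonzero_mask(unw_list, maskFile='Mask.h5')
print('%d of %d files used for %s' % (len(used_list), len(unw_list), maskFile))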
Example #16
def main(argv):
    inps = cmdLineParse()

    ##### 1. Read data
    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    print 'Input file is ' + k

    # Check: file in geo coord
    if 'X_FIRST' not in atr.keys():
        sys.exit('ERROR: Input file is not geocoded.')

    # Check: epoch is required for multi_dataset/group files
    if not inps.epoch:
        if k in multi_group_hdf5_file:
            print "No date/date12 input.\nIt's required for " + k + " file"
            sys.exit(1)
        elif k in multi_dataset_hdf5_file:
            print 'No input date; continue with the last date of the time-series.'
            h5 = h5py.File(inps.file, 'r')
            date_list = sorted(h5[k].keys())
            h5.close()
            inps.epoch = date_list[-1]

    # Read data
    data, atr = readfile.read(inps.file, (), inps.epoch)

    # Output filename
    if not inps.outfile:
        inps.outfile = pview.auto_figure_title(inps.file, inps.epoch,
                                               vars(inps))
    inps.outfile = os.path.splitext(inps.outfile)[0] + '.grd'

    ##### 2. Write GMT .grd file
    inps.outfile = write_grd_file(data, atr, inps.outfile)
    print 'Done.'
    return inps.outfile
Example #17
def main(argv):

    global method_default
    ##### Referencing methods
    method_default = 'max_coherence'
    #method = 'manual'
    #method = 'max_coherence'        ## Use phase on point with max coherence [default]
    #method = 'global_average'       ## Use Nan Mean of phase on all pixels
    #method = 'random'
    #maskFile = 'Mask.h5'

    global SeedingDone
    
    ############################## Check Inputs ##############################
    if len(sys.argv) > 2:
        try:  opts, args = getopt.getopt(argv,"h:c:f:m:y:x:l:L:t:o:r:",\
                                         ['manual','max-coherence','global-average','random'])
        except getopt.GetoptError:  Usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt in ("-h","--help"):   Usage();  sys.exit()
            elif opt == '-f':        File     = arg
            elif opt == '-m':        maskFile = arg
            elif opt == '-c':        corFile  = arg
            elif opt == '-o':        outFile  = arg

            elif opt == '-y':        ry       = int(arg)
            elif opt == '-x':        rx       = int(arg)
            elif opt == '-l':        rlat     = float(arg)
            elif opt == '-L':        rlon     = float(arg)
            elif opt == '-r':        refFile  = arg
            elif opt == '-t':        templateFile = arg

            elif opt == '--global-average' :  method = 'global_average'
            elif opt == '--manual'         :  method = 'manual'
            elif opt == '--max-coherence'  :  method = 'max_coherence'
            elif opt == '--random'         :  method = 'random'

    elif len(sys.argv)==2:
        if   argv[0]=='-h':            Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):  File = argv[0]
        else:  print 'Input file does not exist: '+argv[0];  sys.exit(1)
    elif len(sys.argv)<2:             Usage(); sys.exit(1)

    ##### Input File Info
    try:
        File
        atr = readfile.read_attributes(File)
        k = atr['FILE_TYPE']
        length = int(atr['FILE_LENGTH'])
        width  = int(atr['WIDTH'])
    except:  Usage() ; sys.exit(1)
    ext = os.path.splitext(File)[1].lower()

    try:    outFile
    except: outFile = 'Seeded_'+File
  
    ############################## Reference Point Input ####################
    try:
        refFile
        atr_ref = readfile.read_attributes(refFile)
    except: pass
  
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass

    ### Priority
    ## lat/lon > y/x
    ## Direct Input > Reference File > Template File
    try:
        rlat
        rlon
    except:
        try:
            rlat = float(atr_ref['ref_lat'])
            rlon = float(atr_ref['ref_lon'])
        except:
            try: rlat,rlon = [float(i) for i in templateContents['pysar.seed.lalo'].split(',')]
            except: pass

    try:
        ry
        rx
    except:
        try:
            ry = int(atr_ref['ref_y'])
            rx = int(atr_ref['ref_x'])
        except:
            try: ry,rx       = [int(i)   for i in templateContents['pysar.seed.yx'].split(',')]
            except: pass

    ##### Check lalo / YX
    print '\n************** Reference Point ******************'
    try:
        rlat
        rlon
        y = sub.coord_geo2radar(rlat,atr,'lat')
        x = sub.coord_geo2radar(rlon,atr,'lon')
        assert 0 <= x <= width
        assert 0 <= y <= length
        rx = x
        ry = y
        print 'Reference point: lat = %.4f,   lon = %.4f'%(rlat,rlon)
        print '                 y   = %d,     x   = %d'%(ry,rx)
    except:
        print 'Skip input lat/lon reference point.'
        print 'Continue with the y/x reference point.'


    ######################### a. Read Mask File #########################
    ## Priority: Input mask file > pysar.mask.file 
    try:     maskFile
    except:
        try: maskFile = templateContents['pysar.mask.file']
        except:  print 'No mask found!';
    try:
        M,Matr = readfile.read(maskFile);
        print 'mask: '+maskFile
    except:
        print '---------------------------------------------------------'
        print 'WARNING: No mask, use the whole area as mask'
        print '---------------------------------------------------------'
        M = np.ones((length,width))

    ## Message
    try:
        rx
        ry
        assert 0 <= rx <= width
        assert 0 <= ry <= length
        if M[ry,rx] == 0:
            print 'Input point has 0 value in mask.'
    except: pass

    ######################### b. Stack ##################################
    stackFile = os.path.basename(File).split(ext)[0] + '_stack.h5'
    stack_file_exist = 'no'
    try:
        assert os.path.isfile(stackFile)
        stack,atrStack = readfile.read(stackFile)
        if width == int(atrStack['WIDTH']) and length == int(atrStack['FILE_LENGTH']):
            stack_file_exist = 'yes'
            print 'read stack from file: '+stackFile
    except: pass

    if stack_file_exist == 'no':
        print 'calculating stack of input file ...'
        stack = ut.stacking(File)
        atrStack = atr.copy()
        atrStack['FILE_TYPE'] = 'mask'
        writefile.write(stack,atrStack,stackFile)

    ## Message
    try:
        rx
        ry
        if stack[ry,rx] == 0:
            print 'Input point has nan value in data.'
    except: pass

    stack[M==0] = 0
    if np.nansum(M) == 0.0:
        print '\n*****************************************************'
        print   'ERROR:'
        print   'There is no pixel that has valid phase value in all datasets.' 
        print   'Check the file!'
        print   'Seeding failed'
        sys.exit(1)

    ######################### Check Method ##############################
    try:
        assert stack[ry,rx] != 0
        method = 'input_coord'
    except:
        try:    method
        except: method = method_default
        print 'Skip input y/x reference point.'
        print 'Continue with '+method

    #h5file = h5py.File(File)

    ######################### Seeding ###################################
    ##### Sub-function
    def seed_method(method,File,stack,outFile,corFile=''):
        SeedingDone = 'no'
        next_method = method_default
        M = stack != 0

        if   method == 'manual':
            SeedingDone = seed_manual(File,stack,outFile)
            if SeedingDone == 'no':
                next_method = method_default
                print_warning(next_method)

        elif method == 'max_coherence':
            try:    SeedingDone = seed_max_coherence(File,M,outFile,corFile)
            except: SeedingDone = seed_max_coherence(File,M,outFile)
            if SeedingDone == 'no':
                next_method = 'random'
                print_warning(next_method)

        elif method == 'random':
            y,x = random_selection(stack)
            seed_xy(File,x,y,outFile)
            SeedingDone = 'yes'

        elif method == 'global_average':
            print '\n---------------------------------------------------------'
            print 'Automatically Seeding using Global Spatial Average Value '
            print '---------------------------------------------------------'
            print 'Calculating the global spatial average value for each epoch'+\
                  ' of all valid pixels ...'
            box = (0,0,width,length)
            meanList = ut.spatial_mean(File,M,box)
            seed_file(File,outFile,meanList,'','')
            SeedingDone = 'yes'

        return SeedingDone, next_method

    ##### Seeding
    SeedingDone = 'no'

    if method == 'input_coord':
        seed_xy(File,rx,ry,outFile)
        SeedingDone = 'yes'

    else:
        i = 0
        while SeedingDone == 'no' and i < 5:
            try:    SeedingDone,method = seed_method(method,File,stack,outFile,corFile)
            except: SeedingDone,method = seed_method(method,File,stack,outFile)
            i += 1
        if i >= 5:
            print 'ERROR: Seeding failed after '+str(i)+' tries ...'
            sys.exit(1)
Example #18
def geocode_file_radar_lut(fname, lookup_file, fname_out=None, inps=None):
    '''Geocode file using lookup table file in radar coordinates (isce).
    Two solutions:
    1) scipy.interpolate.griddata, with a speed up solution from Jaime and Jeff (Stack Overflow)
        https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    2) matplotlib.tri, interpolation from triangular grid to quad grid, which is much slower than 1).

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file, geometryRadar.h5
        fname_out   : string, optional, output geocoded filename
        inps        : namespace, object with the following items:
                      interp_method : string, interpolation/resampling method, supporting linear
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out  : string, optional, output geocoded filename
    '''
    start = time.time()
    ## Default Inputs and outputs
    if not inps:
        inps = cmdLineParse()

    if inps.interp_method != 'linear':
        print 'ERROR: Supported interpolation method: linear'
        print 'Input method is '+inps.interp_method
        sys.exit(-1)

    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ## Read lookup table file
    atr_rdr = readfile.read_attribute(fname)
    length = int(atr_rdr['FILE_LENGTH'])
    width = int(atr_rdr['WIDTH'])
    print 'reading lookup table file '+lookup_file
    lat = readfile.read(lookup_file, epoch='latitude')[0]
    lon = readfile.read(lookup_file, epoch='longitude')[0]

    #####Prepare output pixel grid: lat/lon range and step
    if os.path.isfile(inps.lalo_step):
        print 'use file %s as reference for output grid lat/lon range and step' % (inps.lalo_step)
        atr_ref = readfile.read_attribute(inps.lalo_step)
        inps.lat_step = float(atr_ref['Y_STEP'])
        inps.lon_step = float(atr_ref['X_STEP'])
        inps.lat_num = int(atr_ref['FILE_LENGTH'])
        inps.lon_num = int(atr_ref['WIDTH'])
        inps.lat0 = float(atr_ref['Y_FIRST'])
        inps.lon0 = float(atr_ref['X_FIRST'])
        inps.lat1 = inps.lat0 + inps.lat_step*inps.lat_num
        inps.lon1 = inps.lon0 + inps.lon_step*inps.lon_num
    else:
        try:
            inps.lat_step = -1*abs(float(inps.lalo_step))
            inps.lon_step = abs(float(inps.lalo_step))
            inps.lat0 = np.nanmax(lat)
            inps.lat1 = np.nanmin(lat)
            inps.lon0 = np.nanmin(lon)
            inps.lon1 = np.nanmax(lon)
            inps.lat_num = int((inps.lat1-inps.lat0)/inps.lat_step)
            inps.lon_num = int((inps.lon1-inps.lon0)/inps.lon_step)
            inps.lat_step = (inps.lat1 - inps.lat0)/inps.lat_num
            inps.lon_step = (inps.lon1 - inps.lon0)/inps.lon_num
        except ValueError:
            print 'Input lat/lon step is neither a float number nor a file in geo-coord, please try again.'

    print 'output lat range: %f - %f' % (inps.lat0, inps.lat1)
    print 'output lon range: %f - %f' % (inps.lon0, inps.lon1)
    print 'output lat_step : %f' % (inps.lat_step)
    print 'output lon_step : %f' % (inps.lon_step)
    print 'input  file size in   y/x  : %d/%d' % (length, width)
    print 'output file size in lat/lon: %d/%d' % (inps.lat_num, inps.lon_num)

    grid_lat, grid_lon = np.mgrid[inps.lat0:inps.lat1:inps.lat_num*1j,\
                                  inps.lon0:inps.lon1:inps.lon_num*1j]


    ##### Interpolate value on regular geo coordinates (from lookup table file attributes, 2D ndarray)
    ##### with known value on irregular geo coordinates (from lookup table file value, tuple of ndarray of float)

    ##Solution 1 - qhull
    print 'calculate triangulation and coordinates transformation using scipy.spatial.qhull.Delaunay ...'
    pts_old = np.hstack((lat.reshape(-1,1), lon.reshape(-1,1)))
    pts_new = np.hstack((grid_lat.reshape(-1,1), grid_lon.reshape(-1,1)))
    vtx, wts = interp_weights(pts_old, pts_new)
    del pts_old, pts_new, grid_lat, grid_lon

    ##Solution 2 - matplotlib.tri
    #triang = mtri.Triangulation(lat.flatten(),lon.flatten())

    data_geo = np.empty((inps.lat_num, inps.lon_num)).flatten()
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_radar_lut(h5[k][ifgram].attrs, inps, lat, lon, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]

        ##Solution 1 - qhull
        data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

        ###Solution 2 - matplotlib.tri
        #interp_lin = mtri.LinearTriInterpolator(triang, data.flatten())
        #data_geo = interp_lin(grid_lat.flatten(), grid_lon.flatten())
        #interp_cubic = mtri.CubicTriInterpolator(triang, data, kind='geom')
        #data_geo = interp_cubic(grid_lat, grid_lon)

        print 'update attributes'
        atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo, vtx, wts
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
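The interp_weights/interpolate pair is not shown in this example; a minimal sketch along the lines of the Stack Overflow answer cited in the docstring (triangulate once, then reuse the simplex vertices and barycentric weights for every epoch) could look like this:

import numpy as np
from scipy.spatial import Delaunay

def interp_weights(xy, uv, d=2):
    # Triangulate the source points once; for each target point return
    # the enclosing simplex vertices and their barycentric weights.
    tri = Delaunay(xy)
    simplex = tri.find_simplex(uv)
    vertices = np.take(tri.simplices, simplex, axis=0)
    temp = np.take(tri.transform, simplex, axis=0)
    delta = uv - temp[:, d]
    bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta)
    return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))

def interpolate(values, vtx, wts, fill_value=np.nan):
    # Linear interpolation reusing the precomputed vertices/weights.
    ret = np.einsum('nj,nj->n', np.take(values, vtx), wts)
    ret[np.any(wts < 0, axis=1)] = fill_value    # target points outside the hull
    return ret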
Example #19
def subset_file(File, subset_dict, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. filled value for area outside of data coverage. default=None
                                   None/absent to subset within data coverage only.
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print 'subset ' + k + ' file: ' + File + ' ...'

    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = False
    try:
        subset_dict['fill_value']
        if subset_dict['fill_value']:
            outfill = True
    except:
        outfill = False
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print 'data   range in y/x: ' + str(data_box)
    print 'subset range in y/x: ' + str(pix_box)
    print 'data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict))
    print 'subset range in lat/lon: ' + str(geo_box)

    if pix_box == data_box:
        print 'Subset range == data coverage, no need to subset. Skip.'
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: ' + str(epochNum)
        else:
            print 'number of interferograms: ' + str(epochNum)

        ##### Open Output File
        h5out = h5py.File(outFile)
        group = h5out.create_group(k)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.iteritems():
                gg.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
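A hypothetical call (box coordinates are made up; the subset_dict keys follow the docstring above):

import numpy as np

sub_dict = {'subset_x': [300, 800],      # example x range (assumption)
            'subset_y': [500, 1000],     # example y range (assumption)
            'subset_lat': None, 'subset_lon': None,
            'fill_value': np.nan}        # pad area outside data coverage
outFile = subset_file('velocity.h5', sub_dict)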
Example #20
def remove_surface(File, surf_type, Mask, outName=""):
    start = time.time()
    ##### Output File Name
    if outName == "":
        ext = os.path.splitext(File)[1].lower()
        outName = os.path.basename(File).split(ext)[0] + "_" + surf_type + ext

    ##### Input File Info
    atr = readfile.read_attributes(File)
    k = atr["FILE_TYPE"]
    print "Input file is " + atr["PROCESSOR"] + " " + k

    ## Multiple Datasets File
    if k in ["interferograms", "coherence", "wrapped", "timeseries"]:
        h5file = h5py.File(File, "r")
        ifgramList = h5file[k].keys()
        ifgramList = sorted(ifgramList)
        print "number of epochs: " + str(len(ifgramList))

        h5flat = h5py.File(outName, "w")
        group = h5flat.create_group(k)
        print "writing >>> " + outName

    if k in ["timeseries"]:
        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            data = h5file[k].get(ifgram)[:]

            data_n, ramp = remove_data_surface(data, Mask, surf_type)

            dset = group.create_dataset(ifgram, data=data_n, compression="gzip")
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ["interferograms", "wrapped", "coherence"]:
        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            data = h5file[k][ifgram].get(ifgram)[:]

            data_n, ramp = remove_data_surface(data, Mask, surf_type)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_n, compression="gzip")
            for key, value in h5file[k][ifgram].attrs.iteritems():
                gg.attrs[key] = value

    ## Single Dataset File
    else:
        try:
            data, atr = readfile.read(File)
        except:
            pass
        print "Removing " + surf_type + " from " + k

        data_n, ramp = remove_data_surface(data, Mask, surf_type)

        writefile.write(data_n, atr, outName)

    try:
        h5file.close()
        h5flat.close()
    except:
        pass

    print "Remove " + surf_type + " took " + str(time.time() - start) + " secs"
Example #21
def main(argv):

    ####################### Inputs Check ########################
    try:
        opts, args = getopt.getopt(argv, "h:f:o:", ['help'])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    if len(sys.argv) > 4:
        for opt, arg in opts:
            if opt in ("-h", "--help"):
                usage()
                sys.exit()
            elif opt == '-f':
                fileList = arg.split(',')
            elif opt == '-o':
                outName = arg

    elif len(sys.argv) <= 4 and len(sys.argv) >= 3:
        fileList = [sys.argv[1], sys.argv[2]]
        try:
            outName = sys.argv[3]
        except:
            pass
    else:
        usage()
        sys.exit(1)

    print '\n****************** Add **********************'
    print 'Input files: '
    print fileList

    ext = os.path.splitext(fileList[0])[1].lower()
    try:
        outName
    except:
        outName = fileList[0].split('.')[0] + '_plus_' + fileList[1].split('.')[0] + ext

    ##### Read File Info / Attributes
    atr = readfile.read_attribute(fileList[0])
    print 'Input file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    k = atr['FILE_TYPE']

    ##### File Type Check
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        for i in range(1, len(fileList)):
            File = fileList[i]
            r = readfile.read_attribute(File)
            if not r['FILE_TYPE'] == k:
                print 'Input file type is not the same: ' + r['FILE_TYPE']
                sys.exit(1)

        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)

        h5in = h5py.File(fileList[0])
        epochList = sorted(h5in[k].keys())

    ########################### Add file by file ########################
    if k in ['timeseries']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File, 'r')
                d = h5file[k].get(epoch)[:]

                data = add(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value

        h5out.close()
        h5in.close()

    elif k in ['interferograms', 'coherence', 'wrapped']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File, 'r')
                d = h5file[k][epoch].get(epoch)[:]

                data = add(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5in[k][epoch].attrs.iteritems():
                gg.attrs[key] = value

        h5out.close()
        h5in.close()

    ## All the other file types
    else:
        data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        for File in fileList:
            print 'loading ' + File
            d, r = readfile.read(File)
            data = add(data, d)
        writefile.write(data, atr, outName)
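The add() helper is defined elsewhere; a minimal sketch of plausible no-data-aware behavior (an assumption about the real implementation) is:

import numpy as np

def add(data, d):
    # Sum two matrices; where either input has no data (NaN), output NaN.
    ndx = np.isnan(data) | np.isnan(d)
    out = data + d
    out[ndx] = np.nan
    return out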
Example #22
def main(argv):
    inps = cmdLineParse()

    atr = readfile.read_attribute(inps.velocity_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Check subset input
    if inps.subset_y:
        inps.subset_y = sorted(inps.subset_y)
        print 'subset in y/azimuth direction: '+str(inps.subset_y)
    else:
        inps.subset_y = [0, length]

    if inps.subset_x:
        inps.subset_x = sorted(inps.subset_x)
        print 'subset in x/range direction: '+str(inps.subset_x)
    else:
        inps.subset_x = [0, width]
    y0, y1 = inps.subset_y
    x0, x1 = inps.subset_x

    # Read velocity/rate
    velocity = readfile.read(inps.velocity_file)[0]
    print 'read velocity file: '+inps.velocity_file

    k = 'interferograms'
    h5 = h5py.File(inps.ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    print 'number of interferograms: '+str(ifgram_num)

    ##### Select interferograms with unwrapping error
    if inps.percentage > 0.0:
        mask = readfile.read(inps.mask_file, epoch='mask')[0]
        print 'read mask for pixels with unwrapping error from file: '+inps.mask_file

        unw_err_ifgram_num = int(np.rint(inps.percentage*ifgram_num))
        unw_err_ifgram_idx = random.sample(range(ifgram_num), unw_err_ifgram_num)
        unw_err_ifgram_list = [ifgram_list[i] for i in unw_err_ifgram_idx]
        unw_err_date12_list = [date12_list[i] for i in unw_err_ifgram_idx]
        print 'randomly choose the following %d interferograms with unwrapping error' % unw_err_ifgram_num
        print unw_err_date12_list

        unit_unw_err = 2.0*np.pi*mask
    else:
        unw_err_ifgram_list = []

    ###### Generate simulated interferograms
    m_dates = ptime.yyyymmdd([i.split('-')[0] for i in date12_list])
    s_dates = ptime.yyyymmdd([i.split('-')[1] for i in date12_list])
    range2phase = -4.0*np.pi/float(atr['WAVELENGTH'])

    print 'writing simulated interferograms file: '+inps.outfile
    h5out=h5py.File(inps.outfile,'w') 
    group = h5out.create_group('interferograms')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # Get temporal baseline in years
        t1 = datetime.datetime(*time.strptime(m_dates[i],"%Y%m%d")[0:5])
        t2 = datetime.datetime(*time.strptime(s_dates[i],"%Y%m%d")[0:5])
        dt = (t2-t1)
        dt = float(dt.days)/365.25

        # Simulated interferograms with unwrapping error
        unw = velocity*dt*range2phase
        if ifgram in unw_err_ifgram_list:
            rand_int = random.sample(range(1,10),1)[0]
            unw += rand_int * unit_unw_err
            print ifgram+'  - add unwrapping error of %d*2*pi' % rand_int
        else:
            print ifgram

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=unw[y0:y1,x0:x1], compression='gzip')

        for key, value in h5[k][ifgram].attrs.iteritems():
            gg.attrs[key] = value
        if ifgram in unw_err_ifgram_list:
            gg.attrs['unwrap_error'] = 'yes'
        else:
            gg.attrs['unwrap_error'] = 'no'
        gg.attrs['FILE_LENGTH'] = y1-y0
        gg.attrs['WIDTH']       = x1-x0
    h5.close()
    h5out.close()
    print 'Done.'
    return inps.outfile
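A quick worked check of the range-to-phase conversion used above (wavelength and rate are made-up values):

import numpy as np

wavelength = 0.0556                      # m, example C-band value (assumption)
velocity = 0.01                          # m/yr LOS rate (assumption)
dt = 1.0                                 # yr, temporal baseline
range2phase = -4.0 * np.pi / wavelength  # m of LOS change -> rad of phase
print(velocity * dt * range2phase)       # about -2.26 rad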
Example #23
def main(argv):
    try:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit(1)
        else:
            File = argv[0]
    except:
        usage()
        sys.exit(1)

    try:
        maskFile = argv[1]
    except:
        pass

    ##################################
    h5file = h5py.File(File)
    dateList = h5file['timeseries'].keys()
    ##################################

    ##### Read Mask File
    ## Priority:
    ## Input mask file > pysar.mask.file > existed Modified_Mask.h5 > existed Mask.h5
    try:
        maskFile
    except:
        if os.path.isfile('Modified_Mask.h5'): maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'): maskFile = 'Mask.h5'
        else:
            print 'No mask found!'
            sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile, epoch='mask')
        print 'mask: ' + maskFile
    except:
        print 'Can not open mask file: ' + maskFile
        sys.exit(1)

    ##################################
    Mask = Mask.flatten(1)
    ndx = Mask != 0
    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    dset1 = h5file['timeseries'].get(dateList[0])
    sy, sx = np.shape(dset1)
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten(1) * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print '''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        '''
        sys.exit(1)
    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten(1)
    rs = lines * daz

    A = np.zeros([npixel, 4])

    A[:, 0] = Fh
    A[:, 1] = Fh * rs
    A[:, 2] = Fv
    A[:, 3] = Fv * rs

    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), 4])
    try:
        excludedDates = argv[2]
    except:
        excludedDates = []

    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    for i in range(1, len(dateList)):
        if not dateList[i] in excludedDates:
            dset = h5file['timeseries'].get(dateList[i])
            data = dset[0:dset.shape[0], 0:dset.shape[1]]
            L = data.flatten(1)
            Berror = np.dot(np.linalg.pinv(A[ndx]), L[ndx])
            Bh.append(Berror[0])
            Bhrate.append(Berror[1])
            Bv.append(Berror[2])
            Bvrate.append(Berror[3])
            Be[i, :] = Berror
        else:
            print str(dateList[i]
                      ) + ' is not considered for Baseline Error estimation'

    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'baseline error           mean                          std'
    print '       bh     :  ' + str(np.mean(Bh)) + '     ,  ' + str(np.std(Bh))
    print '     bh rate  :  ' + str(np.mean(Bhrate)) + '     ,  ' + str(
        np.std(Bhrate))
    print '       bv     :  ' + str(np.mean(Bv)) + '     ,  ' + str(np.std(Bv))
    print '     bv rate  :  ' + str(np.mean(Bvrate)) + '     ,  ' + str(
        np.std(Bvrate))
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'bh error of each epoch:'
    print Bh
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'bv error of each epoch:'
    print Bv
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'Estimating Baseline error from each differences ...'

    Bedif = np.zeros([len(dateList), 4])
    for i in range(1, len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i - 1])
        data1 = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
        dset2 = h5file['timeseries'].get(dateList[i])
        data2 = dset2[0:dset2.shape[0], 0:dset2.shape[1]]
        data = data2 - data1
        L = data.flatten(1)
        Berrord = np.dot(np.linalg.pinv(A[ndx]), L[ndx])
        Bedif[i, :] = Berrord

    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'

    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])

    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(A, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print 'Correcting the time series ...'
    outName = File.replace('.h5', '') + '_baselineCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')

    for key, value in h5file['timeseries'].attrs.iteritems():
        group.attrs[key] = value

    try:
        dset1 = h5file['mask'].get('mask')
        group = h5orbCor.create_group('mask')
        dset = group.create_dataset('mask', data=dset1, compression='gzip')
    except:
        pass

    h5file.close()
    h5orbCor.close()
Example #24
def main(argv):

    ####################### Inputs Check ########################
    try:    opts, args = getopt.getopt(argv,"h:f:o:",['help'])
    except getopt.GetoptError:    Usage() ; sys.exit(1)
  
    if len(sys.argv) > 4:
        for opt,arg in opts:
            if opt in ("-h","--help"):  Usage();  sys.exit()
            elif opt == '-f':   fileList = arg.split(',')
            elif opt == '-o':   outName  = arg
  
    elif len(sys.argv) <= 4 and len(sys.argv) >= 3:
        fileList = [sys.argv[1],sys.argv[2]]
        try: outName = sys.argv[3]
        except: pass
    else: Usage();  sys.exit(1)
  
    print '\n****************** Add **********************'
    print 'Input files: '
    print fileList
  
    ext = os.path.splitext(fileList[0])[1].lower()
    try:     outName
    except:  outName = fileList[0].split('.')[0]+'_plus_'+fileList[1].split('.')[0]+ext
  
  
    ##### Read File Info / Attributes
    atr  = readfile.read_attributes(fileList[0])
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']
    k = atr['FILE_TYPE']
  
    ##### File Type Check
    if k in ['timeseries','interferograms','coherence','wrapped']:
        for i in range(1,len(fileList)):
            File = fileList[i]
            r = readfile.read_attributes(File)
            if not r['FILE_TYPE'] == k:
                print 'Input file type is not the same: '+r['FILE_TYPE']
                sys.exit(1)
  
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
  
        h5in  = h5py.File(fileList[0])
        epochList = h5in[k].keys()

    ########################### Add file by file ########################
    if k in ['timeseries']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File,'r')
                d = h5file[k].get(epoch)[:]
                data = add(data,d)
                h5file.close()
  
            dset = group.create_dataset(epoch, data=data, compression='gzip')
        for key,value in atr.iteritems():   group.attrs[key] = value
  
        h5out.close()
        h5in.close()
  
    elif k in ['interferograms','coherence','wrapped']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File,'r')
                d = h5file[k][epoch].get(epoch)[:]
                data = add(data,d)
                h5file.close()
  
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5in[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
  
        h5out.close()
        h5in.close()
  
    ## All the other file types
    else:
        data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
        for File in fileList:
            print 'loading '+File
            d,r = readfile.read(File)
            data = add(data,d)
        writefile.write(data,atr,outName)
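
The add() helper used above is not shown in this snippet. A plausible minimal
version (an assumption, not necessarily PySAR's implementation) sums two
arrays element-wise while letting a NaN in one input defer to the other:

import numpy as np

def add(data1, data2):
    # element-wise sum; where one input is NaN, fall back to the other
    data = data1 + data2
    data[np.isnan(data1)] = data2[np.isnan(data1)]
    data[np.isnan(data2)] = data1[np.isnan(data2)]
    return data

print(add(np.array([1.0, np.nan]), np.array([2.0, 3.0])))  # [ 3.  3.]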
Beispiel #25
0
def main(argv):
    inps = cmdLineParse()

    # Input File Info
    atr = readfile.read_attribute(inps.file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    print 'Input file is '+k+': '+inps.file

    # default output filename
    if not inps.outfile:
        if k == 'temporal_coherence':
            inps.outfile = 'maskTempCoh.h5'
        else:
            inps.outfile = 'mask.h5'
        if inps.file.startswith('geo_'):
            inps.outfile = 'geo_'+inps.outfile

    ##### Mask: Non-zero
    if inps.nonzero and k == 'interferograms':
        print 'generate mask for all pixels with non-zero value'
        inps.outfile = ut.nonzero_mask(inps.file, inps.outfile)
        return inps.outfile

    ##### Mask: Threshold 
    print 'create initial mask with the same size as the input file and all = 1'
    mask = np.ones((length, width), dtype=np.float32)

    data, atr = readfile.read(inps.file, epoch=inps.epoch)

    if inps.nonzero:
        print 'all pixels with zero value = 0'
        mask[data == 0] = 0

    # min threshold
    if inps.vmin:
        mask[data<inps.vmin] = 0
        print 'all pixels with value < %s = 0' % str(inps.vmin)

    # max threshold
    if inps.vmax:
        mask[data>inps.vmax] = 0
        print 'all pixels with value > %s = 0' % str(inps.vmax)

    # nan value
    mask[np.isnan(data)] = 0
    print 'all pixels with nan value = 0'

    # subset in Y
    if inps.subset_y:
        y0,y1 = sorted(inps.subset_y)
        mask[0:y0,:] = 0
        mask[y1:length,:] = 0
        print 'all pixels with y OUT of [%d, %d] = 0' % (y0,y1)

    # subset in x
    if inps.subset_x:
        x0,x1 = sorted(inps.subset_x)
        mask[:,0:x0] = 0
        mask[:,x1:width] = 0
        print 'all pixels with x OUT of [%d, %d] = 0' % (x0,x1)
  
    ## Write mask file
    print 'writing >>> '+inps.outfile
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask, atr, inps.outfile)
    return inps.outfile
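
The thresholding above is plain boolean indexing on a float mask; a
standalone illustration with made-up values:

import numpy as np

data = np.array([[0.2, 0.9, np.nan],
                 [0.6, 0.4, 0.8]], dtype=np.float32)
mask = np.ones(data.shape, dtype=np.float32)
mask[data < 0.5] = 0         # vmin threshold
mask[np.isnan(data)] = 0     # NaN pixels
print(mask)                  # [[0. 1. 0.], [1. 0. 1.]]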
Beispiel #26
0
def main(argv):

    outName = 'mask.h5'
    method  = 'threshold'

    ##### Check Inputs
    if len(sys.argv)>2:
        try:   opts, args = getopt.getopt(argv,'h:f:m:M:x:y:o:d:e:',['nonzero'])
        except getopt.GetoptError:      Usage() ; sys.exit(1)
  
        for opt,arg in opts:
            if opt in ("-h","--help"):   Usage();   sys.exit()
            elif opt == '-f':         File = arg
            elif opt == '-m':         minV = float(arg)
            elif opt == '-M':         maxV = float(arg)
            elif opt == '-y':         ysub = [int(i) for i in arg.split(':')];        ysub.sort()
            elif opt == '-x':         xsub = [int(i) for i in arg.split(':')];        xsub.sort()
            elif opt == '-o':         outName    = arg
            elif opt == '-d':         epoch_date = arg
            elif opt == '-e':         epoch_num  = int(arg) - 1
            elif opt == '--nonzero':  method     = 'nonzero'

    elif len(sys.argv)==2:
        if   argv[0] in ['-h','--help']:    Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):       File = argv[0]
        else:    print 'Input file does not exist: '+argv[0];  sys.exit(1)
    else:                                   Usage(); sys.exit(1)

    ##### Input File Info
    atr = readfile.read_attributes(File)
    print '\n****************** Generate Mask *******************'
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']+': '+File
    mask = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
    print 'Create initial mask with the same size as the input file and all = 1'

    ##### Non-zero Mask #######
    if method == 'nonzero':
        k = atr['FILE_TYPE']
        MaskZero = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
  
        ext = os.path.splitext(File)[1].lower()
        if ext == '.h5' and k in ['interferograms','coherence','wrapped','timeseries']:
            h5file = h5py.File(File,'r')
            epochList = h5file[k].keys()
  
            for epoch in epochList:
                print epoch
                if k in ['interferograms','coherence','wrapped']:
                    data = h5file[k][epoch].get(epoch)[:]
                elif k in ['timeseries']:
                    data = h5file[k].get(epoch)
                MaskZero *= data
                MaskZero[np.isnan(data)] = 0
            h5file.close()
  
        else:
            data,atr = readfile.read(File)
            MaskZero *= data
            MaskZero[np.isnan(data)] = 0
  
        mask = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
        mask[MaskZero==0] = 0


    ##### Threshold ##########
    else:
        ##### Read and Initiate Mask
        try:        V, atr = readfile.read(File,epoch_date)
        except:
            try:    V, atr = readfile.read(File,epoch_num)
            except: V, atr = readfile.read(File)
  
        ##### Calculating Mask
        ## threshold
        try:
            mask[V<minV]=0
            print 'all value < '+str(minV)+' = 0'
        except:  print 'No min threshold'
        try:
            mask[V>maxV]=0
            print 'all value > '+str(maxV)+' = 0'
        except:  print 'No max threshold'  
        ## nan value
        mask[np.isnan(V)]=0
  
    ## subset
    try:
        mask[0:ysub[0],:]=0
        mask[ysub[1]:mask.shape[0],:]=0
        print 'all y in [0,'+str(ysub[0])+'] and ['+str(ysub[1])+',end] = 0'
    except:  print 'No subset in y direction'
    try:
        mask[:,0:xsub[0]]=0
        mask[:,xsub[1]:mask.shape[1]]=0
        print 'all x in [0,'+str(xsub[0])+'] and ['+str(xsub[1])+',end] = 0'
    except:  print 'No subset in x direction'
   
  
    ##### Writing mask file
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask,atr,outName)
Beispiel #27
0
def main(argv):
    try:  File = argv[0]
    except:  Usage() ; sys.exit(1)
    try:  maskFile = argv[1]
    except: pass
  
    ##################################
    h5file = h5py.File(File)
    dateList = h5file['timeseries'].keys()
    ##################################
  
    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:  maskFile
    except:
        if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
        else: print 'No mask found!'; sys.exit(1)
    try:  Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Can not open mask file: '+maskFile; sys.exit(1)


    ##################################
    Mask=Mask.flatten(1)
    ndx= Mask !=0
    ##################################
    nt=float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft=float(h5file['timeseries'].attrs['LOOK_REF2'])
    dset1 = h5file['timeseries'].get(dateList[0])
    sy,sx = np.shape(dset1)
    npixel=sx*sy
    lookangle=np.tile(np.linspace(nt,ft,sx),[sy,1])
    lookangle=lookangle.flatten(1)*np.pi/180.0
    Fh=-np.sin(lookangle)
    Fv=-np.cos(lookangle)  
  
    try:
        daz=float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print '''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause: the file is in geo coordinates.
        This function works only in the radar coordinate system.
        '''
        sys.exit(1)
    lines=np.tile(np.arange(0,sy,1),[1,sx])
    lines=lines.flatten(1)
    rs=lines*daz
    
    A = np.zeros([npixel,4])

    A[:,0]=Fh
    A[:,1]=Fh*rs
    A[:,2]=Fv
    A[:,3]=Fv*rs 
  
    Bh=[]
    Bv=[]
    Bhrate=[]
    Bvrate=[]
    Be=np.zeros([len(dateList),4])
    try:     excludedDates = argv[2].split(',')
    except:  excludedDates=[]
  
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    for i in range(1,len(dateList)):
        if not dateList[i] in excludedDates:
            dset = h5file['timeseries'].get(dateList[i])
            data = dset[0:dset.shape[0],0:dset.shape[1]]
            L = data.flatten(1)
            Berror=np.dot(np.linalg.pinv(A[ndx]),L[ndx])
            Bh.append(Berror[0])
            Bhrate.append(Berror[1])
            Bv.append(Berror[2])
            Bvrate.append(Berror[3])
            Be[i,:]=Berror
        else:
            print str(dateList[i]) + ' is not considered for Baseline Error estimation'
  
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%' 
    print 'baseline error           mean                          std'   
    print '       bh     :  ' +str(np.mean(Bh)) + '     ,  '+str(np.std(Bh))
    print '     bh rate  :  ' +str(np.mean(Bhrate)) + '     ,  '+str(np.std(Bhrate))
    print '       bv     :  ' +str(np.mean(Bv)) + '     ,  '+str(np.std(Bv))
    print '     bv rate  :  ' +str(np.mean(Bvrate)) + '     ,  '+str(np.std(Bvrate))
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'       
    print 'bh error of each epoch:'
    print Bh
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'bv error of each epoch:'
    print Bv
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)    
    # plt.show()
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'Estimating baseline error from each difference ...'
  
    Bedif=np.zeros([len(dateList),4])
    for i in range(1,len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i-1])
        data1 = dset1[0:dset1.shape[0],0:dset1.shape[1]]
        dset2 = h5file['timeseries'].get(dateList[i])
        data2 = dset2[0:dset2.shape[0],0:dset2.shape[1]]
        data=data2-data1
        L = data.flatten(1)
        Berrord=np.dot(np.linalg.pinv(A[ndx]),L[ndx])
        Bedif[i,:]=Berrord
       
  
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
  
    yref=int(h5file['timeseries'].attrs['ref_y'])
    xref=int(h5file['timeseries'].attrs['ref_x'])
  
    orbEffect=np.zeros([len(dateList),sy,sx])
    for i in range(1,len(dateList)):
        effect=np.dot(A,Be[i,:])
        effect=np.reshape(effect,[sx,sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect     
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i,:,:]=effect - effect[yref,xref]
        del effect
  
    print 'Correcting the time series ...'
    outName=File.replace('.h5','')+'_baselineCor.h5'
    h5orbCor=h5py.File(outName,'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0],0:dset1.shape[1]] - orbEffect[i,:,:]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')      
  
    for key,value in h5file['timeseries'].attrs.iteritems():
        group.attrs[key] = value
  
    try:
        dset1 = h5file['mask'].get('mask')
        group=h5orbCor.create_group('mask')
        dset = group.create_dataset('mask', data=dset1, compression='gzip')
    except: pass
  
    h5file.close()
    h5orbCor.close()
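
Note: the fit above models each pixel's phase as
L = bh*Fh + bh_rate*Fh*r + bv*Fv + bv_rate*Fv*r, with Fh = -sin(lookangle),
Fv = -cos(lookangle) and r the along-track distance (lines*daz), then
recovers the four unknowns with a pseudo-inverse. A minimal sketch of that
fit on synthetic data (sizes and values are illustrative assumptions, not
PySAR code):

import numpy as np

sy, sx = 50, 60                                   # lines, samples (made up)
look = np.tile(np.linspace(20, 30, sx), [sy, 1])  # look angle [deg], near to far
look = look.flatten(order='F') * np.pi / 180.0    # column-major, like flatten(1)
Fh, Fv = -np.sin(look), -np.cos(look)
r = np.tile(np.arange(sy) * 10.0, sx)             # along-track distance [m]

A = np.column_stack([Fh, Fh * r, Fv, Fv * r])     # one row per pixel, as above
b_true = np.array([0.02, 1e-5, -0.01, 2e-6])      # bh, bh rate, bv, bv rate
L = np.dot(A, b_true)                             # simulated phase screen

b_est = np.dot(np.linalg.pinv(A), L)              # same pinv solve as above
print(b_est)                                      # recovers b_true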
Beispiel #28
0
def main(argv):

    global xsub, ysub, thr
    parallel = 'yes'     ## Use parallel by default for multiple input files

    ######################################
    try:    opts, args = getopt.getopt(argv,"h:f:m:t:x:y:o:",['no-parallel'])
    except getopt.GetoptError:    Usage() ; sys.exit(1)

    if len(sys.argv) > 3:
        for opt,arg in opts:
            if opt in ("-h","--help"):     Usage();  sys.exit()
            elif opt == '-f':        File     = arg.split(',')
            elif opt == '-m':        maskFile = arg
            elif opt == '-t':        thr  = float(arg)
            elif opt == '-y':        ysub = [int(i) for i in arg.split(':')];     ysub.sort()
            elif opt == '-x':        xsub = [int(i) for i in arg.split(':')];     xsub.sort()
            elif opt == '-o':        outFile = arg
            elif opt == '--no-parallel':   parallel = 'no'

    elif len(sys.argv)==3:
        if os.path.isfile(argv[0]) and os.path.isfile(argv[1]):
            File     = argv[0].split(',')
            maskFile = argv[1]
        else:  print 'Input file does not exist: '+argv[0]+' / '+argv[1];  sys.exit(1)
    else:   Usage();  sys.exit(1)

    try:
        File
        maskFile
    except:    Usage() ; sys.exit(1)

    ##### Check Input File List
    print '\n****************** Masking *********************'
    fileList = ut.get_file_list(File)
    print 'number of file to mask: '+str(len(fileList))
    print fileList

    if len(fileList) == 1:
        parallel = 'no'
        try: outFile          ## Customized output file name for one input file only
        except:
            ext     = os.path.splitext(fileList[0])[1]
            outFile = os.path.basename(fileList[0]).split('.')[0]+'_masked'+ext
    elif len(fileList) > 1:
        try:
            del outFile
            print 'Disabled customized output name for multiple input files, continue with automatic naming instead.'
        except: pass
    else: print 'ERROR: No input file!'; sys.exit(1)

    ##### Check parallel computing requirement
    if parallel == 'yes':
        try:
            from joblib import Parallel, delayed
            import multiprocessing
        except:
            print '\nCannot import joblib or multiprocessing!'
            print 'Disabled parallel masking.'
            print 'Continue with masking file by file ...'
            parallel = 'no'

    ###### Read Mask File
    atr_mask = readfile.read_attributes(maskFile)
    k_mask = atr_mask['FILE_TYPE']
    if not k_mask == 'coherence':    ## Read mask file once 
        M,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile

    ##### Masking - file by file
    if parallel == 'no':
        ##### Single Mask
        if not k_mask == 'coherence':
            for in_file in fileList:
                print '-------------------------------------------'
                print 'masking  : '+in_file
                try:    mask_file(in_file,M,outFile)
                except: mask_file(in_file,M)
        ##### Multiple Mask
        else:
            try:    mask_with_multi_masks(fileList[0],maskFile,outFile)
            except: mask_with_multi_masks(fileList[0],maskFile)

    ##### Masking - parallel
    else:
        print '-----------------------'
        print 'parallel masking ...'
        print '-----------------------'
        num_cores = multiprocessing.cpu_count()
        Parallel(n_jobs=num_cores)(delayed(mask_file)(in_file,M) for in_file in fileList)
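
A minimal sketch of the joblib fan-out used above, with a stand-in worker
(mask_worker and the file names are placeholders, not PySAR code):

import multiprocessing
from joblib import Parallel, delayed

def mask_worker(fname, mask):
    # stand-in for mask_file(); real work would read, mask and write fname
    return fname

file_list = ['file1.h5', 'file2.h5', 'file3.h5']
num_cores = multiprocessing.cpu_count()
results = Parallel(n_jobs=num_cores)(delayed(mask_worker)(f, None)
                                     for f in file_list)
print(results)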
Beispiel #29
0
        try:
            outName
        except:
            ext     = os.path.splitext(fileList[0])[1].lower()
            outName = os.path.basename(fileList[0]).split(ext)[0]+'_'+surfType+ext
  
    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:      maskFile
    except:
        try:  maskFile = templateContents['pysar.mask.file']
        except: pass
  
    try:
        Mask,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile
        Masking = 'yes'
    except:
        print 'No mask. Use the whole area for ramp estimation.'
        Masking = 'no'
        Mask=np.ones((length,width))

    ## Plot mask
    if save_mask == 'yes':
        mask_dis = np.zeros((length,width))
        if surfNum == 1:
            mask_dis = Mask
        else:
            i = 0
            mask_dis[ysub[2*i]:ysub[2*i+1],:] = Mask[ysub[2*i]:ysub[2*i+1],:]
Beispiel #30
0
def seed_file(File,outName,refList,ref_x='',ref_y=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        epochNum  = len(epochList)
        print 'number of epochs: '+str(epochNum)
        
        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has different epoch number '+\
                  'from input file.'
            print 'Reference List epoch number: '+str(len(refList))
            print 'Input file     epoch number: '+str(epochNum)
            sys.exit(1)
  
        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            print epoch
            data = h5file[k].get(epoch)[:]
            
            data -= refList[i]
  
            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr  = seed_attributes(atr,ref_x,ref_y)
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr  = h5file[k][epoch].attrs

            data -= refList[i]
            atr  = seed_attributes(atr,ref_x,ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():    gg.attrs[key] = value

            ut.printProgress(i+1,epochNum,'seeding:',epoch)
  
    ##### Single Dataset File
    else:
        data,atr = readfile.read(File)

        data -= refList
        atr  = seed_attributes(atr,ref_x,ref_y)

        writefile.write(data,atr,outName)
  
    ##### End & Cleaning
    try:
        h5file.close()
        h5out.close()
    except: pass

    return 1
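
Seeding boils down to re-referencing every epoch to a chosen pixel, i.e.
subtracting that pixel's value from the whole array so (ref_y, ref_x)
becomes exactly zero. A toy equivalent of the timeseries branch:

import numpy as np

ref_y, ref_x = 1, 2
epochs = [np.random.rand(3, 4) for _ in range(5)]      # fake acquisitions
ref_list = [d[ref_y, ref_x] for d in epochs]           # refList analogue
seeded = [d - r for d, r in zip(epochs, ref_list)]
print([d[ref_y, ref_x] for d in seeded])               # all zeros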
Beispiel #31
0
def spatial_mean(File,mask_orig,box):
    ## Calculate the Spatial Average of all non-nan pixels for each epoch
    ##     and return the mean value.
    ## 
    ## Inputs:
    ##     File      : string, path of the file to average
    ##     mask_orig : mask, same size as File
    ##     box       : 4-tuple defining the left, upper, right, and lower pixel coordinate [optional]
    ## Output: list for multi-dataset file, and float for single-dataset file

    print 'calculating spatial average of '+File+' within '+str(box)+' ...'
    ##### Input File Info
    atr  = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    ##### Bounding Box
    #if box       == None:    box = [0,0,width,length]
    ##### Mask Info
    #if mask_orig == None:    mask_orig = np.ones((length,width))

    mask = mask_orig[box[1]:box[3],box[0]:box[2]]
    idx = mask != 0

    ##### Calculation
    if k in ['timeseries','interferograms','coherence','wrapped']:
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys();
        epochList = sorted(epochList)
        epochNum  = len(epochList)
        #print 'number of epoch: '+str(epochNum)

        meanList   = np.zeros(epochNum)
        for i in range(epochNum):
            epoch = epochList[i]
            if k in ['interferograms','coherence','wrapped']:
                dset = h5file[k][epoch].get(epoch)
            elif k == 'timeseries':
                dset = h5file[k].get(epoch)
            else:  print 'Unrecognized group type: '+k
            
            d = dset[box[1]:box[3],box[0]:box[2]]
            ## suppress warning
            ## url - http://stackoverflow.com/questions/29688168/mean-nanmean-and-warning-mean-of-empty-slice
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                meanList[i] = np.nanmean(d[idx])
            printProgress(i+1,epochNum)
        del d
        h5file.close()
        
        if epochNum == 1:   meanList = float(meanList)

    else:
        data,atr = readfile.read(File,box)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)
            meanList = np.nanmean(data[idx])

    return meanList
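
For a single-dataset file, spatial_mean() reduces to a masked nanmean over a
crop; a standalone numpy equivalent (array sizes and box are made up):

import numpy as np

data = np.random.rand(100, 120)
mask = np.ones(data.shape);  mask[:10, :] = 0   # exclude the first 10 lines
box = (20, 30, 80, 90)                          # (left, upper, right, lower)
d = data[box[1]:box[3], box[0]:box[2]]
m = mask[box[1]:box[3], box[0]:box[2]]
print(np.nanmean(d[m != 0]))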
Beispiel #32
0
def main(argv):

    method    = 'triangular_consistency'    ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'
  
    ##### Check Inputs
    if len(sys.argv)>2:
        try: opts, args = getopt.getopt(argv,'h:f:m:x:y:o:t:',['ramp=','no-ramp-save'])
        except getopt.GetoptError:  print 'Error while getting args';  Usage(); sys.exit(1)
  
        for opt,arg in opts:
            if   opt in ['-h','--help']:    Usage(); sys.exit()
            elif opt in '-f':    File     = arg
            elif opt in '-m':    maskFile = arg
            elif opt in '-o':    outName  = arg
            elif opt in '-x':    x = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-y':    y = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-t':    templateFile = arg
            elif opt in '--ramp'         :  ramp_type    = arg.lower()
            elif opt in '--no-ramp-save' :  save_rampCor = 'no'
  
    elif len(sys.argv)==2:
        if argv[0] in ['-h','--help']:    Usage();  sys.exit()
        elif os.path.isfile(argv[0]):     File = argv[0]
        else:    print 'Input file does not exist: '+argv[0];  sys.exit(1)
  
    else:  Usage(); sys.exit(1)
  
    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass
  
    try:
        yx = [int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except: pass

    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:       maskFile
    except:
        try:    maskFile = templateContents['pysar.mask.file']
        except:
            if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
            else: print 'No mask found!'; sys.exit(1)
    try:    Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Can not open mask file: '+maskFile; sys.exit(1)
  
    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:    outName
    except: outName = File.split('.')[0]+'_unwCor'+ext
  
    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'
  
        h5file=h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls,Triangles,C=ut.get_triangles(h5file)
        A,B = ut.design_matrix(h5file)   
        ligram,lv=np.shape(B)
        lcurls=np.shape(curls)[0]
        print 'Number of all triangles: '+  str(lcurls)
        print 'Number of interferograms: '+ str(ligram)
        #print curls
  
        curlfile='curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile,h5file,Triangles,curls)
         
        thr=0.50
        curls=np.array(curls);   n1=curls[:,0];   n2=curls[:,1];   n3=curls[:,2]
  
        numPixels=sy*sx
        print 'reading interferograms...'   
        data = np.zeros((ligram,numPixels),np.float32)
        for ni in range(ligram):
            dset=h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            data[ni] = d.flatten(1)   
  
        print np.shape(data)
        print 'reading curls ...' 
        h5curl=h5py.File(curlfile)
        curlList=h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls,numPixels),np.float32)
        for ni in range(lcurls):
            dset=h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi=np.pi
        EstUnwrap=np.zeros((ligram,numPixels),np.float32)
  
        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        
        Mask=Mask.flatten(1)

        from scipy.linalg import pinv
        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])
  
            if Mask[ni]==1:
                dU = data[:,ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:,ni])
                #print unwCurl
  
                ind  = np.abs(unwCurl)>=thr;      N1 =n1[ind];      N2 =n2[ind];      N3 =n3[ind]
                indC = np.abs(unwCurl)< thr;      Nc1=n1[indC];     Nc2=n2[indC];     Nc3=n3[indC]
  
                N =np.hstack([N1, N2, N3]);       UniN =np.unique(N)
                Nc=np.hstack([Nc1,Nc2,Nc3]);      UniNc=np.unique(Nc)
  
                inter=list(set(UniNc) & set(UniN)) # intersection
                UniNc= list(UniNc)
                for x in inter:
                    UniNc.remove(x)
  
                D=np.zeros([len(UniNc),ligram])
                for i in range(len(UniNc)):
                    D[i,UniNc[i]]=1
  
                AAA=np.vstack([-2*pi*C,D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B]) 
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
  
                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA) 
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'
  
                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)      
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
  
                ##########
                # with Tikhonov regularization:
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
                LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind=np.isnan(AAAA)
                M1=pinv(AAAA)
                M=np.dot(M1,LLL)
                EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])
  
            else:
                EstUnwrap[:,ni]=np.zeros([ligram])
                if not np.remainder(ni,10000): print 'Processing point: %7d of %7d ' % (ni,numPixels)

        ##### Output
        dataCor = data+EstUnwrap
        unwCorFile=File.replace('.h5','')+'_unwCor.h5';  print 'writing >>> '+unwCorFile
        h5unwCor=h5py.File(unwCorFile,'w') 
        gg = h5unwCor.create_group('interferograms') 
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i], data=np.reshape(dataCor[i,:],[sx,sy]).T, compression='gzip')
            for key, value in h5file['interferograms'][ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value
  
        try:
            MASK=h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except: pass
  
        h5unwCor.close()
        h5file.close()
        h5curl.close() 


    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'
  
        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x),2) != 0:
                print 'Wrong number of bridge points input: '+str(len(x))+' for x, '+str(len(y))+' for y'
                Usage();  sys.exit(1)
        except: print 'Error in reading bridge points info!';  Usage();  sys.exit(1)
        for i in range(0,len(x)):
            if Mask[y[i],x[i]] == 0:
                print '\nERROR: Connecting point ('+str(y[i])+','+str(x[i])+') is out of masked area! Select them again!\n'
                sys.exit(1)
  
        print 'Number of bonding point pairs: '+str(len(x)/2)
        print 'Bonding points coordinates:\nx: '+str(x)+'\ny: '+str(y)
  
        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx  = ''
            n_bridge = len(x)/2
            for i in range(n_bridge):
                pair_yx = str(y[2*i])+','+str(x[2*i])+','+str(y[2*i+1])+','+str(x[2*i+1])
                if not i == n_bridge-1:
                    point_yx += pair_yx+','
                    line_yx  += pair_yx+';'
                else:
                    point_yx += pair_yx
                    line_yx  += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except: pass


        ##### Ramp Info
        ramp_mask = Mask==1
        print 'estimate phase ramp during the correction'
        print 'ramp type: '+ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(ext)[0]+'_'+ramp_type+ext
  
        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:     h5file=h5py.File(File,'r')
            except:  print 'ERROR: Cannot open input file: '+File; sys.exit(1)
            k=h5file.keys()
            if 'interferograms' in k: k[0] = 'interferograms';  print 'Input file is '+k[0]
            else: print 'Input file - '+File+' - is not interferograms.';  Usage();  sys.exit(1)
            igramList = h5file[k[0]].keys()
            igramList = sorted(igramList)
  
            #### Write
            h5out = h5py.File(outName,'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> '+outName
  
            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp,'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> '+outName_ramp
  
            ##### Loop
            print 'Number of interferograms: '+str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]
  
                data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp,Mask,x,y)
                dataCor = data_rampCor - ramp
  
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=dataCor, compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key]=value
  
                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram, data=data_rampCor, compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key]=value
  
            try:
                mask = h5file['mask'].get('mask');
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask', data=mask[0:mask.shape[0],0:mask.shape[1]], compression='gzip')
            except: print 'no mask group found.'
  
            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is '+ext
            a,data,atr = readfile.read_float32(File);
  
            data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp,Mask,x,y)
            dataCor = data_rampCor - ramp
  
            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor,atr,outName_ramp)
  
        else: print 'Unsupported file type: '+ext;  Usage();  sys.exit(1)
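
The triangular-consistency method rests on phase closure: for any
interferogram triplet (ij, jk, ik) the unwrapped phases should close to an
integer number of 2*pi cycles, and a non-zero cycle count flags an
unwrapping error. A toy single-pixel illustration (values made up):

import numpy as np

pi = np.pi
phi_ij, phi_jk = 1.3, 0.9
phi_ik = phi_ij + phi_jk + 2 * pi        # ik carries one spurious cycle
closure = phi_ij + phi_jk - phi_ik
n_cycles = int(np.round(closure / (2 * pi)))
print(n_cycles)                          # -1
phi_ik_cor = phi_ik + n_cycles * 2 * pi  # remove the spurious cycle
print(phi_ij + phi_jk - phi_ik_cor)      # ~0 after correction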
Beispiel #33
0
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print 'input is '+k+' file: '+fname
    print 'operation: file %s %f' % (operator, operand)

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k == 'timeseries':
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(data_out, atr, fname_out)

    return fname_out
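
data_operation() itself is not shown here; a plausible minimal version (an
assumption, mirroring the operator aliases above):

import numpy as np

def data_operation(data, operator, operand):
    # scalar arithmetic on an array, keyed by the same aliases as above
    if operator in ['+', 'plus', 'add', 'addition']:
        return data + operand
    elif operator in ['-', 'minus', 'substract', 'substraction']:
        return data - operand
    elif operator in ['*', 'times', 'multiply', 'multiplication']:
        return data * operand
    elif operator in ['/', 'obelus', 'divide', 'division']:
        return data / operand
    elif operator in ['^', 'pow', 'power']:
        return data ** operand

print(data_operation(np.array([1.0, 2.0]), '*', 3.0))  # [ 3.  6.]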
Beispiel #34
0
def main(argv):

    try:
        file=argv[0]
        geomap=argv[1]
    except:
        Usage();sys.exit(1)
 
    ######################################################################################
    fileName=os.path.basename(file).split('.')[0]
    h5file=h5py.File(file,'r')
    atr = readfile.read_attributes(file)
    k = atr['FILE_TYPE']
    print '\n***************** Geocoding *******************'
    print 'input file: '+k
 
    #### Subsetted radar coded file
    try:
        x0 = float(atr['subset_x0'])
        y0 = float(atr['subset_y0'])
        print '\nSubsetted radar coded file:\n    creating temporary geomap file for it...'
        rg,az,rsc = readfile.read_float32(geomap)
        rg = rg - x0
        az = az - y0
        geomap = 'temp_'+geomap
        print '    writing '+geomap+'\n'
        writefile.write_float32(rg,az,geomap)
        fg = open(geomap+'.rsc','w')
        for kg in rsc.keys():    fg.write(kg+'    '+rsc[kg]+'\n')
        fg.close()
    except: pass


    ######################################################################################
    if k in ['timeseries']:
        outname='epoch_temp.unw'
 
        f = h5py.File('geo_'+file,'w')
        group = f.create_group('timeseries')
        epochList = h5file['timeseries'].keys()
        epochList = sorted(epochList)
        for epoch in epochList:
            print 'geocoding '+epoch
            data = h5file['timeseries'].get(epoch)[:]
 
            amp,unw,unwrsc = geocode_one(data,geomap,outname)
            dset = group.create_dataset(epoch, data=unw, compression='gzip')
 
        atr = geocode_attributes(atr,unwrsc)
        for key,value in atr.iteritems():
            group.attrs[key] = value

    ######################################################################################
    elif k in ['interferograms','coherence','wrapped']:
        if   k == 'interferograms': outname = k[0]+'_temp.unw'
        elif k == 'coherence'     : outname = k[0]+'_temp.cor'
        else:                       outname = k[0]+'_temp.int'
 
        f = h5py.File('geo_'+file,'w')
        gg = f.create_group('interferograms')
        igramList = h5file[k].keys()
        igramList = sorted(igramList)
        for igram in igramList:
            print 'geocoding '+igram
            data = h5file[k][igram].get(igram)[:]
 
            amp,unw,unwrsc = geocode_one(data,geomap,outname)
 
            group = gg.create_group('geo_'+igram)
            dset = group.create_dataset('geo_'+igram, data=unw, compression='gzip')
 
            atr = geocode_attributes(h5file[k][igram].attrs, unwrsc)
            for key,value in atr.iteritems():
                group.attrs[key] = value
 
        #######################  support of old format  #######################
        ### mask
        try:
            data = h5file['mask'].get('mask')[:]
            amp,unw,unwrsc = geocode_one(data,geomap,'mask_'+outname)
            gm = f.create_group('mask')
            dset = gm.create_dataset('mask', data=unw, compression='gzip')
        except:  print 'No group for mask found in the file.'
        ### meanCoherence
        try:
            data = h5file['meanCoherence'].get('meanCoherence')[:]
            amp,unw,unwrsc = geocode_one(data,geomap,'meanCoherence_'+outname)
            gm = f.create_group('meanCoherence')
            dset = gm.create_dataset('meanCoherence', data=unw, compression='gzip')
        except:  print 'No group for meanCoherence found in the file'

    ######################################################################################
    else:
        data,atr = readfile.read(file)
        outname=fileName+'.unw'
 
        amp,unw,unwrsc = geocode_one(data,geomap,outname)
        atr = geocode_attributes(atr,unwrsc)
 
        writefile.write(unw,atr,'geo_'+file)
 
 
    ######################################################################################
    try:
        atr['subset_x0']
        rmCmd='rm '+geomap;            os.system(rmCmd);       print rmCmd
        rmCmd='rm '+geomap+'.rsc';     os.system(rmCmd);       print rmCmd
    except: pass
 
    try:
        f.close()
        h5file.close()
    except: pass
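
The temporary geomap step above also writes a ROI_PAC-style .rsc metadata
file, one "KEY    VALUE" pair per line. A minimal sketch of that pattern
(keys and values here are illustrative):

rsc = {'WIDTH': '1000', 'FILE_LENGTH': '800', 'X_FIRST': '-118.0'}
fg = open('temp_geomap.rsc', 'w')
for kg in rsc.keys():
    fg.write(kg + '    ' + rsc[kg] + '\n')
fg.close()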
Beispiel #35
0
def main(argv):

    ##### Default
    fontSize    = 12
    lineWidth   = 2
    markerColor = 'crimson'
    markerSize  = 16

    disp_fig  = 'no'
    save_fig  = 'yes'
    save_list = 'yes'

    ref_file  = 'reference_date.txt'
    drop_file = 'drop_date.txt'

    ##### Check Inputs
    if len(sys.argv)>3:
        try:
            opts, args = getopt.getopt(argv,'h:f:m:o:x:y:',['help','circle='])
        except getopt.GetoptError:
            print 'Error in reading input options!';  Usage() ; sys.exit(1)

        for opt,arg in opts:
            if opt in ("-h","--help"):    Usage() ; sys.exit()
            elif opt == '-f':  File      = arg
            elif opt == '-m':  maskFile  = arg
            elif opt == '-x':  xsub = [int(i) for i in arg.split(':')];  xsub.sort()
            elif opt == '-y':  ysub = [int(i) for i in arg.split(':')];  ysub.sort()
            elif opt == '--circle'   :  cir_par   = [i for i in arg.split(';')]
            #elif opt == '-o':  outName   = arg
            
    else:
        try:  File = argv[0]
        except: Usage(); sys.exit(1)
        try:  maskFile = argv[1]
        except: pass

    try:  atr  = readfile.read_attributes(File)
    except: Usage(); sys.exit(1)
    ext      = os.path.splitext(File)[1].lower()
    FileBase = os.path.basename(File).split(ext)[0]
    outNameBase = 'spatialMean_'+FileBase
    print '\n*************** Spatial Average ******************'

    ##### Input File Info
    k = atr['FILE_TYPE']
    print 'Input file is '+k
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    h5file = h5py.File(File)
    epochList = h5file[k].keys();
    epochList = sorted(epochList)
    epochNum  = len(epochList)
    print 'number of epochs: '+str(epochNum)
    dates,datevector = ptime.date_list2vector(epochList)

    ##### Mask Info
    try:
        Mask_orig,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile
        Masking = 'yes'
    except:
        print 'No mask. Use the whole area for ramp estimation.'
        Masking = 'no'
        Mask_orig=np.ones((length,width))
    Mask = np.zeros((length,width))
    Mask[:] = Mask_orig[:]

    ## Bounding Subset
    try:
        xsub
        ysub
        ysub,xsub = subset.check_subset_range(ysub,xsub,atr)
        Mask[ysub[0]:ysub[1],xsub[0]:xsub[1]] = Mask_orig[ysub[0]:ysub[1],xsub[0]:xsub[1]]*2
        #Mask[0:ysub[0],:]      = 0
        #Mask[ysub[1]:length,:] = 0
        #Mask[:,0:xsub[0]]      = 0
        #Mask[:,xsub[1]:width]  = 0
    except:
        Mask = Mask_orig*2
        print 'No subset input.'

    ## Circle Inputs
    try:
        cir_par
        for i in range(len(cir_par)):
            cir_idx = circle_index(atr,cir_par[i])
            Mask[cir_idx] = Mask_orig[cir_idx]
            print 'Circle '+str(i)+': '+cir_par[i]
    except: print 'No circle of interest input.'
    
    ## Mask output
    idx = Mask == 2
    idxNum = float(sum(sum(idx)))
    
    fig = plt.figure()
    plt.imshow(Mask,cmap='gray')
    plt.savefig(outNameBase+'_mask.png',bbox_inches='tight')
    print 'save mask to '+outNameBase+'_mask.png'
    #fig.clf()

    ##### Calculation
    meanList   = np.zeros(epochNum)
    pixPercent = np.zeros(epochNum)
    pixT = 0.7
    print 'calculating ...'
    print '  Date       Mean   Percentage'
    for i in range(epochNum):
        epoch = epochList[i]
        d      = h5file[k].get(epoch)[:]
        #d[Mask==0]  = np.nan
        
        meanList[i]   = np.nanmean(d[idx])
        pixPercent[i] = np.sum(d[idx] >= pixT)/idxNum
        
        print epoch+' :   %.2f    %.1f%%'%(meanList[i],pixPercent[i]*100)
    del d
    h5file.close()

    ##### Reference date - Max Value
    top3 = sorted(zip(meanList,epochList), reverse=True)[:3]
    print '------------ Top 3 Mean ------------------'
    print top3
    ## Write to txt file
    fref = open(ref_file,'w')
    fref.write(str(top3[0][1])+'\n')
    fref.close()
    print 'write optimal reference date to '+ref_file
    idxMean = meanList == np.nanmax(meanList)

    ##### Drop dates - mean threshold
    #meanT = 0.7
    #idxMean  = meanList < meanT
    #print '------------ Mean Value < '+str(meanT)+' --------'
    #print np.array(epochList)[idxMean]
    #print meanList[idxMean]

    ##### Drop dates - good pixel percentage
    pixNumT = 0.7
    print '------------ Good Pixel Percentage < %.0f%% -------'%(pixNumT*100)
    idxPix = pixPercent < pixNumT
    dropEpochList = np.array(epochList)[idxPix]
    print dropEpochList
    print pixPercent[idxPix]
    ## Write to txt file
    fdrop = open(drop_file,'w')
    for i in range(len(dropEpochList)):
        fdrop.write(str(dropEpochList[i])+'\n')
    fdrop.close()
    print 'write drop dates to '+drop_file
    print '-------------------------------------------'

    ##### Display
    fig = plt.figure(figsize=(12,12))
    ax  = fig.add_subplot(211)
    ax.plot(dates, meanList, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    #ax.plot([dates[0],dates[-1]],[meanT,meanT], '--b', lw=lineWidth)
    #sc = ax.scatter(dates, np.tile(0.5,epochNum), c=meanList, s=22**2, alpha=0.3, vmin=0.0, vmax=1.0)
    #ax.scatter(np.array(dates)[idxMean], 0.5, c=meanList[idxMean], s=22**2, alpha=1.0, vmin=0.0, vmax=1.0)
    ax = ptime.adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Spatial Average Value', fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    #cbar = plt.colorbar(sc)
    #cbar.set_label('Spatial Mean of Normalized Sum Epochs')

    ax  = fig.add_subplot(212)
    ax.plot(dates, pixPercent, '-ko', ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    ax.plot([dates[0],dates[-1]],[pixNumT,pixNumT], '--b', lw=lineWidth)
    ax = ptime.adjust_xaxis_date(ax,datevector)
    ax.set_ylim(0,1)
    ax.set_title('Percentage of Pixels with Value > '+str(pixNumT), fontsize=fontSize)
    ax.set_xlabel('Time [years]',         fontsize=fontSize)
    vals = ax.get_yticks()
    ax.set_yticklabels(['{:3.0f}%'.format(i*100) for i in vals])

    if save_fig == 'yes':
        plt.savefig(outNameBase+'.png',bbox_inches='tight')
        print 'save figure to '+outNameBase+'.png'

    if disp_fig == 'yes':
        plt.show()

    ##### Output
    if save_list == 'yes':
        epochList6 = ptime.yymmdd(epochList)
        fl = open(outNameBase+'.txt','w')
        for i in range(epochNum):
            str_line = epochList6[i]+'    %.2f    %.2f\n'%(meanList[i],pixPercent[i])
            fl.write(str_line)
        fl.close()
        print 'write data to '+outNameBase+'.txt\n'
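
The reference-date pick above is just an argmax over the per-epoch spatial
means; a compact equivalent with made-up values:

import numpy as np

epoch_list = ['20100101', '20100201', '20100301']
mean_list = np.array([0.62, 0.81, 0.74])
top3 = sorted(zip(mean_list, epoch_list), reverse=True)[:3]
print(top3[0][1])   # '20100201', the date written to reference_date.txt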
Beispiel #36
0
def main(argv):

    method = 'triangular_consistency'  ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'

    ##### Check Inputs
    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(argv, 'h:f:m:x:y:o:t:',
                                       ['ramp=', 'no-ramp-save'])
        except getopt.GetoptError:
            print 'Error while getting args'
            usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                usage()
                sys.exit()
            elif opt in '-f':
                File = arg
            elif opt in '-m':
                maskFile = arg
            elif opt in '-o':
                outName = arg
            elif opt in '-x':
                x = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-y':
                y = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-t':
                templateFile = arg
            elif opt in '--ramp':
                ramp_type = arg.lower()
            elif opt in '--no-ramp-save':
                save_rampCor = 'no'

    elif len(sys.argv) == 2:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit()
        elif os.path.isfile(argv[0]):
            File = argv[0]
        else:
            print 'Input file does not exist: ' + argv[0]
            sys.exit(1)

    else:
        usage()
        sys.exit(1)

    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except:
        pass

    try:
        yx = [
            int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')
        ]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except:
        pass

    ##### Read Mask File
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:
        maskFile
    except:
        try:
            maskFile = templateContents['pysar.mask.file']
        except:
            if os.path.isfile('Modified_Mask.h5'):
                maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):
                maskFile = 'Mask.h5'
            else:
                print 'No mask found!'
                sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print 'mask: ' + maskFile
    except:
        print 'Can not open mask file: ' + maskFile
        sys.exit(1)

    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:
        outName
    except:
        outName = File.split('.')[0] + '_unwCor' + ext

    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'

        h5file = h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls, Triangles, C = ut.get_triangles(h5file)
        A, B = ut.design_matrix(h5file)
        ligram, lv = np.shape(B)
        lcurls = np.shape(curls)[0]
        print 'Number of all triangles: ' + str(lcurls)
        print 'Number of interferograms: ' + str(ligram)
        #print curls

        curlfile = 'curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile, h5file, Triangles, curls)

        thr = 0.50
        curls = np.array(curls)
        n1 = curls[:, 0]
        n2 = curls[:, 1]
        n3 = curls[:, 2]

        numPixels = sy * sx
        print 'reading interferograms...'
        data = np.zeros((ligram, numPixels), np.float32)
        for ni in range(ligram):
            dset = h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            data[ni] = d.flatten(1)

        print np.shape(data)
        print 'reading curls ...'
        h5curl = h5py.File(curlfile)
        curlList = h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls, numPixels), np.float32)
        for ni in range(lcurls):
            dset = h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi = np.pi
        EstUnwrap = np.zeros((ligram, numPixels), np.float32)

        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]

        Mask = Mask.flatten(1)

        from scipy.linalg import pinv
        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])

            if Mask[ni] == 1:
                dU = data[:, ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:, ni])
                #print unwCurl

                ind = np.abs(unwCurl) >= thr
                N1 = n1[ind]
                N2 = n2[ind]
                N3 = n3[ind]
                indC = np.abs(unwCurl) < thr
                Nc1 = n1[indC]
                Nc2 = n2[indC]
                Nc3 = n3[indC]

                N = np.hstack([N1, N2, N3])
                UniN = np.unique(N)
                Nc = np.hstack([Nc1, Nc2, Nc3])
                UniNc = np.unique(Nc)

                inter = list(set(UniNc) & set(UniN))  # intersection
                UniNc = list(UniNc)
                for x in inter:
                    UniNc.remove(x)

                D = np.zeros([len(UniNc), ligram])
                for i in range(len(UniNc)):
                    D[i, UniNc[i]] = 1

                AAA = np.vstack([-2 * pi * C, D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B])
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])

                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA)
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'

                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi

                ##########
                # with Tikhonov regularization:
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])
                LLL = list(np.dot(C, dU)) + list(np.zeros(
                    np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind = np.isnan(AAAA)
                M1 = pinv(AAAA)
                M = np.dot(M1, LLL)
                EstUnwrap[:, ni] = np.round(M[0:ligram]) * 2.0 * np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])

            else:
                EstUnwrap[:, ni] = np.zeros([ligram])

            if not np.remainder(ni, 10000):
                print 'Processing point: %7d of %7d ' % (ni, numPixels)

        ##### Output
        dataCor = data + EstUnwrap
        unwCorFile = File.replace('.h5', '') + '_unwCor.h5'
        print 'writing >>> ' + unwCorFile
        h5unwCor = h5py.File(unwCorFile, 'w')
        gg = h5unwCor.create_group('interferograms')
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i],
                                        data=np.reshape(
                                            dataCor[i, :], [sx, sy]).T,
                                        compression='gzip')
            for key, value in h5file['interferograms'][
                    ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value

        try:
            MASK = h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except:
            pass

        h5unwCor.close()
        h5file.close()
        h5curl.close()

    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'

        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x), 2) != 0:
                print 'Wrong number of bridge points input: ' + str(
                    len(x)) + ' for x, ' + str(len(y)) + ' for y'
                usage()
                sys.exit(1)
        except:
            print 'Error in reading bridge points info!'
            usage()
            sys.exit(1)
        for i in range(0, len(x)):
            if Mask[y[i], x[i]] == 0:
                print '\nERROR: Connecting point (' + str(y[i]) + ',' + str(
                    x[i]) + ') is out of masked area! Select them again!\n'
                sys.exit(1)

        print 'Number of bonding point pairs: ' + str(len(x) / 2)
        print 'Bonding points coordinates:\nx: ' + str(x) + '\ny: ' + str(y)

        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx = ''
            n_bridge = len(x) / 2
            for i in range(n_bridge):
                pair_yx = str(y[2 * i]) + ',' + str(x[2 * i]) + ',' + str(
                    y[2 * i + 1]) + ',' + str(x[2 * i + 1])
                if not i == n_bridge - 1:
                    point_yx += pair_yx + ','
                    line_yx += pair_yx + ';'
                else:
                    point_yx += pair_yx
                    line_yx += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except:
                pass

        ##### Ramp Info
        ramp_mask = Mask == 1
        print 'estimate phase ramp during the correction'
        print 'ramp type: ' + ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(
                ext)[0] + '_' + ramp_type + ext

        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:
                h5file = h5py.File(File, 'r')
            except:
                print 'ERROR: Cannot open input file: ' + File
                sys.exit(1)
            k = h5file.keys()
            if 'interferograms' in k:
                k[0] = 'interferograms'
                print 'Input file is ' + k[0]
            else:
                print 'Input file - ' + File + ' - is not interferograms.'
                usage()
                sys.exit(1)
            igramList = sorted(h5file[k[0]].keys())

            #### Write
            h5out = h5py.File(outName, 'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> ' + outName

            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp, 'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> ' + outName_ramp

            ##### Loop
            print 'Number of interferograms: ' + str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]

                data_ramp, ramp = rm.remove_data_surface(
                    data, ramp_mask, ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp, Mask, x, y)
                dataCor = data_rampCor - ramp

                group = gg.create_group(igram)
                dset = group.create_dataset(igram,
                                            data=dataCor,
                                            compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key] = value

                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram,
                                                     data=data_rampCor,
                                                     compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key] = value

            try:
                mask = h5file['mask'].get('mask')
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask',
                                         data=mask[0:mask.shape[0],
                                                   0:mask.shape[1]],
                                         compression='gzip')
            except:
                print 'no mask group found.'

            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is ' + ext
            a, data, atr = readfile.read_float32(File)

            data_ramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                     ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp, Mask, x, y)
            dataCor = data_rampCor - ramp

            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor, atr, outName_ramp)

        else:
            print 'Un-supported file type: ' + ext
            usage()
            sys.exit(1)

def main(argv):

    ##### Default Values
    save_plot = 'no'
    maskThr  = 0.7

    ##### Check Inputs
    try:  opts, args = getopt.getopt(argv,"f:d:p:m:M:t:o:",['plot'])
    except getopt.GetoptError:  Usage() ; sys.exit(1)

    for opt,arg in opts:
        if   opt == '-f':        timeSeriesFile = arg
        elif opt == '-d':        demFile        = arg
        elif opt == '-p':        p              = int(arg)
        elif opt == '-m':        maskFile       = arg
        elif opt == '-M':        maskThr        = float(arg)
        elif opt == '-t':        corThr         = float(arg)
        elif opt == '-o':        outName        = arg
        elif opt == '--plot':    save_plot      = 'yes'

    try:
        timeSeriesFile
        demFile
    except:
        Usage() ; sys.exit(1)
    
    try:       p
    except:    p=1
    
    try:    outName
    except: outName = timeSeriesFile.split('.')[0]+'_tropHgt.h5'

    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:       maskFile
    except:
        try:    maskFile = templateContents['pysar.mask.file']
        except:
            if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
            else: print 'No mask found!'; sys.exit(1)
    try:    Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Can not open mask file: '+maskFile; sys.exit(1)

    #try:       maskFile
    #except:    maskFile='Mask.h5'
    #print 'Mask file: ' + maskFile 

    #h5Mask=h5py.File(maskFile)
    #kMask=h5Mask.keys()
    #dset = h5Mask[kMask[0]].get(kMask[0])
    #Mask = dset[0:dset.shape[0],0:dset.shape[1]]
    kMask = Matr['FILE_TYPE']
    Mask=Mask.flatten(1)

    #print maskThr

    if   kMask=='mask':                 ndx = Mask != 0
    elif kMask=='temporal_coherence':   ndx = Mask >  maskThr
    else:  print 'Mask file not recognized!';  Usage();  sys.exit(1)    

    #h5Mask.close()

    print '\n************ Tropospheric Delay Correction - Topo-related *************'

    ###################################################
    h5timeseries = h5py.File(timeSeriesFile)
    yref = int(h5timeseries['timeseries'].attrs['ref_y'])
    xref = int(h5timeseries['timeseries'].attrs['ref_x'])
    ###################################################
    dem,demRsc = readfile.read(demFile)
    dem -= dem[yref,xref]

    print 'considering the look angle of each resolution cell...'
    near_LA=float(h5timeseries['timeseries'].attrs['LOOK_REF1'])
    far_LA=float(h5timeseries['timeseries'].attrs['LOOK_REF2'])
    Length,Width=np.shape(dem)
    LA=np.linspace(near_LA,far_LA,Width)
    LA=np.tile(LA,[Length,1])
    dem=dem/np.cos(LA*np.pi/180.0)       
       
    dem=dem.flatten(1)
    print np.shape(dem)
    ###################################################
    if p==1:
        A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem,np.ones(len(dem)))).T
    elif p==2: 
        A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2,dem,np.ones(len(dem)))).T  
    elif p==3:
        A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3,dem**2,dem,np.ones(len(dem)))).T
    print np.shape(A)

    Ainv=np.linalg.pinv(A)
    ###################################################
    print 'Estimating the tropospheric effect using the differences between subsequent epochs and the DEM'
    
    dateList = h5timeseries['timeseries'].keys()
    dateList = sorted(dateList)
    nrows,ncols=np.shape(h5timeseries['timeseries'].get(dateList[0]))
    PAR_EPOCH_DICT_2={} 
    par_diff_Dict={}
    Correlation_Dict={}
    Correlation_Dict[dateList[0]]=0
    Correlation_diff_Dict={}

    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'correlation of dem with :'
    print '******************************'

    for i in range(len(dateList)-1):
        dset1 = h5timeseries['timeseries'].get(dateList[i])
        dset2 = h5timeseries['timeseries'].get(dateList[i+1])
        data1 = dset1[0:dset1.shape[0],0:dset1.shape[1]]
        data2 = dset2[0:dset2.shape[0],0:dset2.shape[1]]
        d = dset2[0:dset2.shape[0],0:dset2.shape[1]] - dset1[0:dset1.shape[0],0:dset1.shape[1]]
         
        del dset1
        del dset2
        d=d.flatten(1)
        data1=data1.flatten(1)
        data2=data2.flatten(1)
        ##############################
        C1=np.zeros([2,len(dem[ndx])])
        C1[0][:]=dem[ndx]
        C1[1][:]=data1[ndx]
        print dateList[i]+': '+str(np.corrcoef(C1)[0][1])
 
        C2=np.zeros([2,len(dem[ndx])])
        C2[0][:]=dem[ndx]
        C2[1][:]=data2[ndx]
        print dateList[i+1]+': '+str(np.corrcoef(C2)[0][1])
        Correlation_Dict[dateList[i+1]]=np.corrcoef(C2)[0][1]
 
        C=np.zeros([2,len(dem[ndx])])
        C[0][:]=dem[ndx]
        C[1][:]=d[ndx]
        print dateList[i]+'-'+dateList[i+1]+': '+str(np.corrcoef(C)[0][1])
        print '******************************'
        Correlation_diff_Dict[dateList[i]+'-'+dateList[i+1]]=np.corrcoef(C)[0][1]

        ##############################
        par=np.dot(Ainv,d[ndx])
        par_diff_Dict[dateList[i]+'-'+dateList[i+1]]=par  
 
        try:
            if np.abs(np.corrcoef(C2)[0][1]) >= corThr:        
                PAR2=np.dot(Ainv,data2[ndx])
            else:
                PAR2=list(np.zeros(p+1))
        except:
             PAR2=np.dot(Ainv,data2[ndx])
  
        PAR_EPOCH_DICT_2[dateList[i+1]]=PAR2
    ###################################################
    print '****************************************'
    print 'Correlation of DEM with each time-series epoch:'
    average_phase_height_cor=0
    for date in dateList:
        print date + ' : '+str(Correlation_Dict[date])
        average_phase_height_cor=average_phase_height_cor+np.abs(Correlation_Dict[date])
    print '****************************************'
    print '****************************************'
    print ''
    print 'Average Correlation of DEM with time-series epochs: ' + str(average_phase_height_cor/(len(dateList)-1))
    print ''
    print '****************************************'
    print '****************************************'

    ###################################################
    if save_plot == 'yes':
        fig=plt.figure(1)
        ax = fig.add_subplot(3,1,1)
        ax.plot(dem[ndx],data1[ndx],'o',ms=1)
        ax = fig.add_subplot(3,1,2)
        ax.plot(dem[ndx],data2[ndx],'o',ms=1)
        ax = fig.add_subplot(3,1,3)
        ax.plot(dem[ndx],d[ndx],'o',ms=1)
        plt.show()

    ###################################################
    # print par_diff_Dict
    par_epoch_Dict={}
    par_epoch_Dict[dateList[1]]=par_diff_Dict[dateList[0]+'-'+dateList[1]]

    for i in range(2,len(dateList)):
        par_epoch_Dict[dateList[i]]=par_epoch_Dict[dateList[i-1]]+par_diff_Dict[dateList[i-1]+'-'+dateList[i]]

    yref = int(h5timeseries['timeseries'].attrs['ref_y'])
    xref = int(h5timeseries['timeseries'].attrs['ref_x'])
    print 'removing the tropospheric delay from each epoch'
    print 'writing >>> '+outName
    h5tropCor = h5py.File(outName,'w')
    group = h5tropCor.create_group('timeseries')
    dset = group.create_dataset(dateList[0], data=h5timeseries['timeseries'].get(dateList[0]), compression='gzip')
    for date in dateList:
        if not date in h5tropCor['timeseries']:
            print date
            dset = h5timeseries['timeseries'].get(date) 
            data = dset[0:dset.shape[0],0:dset.shape[1]]
            par=PAR_EPOCH_DICT_2[date]
   
            tropo_effect = np.reshape(np.dot(B,par),[dset.shape[1],dset.shape[0]]).T
            tropo_effect -= tropo_effect[yref,xref]
            dset = group.create_dataset(date, data=data-tropo_effect, compression='gzip')

    for key,value in h5timeseries['timeseries'].attrs.iteritems():
        group.attrs[key] = value
   
    try: 
        dset1 = h5timeseries['mask'].get('mask')
        group=h5tropCor.create_group('mask')
        dset = group.create_dataset('mask', data=dset1, compression='gzip')
    except: pass

    h5tropCor.close()
    h5timeseries.close()
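
The correction above rests on a simple phase-elevation model: the tropospheric delay of each epoch is approximated by a polynomial of DEM height of degree p, fitted over the reliable pixels, then evaluated everywhere and subtracted. A minimal sketch of the p=1 case with made-up 1-D arrays (masked pixels removed, phase and height referenced to the same pixel):

import numpy as np

# Hypothetical inputs: height and unwrapped phase over reliable pixels
dem = np.array([0., 150., 420., 800., 1300.])
phase = 0.002 * dem + 0.1 + np.array([0.01, -0.02, 0., 0.03, -0.01])

# Linear phase-elevation model (p=1): phase ~ a*h + b
A = np.vstack((dem, np.ones(len(dem)))).T
a, b = np.dot(np.linalg.pinv(A), phase)

# Evaluate the fitted delay and remove it
phase_cor = phase - (a * dem + b)
print np.corrcoef(np.vstack((dem, phase)))[0, 1]      # strong before correction
print np.corrcoef(np.vstack((dem, phase_cor)))[0, 1]  # near zero afterwards
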
Example #38
def load_multi_group_hdf5(fileType,
                          fileList,
                          outfile='unwrapIfgram.h5',
                          exDict=dict()):
    '''Load multiple ROI_PAC files into HDF5 file (Multi-group, one dataset and one attribute dict per group).
    Inputs:
        fileType : string, i.e. interferograms, coherence, snaphu_connect_component, etc.
        fileList : list of path, ROI_PAC .unw/.cor/.int/.byt file
        outfile : string, file name/path of the multi-group hdf5 PySAR file
        exDict : dict, extra attribute dictionary 
    Outputs:
        outfile : output hdf5 file name
        fileList : list of string, files newly added
    '''
    ext = os.path.splitext(fileList[0])[1]
    print 'loading ' + ext + ' files into ' + fileType + ' HDF5 file ...'
    print 'number of ' + ext + ' input: ' + str(len(fileList))

    # Check width/length mode of input files
    fileList, mode_width, mode_length = check_file_size(fileList)
    if not fileList:
        return None, None

    # Check conflict with existing hdf5 file
    fileList2 = check_existed_hdf5_file(fileList, outfile)

    # Open(Create) HDF5 file with r+/w mode based on fileList2
    if fileList2 == fileList:
        # Create and open new hdf5 file with w mode
        print 'number of ' + ext + ' to add: ' + str(len(fileList))
        print 'open ' + outfile + ' with w mode'
        h5file = h5py.File(outfile, 'w')
    elif fileList2:
        # Open existing hdf5 file with r+ mode
        print 'Continue by adding the following new epochs ...'
        print 'number of ' + ext + ' to add: ' + str(len(fileList2))
        print 'open ' + outfile + ' with r+ mode'
        h5file = h5py.File(outfile, 'r+')
        fileList = list(fileList2)
    else:
        print 'All input ' + ext + ' are included, no need to re-load.'
        fileList = None

    # Loop - Writing ROI_PAC files into hdf5 file
    if fileList:
        # Unwrapped interferograms
        if fileType not in h5file.keys():
            gg = h5file.create_group(fileType)  # new hdf5 file
        else:
            gg = h5file[fileType]  # existing hdf5 file

        for file in fileList:
            # Read data and attributes
            print 'Adding ' + file
            data, atr = readfile.read(file)

            # PySAR attributes
            atr['drop_ifgram'] = 'no'
            try:
                atr['PROJECT_NAME'] = exDict['project_name']
            except:
                atr['PROJECT_NAME'] = 'PYSAR'
            key = 'INSAR_PROCESSOR'
            if key not in atr.keys():
                try:
                    atr[key] = exDict['insarProcessor']
                except:
                    pass
            key = 'PLATFORM'
            if ((key not in atr.keys() or not any(re.search(i, atr[key].lower()) for i in sensorList))\
                and exDict.get('PLATFORM')):
                atr[key] = exDict['PLATFORM']

            # Write dataset
            group = gg.create_group(os.path.basename(file))
            dset = group.create_dataset(os.path.basename(file),
                                        data=data,
                                        compression='gzip')

            # Write attributes
            for key, value in atr.iteritems():
                group.attrs[key] = str(value)

        # End of Loop
        h5file.close()
        print 'finished writing to ' + outfile

    return outfile, fileList
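
A hypothetical call of the loader above, assuming a set of ROI_PAC .unw interferograms in the working directory (the glob pattern and project name are made up):

import glob

unw_list = sorted(glob.glob('filt_*rlks.unw'))
extra = {'project_name': 'TESTSAR', 'insarProcessor': 'roipac', 'PLATFORM': None}
outfile, added_list = load_multi_group_hdf5('interferograms', unw_list,
                                            outfile='unwrapIfgram.h5',
                                            exDict=extra)
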
Example #39
def main(argv):
    inps = cmdLineParse()

    #print '\n**************** Output to UNAVCO **************'
    ##### Prepare Metadata
    pysar_meta_dict = readfile.read_attribute(inps.timeseries)
    k = pysar_meta_dict['FILE_TYPE']
    h5_timeseries = h5py.File(inps.timeseries, 'r')
    dateList = sorted(h5_timeseries[k].keys())
    unavco_meta_dict = metadata_pysar2unavco(pysar_meta_dict, dateList)
    print '## UNAVCO Metadata:'
    print '-----------------------------------------'
    info.print_attributes(unavco_meta_dict)

    meta_dict = pysar_meta_dict.copy()
    meta_dict.update(unavco_meta_dict)

    #### Open HDF5 File
    SAT = meta_dict['mission']
    SW = meta_dict[
        'beam_mode']  # should be like FB08 for ALOS, need to find out, Yunjun, 2016-12-26
    RELORB = "%03d" % (int(meta_dict['relative_orbit']))
    FRAME = "%04d" % (int(meta_dict['frame']))
    DATE1 = dt.strptime(meta_dict['first_date'], '%Y-%m-%d').strftime('%Y%m%d')
    DATE2 = dt.strptime(meta_dict['last_date'], '%Y-%m-%d').strftime('%Y%m%d')
    TBASE = "%04d" % (0)
    BPERP = "%05d" % (0)
    outName = SAT + '_' + SW + '_' + RELORB + '_' + FRAME + '_' + DATE1 + '-' + DATE2 + '_' + TBASE + '_' + BPERP + '.he5'

    print '-----------------------------------------'
    print 'writing >>> ' + outName
    f = h5py.File(outName, 'w')
    hdfeos = f.create_group('HDFEOS')
    if 'Y_FIRST' in meta_dict.keys():
        gg_coord = hdfeos.create_group('GRIDS')
    else:
        gg_coord = hdfeos.create_group('SWATHS')
    group = gg_coord.create_group('timeseries')

    ##### Write Attributes to the HDF File
    print 'write metadata to ' + str(f)
    for key, value in meta_dict.iteritems():
        f.attrs[key] = value

    print 'write data to ' + str(group)
    ##### Write Time Series Data
    print 'reading file: ' + inps.timeseries
    print 'number of acquisitions: %d' % len(dateList)
    for date in dateList:
        print date
        data = h5_timeseries[k].get(date)[:, :]
        dset = group.create_dataset(date, data=data, compression='gzip')
        dset.attrs['Title'] = 'Time series displacement'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Incidence_Angle
    if os.path.isfile(inps.incidence_angle):
        print 'reading file: ' + inps.incidence_angle
        inc_angle, inc_angle_meta = readfile.read(inps.incidence_angle)
        dset = group.create_dataset('incidence_angle',
                                    data=inc_angle,
                                    compression='gzip')
        dset.attrs['Title'] = 'Incidence angle'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'degrees'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write DEM
    if os.path.isfile(inps.dem):
        print 'reading file: ' + inps.dem
        dem, dem_meta = readfile.read(inps.dem)
        dset = group.create_dataset('dem', data=dem, compression='gzip')
        dset.attrs['Title'] = 'Digital elevation model'
        dset.attrs['MissingValue'] = INT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = INT_ZERO

    ##### Write Coherence
    if os.path.isfile(inps.coherence):
        print 'reading file: ' + inps.coherence
        coherence, coherence_meta = readfile.read(inps.coherence)
        dset = group.create_dataset('coherence',
                                    data=coherence,
                                    compression='gzip')
        dset.attrs['Title'] = 'Temporal Coherence'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'None'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Mask
    if os.path.isfile(inps.mask):
        print 'reading file: ' + inps.mask
        mask, mask_meta = readfile.read(inps.mask)
        dset = group.create_dataset('mask', data=mask, compression='gzip')
        dset.attrs['Title'] = 'Mask'
        dset.attrs['MissingValue'] = INT_ZERO
        dset.attrs['Units'] = 'None'
        dset.attrs['_FillValue'] = INT_ZERO

    f.close()
    print 'Done.'
    return
Example #40
    if inps.geomap_file: print 'Transform     file: ' + str(inps.geomap_file)
    else:
        print '\nWARNING: No transform file found! Cannot geocode without it.\n'

    #########################################
    # Check the subset (Optional)
    #########################################
    print '\n*************** Subset ****************'
    print "Get tight subset of geomap*.trans file and/or DEM file in geo coord"
    print '--------------------------------------------'
    if inps.geomap_file:
        outName = os.path.splitext(
            inps.geomap_file)[0] + '_tight' + os.path.splitext(
                inps.geomap_file)[1]
        # Get bounding box of non-zero area in geomap*.trans file
        trans_rg, trans_atr = readfile.read(inps.geomap_file, (), 'range')
        idx_row, idx_col = np.nonzero(trans_rg)
        pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                   np.max(idx_col) + 10, np.max(idx_row) + 10)
        inps = subset.subset_box2inps(inps, pix_box, None)
        inps.geomap_file = check_subset_file(inps.geomap_file, vars(inps),
                                             outName)

        # Subset DEM in geo coord
        outName = os.path.splitext(
            inps.dem_geo_file)[0] + '_tight' + os.path.splitext(
                inps.dem_geo_file)[1]
        geomap_atr = readfile.read_attribute(inps.geomap_file)
        pix_box, geo_box = subset.get_coverage_box(geomap_atr)
        inps = subset.subset_box2inps(inps, pix_box, geo_box)
        inps.dem_geo_file = check_subset_file(inps.dem_geo_file, vars(inps),
                                              outName)
Example #41
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print 'estimate phase ramp during the correction'
    print 'ramp type: ' + ramp_type

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i], x_list[i]] == 0:
            print '\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n' % (
                y_list[i], x_list[i])
            sys.exit(1)
    print 'Number of bridges: ' + str(len(x_list) / 2)
    print 'Bonding points coordinates:\nx: ' + str(x_list) + '\ny: ' + str(
        y_list)

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx = ''
        n_bridge = len(x_list) / 2
        for i in range(n_bridge):
            pair_yx = str(y_list[2 * i]) + ',' + str(x_list[2 * i]) + ',' + str(
                y_list[2 * i + 1]) + ',' + str(x_list[2 * i + 1])
            if not i == n_bridge - 1:
                point_yx += pair_yx + ','
                line_yx += pair_yx + ';'
            else:
                point_yx += pair_yx
                line_yx += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print plot_cmd
            os.system(plot_cmd)
        except:
            pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        print 'reference pixel in y/x: %d/%d' % (ref_y, ref_x)
    except:
        sys.exit(
            'ERROR: Can not find ref_y/x value, input file is not referenced in space!'
        )

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0] + '_unwCor' + ext
    ifgram_cor_deramp_file = os.path.splitext(
        ifgram_cor_file)[0] + '_' + ramp_type + ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file, 'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + ifgram_cor_file

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file, 'w')
            group_deramp = h5out_deramp.create_group(k)
            print 'writing >>> ' + ifgram_cor_deramp_file

        ##### Loop
        print 'Number of interferograms: ' + str(ifgram_num)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                       ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            ramp[data == 0.] = 0.
            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram,
                                     data=data_derampCor + ramp,
                                     compression='gzip')
            for key, value in h5[k][ifgram].attrs.iteritems():
                gg.attrs[key] = value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram,
                                                data=data_derampCor,
                                                compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg_deramp.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try:
            h5out_deramp.close()
        except:
            pass

    #### .unw file
    elif ext == '.unw':
        print 'read ' + ifgram_file
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
        data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

        print 'writing >>> ' + ifgram_cor_file
        ramp[data == 0.] = 0.
        ifgram_cor_file = writefile.write(data_derampCor + ramp, atr,
                                          ifgram_cor_file)
        if save_cor_deramp_file:
            print 'writing >>> ' + ifgram_cor_deramp_file
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr,
                                                     ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: ' + ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
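
At its core, the bridging correction shifts each disconnected patch by an integer number of 2*pi cycles so that the unwrapped phase at the two ends of every bridge agrees. A toy sketch of that step with a made-up patch-label array; the actual bridging_data()/phase_bonding() implementations live elsewhere in PySAR and may differ in detail:

import numpy as np

phase = np.array([[0.1, 0.2, 6.4],
                  [0.1, 0.3, 6.5]])     # 2*pi jump between the two patches
patch = np.array([[1, 1, 2],
                  [1, 1, 2]])           # patch labels from the mask
p0 = (0, 1)                             # bridge end in the reference patch
p1 = (0, 2)                             # bridge end in the patch to correct

# Integer number of cycles that closes the bridge
n_cycle = np.round((phase[p1] - phase[p0]) / (2. * np.pi))
phase[patch == patch[p1]] -= 2. * np.pi * n_cycle
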
Example #42
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    #print '\n**************** Subset *********************'
    atr = readfile.read_attribute(inps.file[0])

    ##### Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > footprint
    if not inps.subset_x and not inps.subset_y and not inps.subset_lat and not inps.subset_lon:
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print 'using subset info from ' + inps.reference

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print 'using subset info from ' + inps.template_file

        # 3. Use subset from footprint info
        elif inps.footprint:
            if atr['FILE_TYPE'] == '.trans':
                # Non-zero area in geomap_*.trans file, accurate
                trans_rg, trans_atr = readfile.read(inps.file[0], (), 'range')
                idx_row, idx_col = np.nonzero(trans_rg)
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = box_pixel2geo(pix_box, trans_atr)
            else:
                print 'ERROR: --footprint option only works for geomap_*.trans file.\n'
                inps.footprint = False
                sys.exit(1)

            ## from LAT/LON_REF*, which is not accurate
            #lats = [atr['LAT_REF1'], atr['LAT_REF3'], atr['LAT_REF4'], atr['LAT_REF2']]
            #lons = [atr['LON_REF1'], atr['LON_REF3'], atr['LON_REF4'], atr['LON_REF2']]
            #lats = [float(i) for i in lats]
            #lons = [float(i) for i in lons]
            #lalo_buff = min([max(lats)-min(lats), max(lons)-min(lons)]) * 0.05
            #geo_box = (min(lons)-lalo_buff, max(lats)+lalo_buff, max(lons)+lalo_buff, min(lats)-lalo_buff)
            #pix_box = None
            #if not inps.fill_value: inps.fill_value = np.nan
            #print 'using subset info from scene footprint - LAT/LON_REF1/2/3/4'
        else:
            raise Exception('No subset inputs found!')
        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)

    ##### --bbox option
    if inps.trans_file:
        ## Separate files in radar and geo coord
        rdrFileList = []
        geoFileList = []
        for File in inps.file:
            atr = readfile.read_attribute(File)
            if 'X_FIRST' in atr.keys():
                geoFileList.append(File)
            else:
                rdrFileList.append(File)

        ## Calculate bbox
        rdrFile = rdrFileList[0]
        atr_rdr = readfile.read_attribute(rdrFile)
        if inps.subset_lat and inps.subset_lon:
            print 'use subset input in lat/lon'
            print 'calculate corresponding bounding box in radar coordinate.'
            geo_box = (inps.subset_lon[0], inps.subset_lat[1],
                       inps.subset_lon[1], inps.subset_lat[0])
            pix_box = bbox_geo2radar(geo_box, atr_rdr, inps.trans_file)
        else:
            print 'use subset input in y/x'
            print 'calculate corresponding bounding box in geo coordinate.'
            pix_box = (inps.subset_x[0], inps.subset_y[0], inps.subset_x[1],
                       inps.subset_y[1])
            geo_box = bbox_radar2geo(pix_box, atr_rdr, inps.trans_file)
        print 'geo   box: ' + str(geo_box)
        print 'pixel box: ' + str(pix_box)

        ## Subset files
        inps.fill_value = 0
        print '--------------------------------------------'
        print 'subsetting dataset in geo coord geo_box: ' + str(geo_box)
        inps = subset_box2inps(inps, None, geo_box)
        subset_file_list(geoFileList, inps)
        print '--------------------------------------------'
        print 'subsetting dataset in radar coord pix_box: ' + str(pix_box)
        inps = subset_box2inps(inps, pix_box, None)
        subset_file_list(rdrFileList, inps)

    else:
        ##### Subset files
        subset_file_list(inps.file, inps)

    print 'Done.'
    return
Example #43
def unwrap_error_correction_phase_closure(ifgram_file,
                                          mask_file,
                                          ifgram_cor_file=None):
    '''Correct unwrapping errors in network of interferograms using phase closure.
    Inputs:
        ifgram_file     - string, name/path of interferograms file
        mask_file       - string, name/path of mask file to mask the pixels to be corrected
        ifgram_cor_file - string, optional, name/path of corrected interferograms file
    Output:
        ifgram_cor_file
    Example:
        'unwrapIfgram_unwCor.h5' = unwrap_error_correction_phase_closure('Seeded_unwrapIfgram.h5','mask.h5')
    '''
    print 'read mask from file: ' + mask_file
    mask = readfile.read(mask_file)[0].flatten(1)

    atr = readfile.read_attribute(ifgram_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    pixel_num = length * width

    # Check reference pixel
    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        print 'reference pixel in y/x: %d/%d' % (ref_y, ref_x)
    except:
        sys.exit(
            'ERROR: Can not find ref_y/x value, input file is not referenced in space!'
        )

    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)

    ##### Prepare curls
    curls, Triangles, C = ut.get_triangles(h5)
    curl_num = np.shape(curls)[0]
    print 'Number of      triangles: ' + str(curl_num)

    curl_file = 'curls.h5'
    if not os.path.isfile(curl_file):
        print 'writing >>> ' + curl_file
        ut.generate_curls(curl_file, h5, Triangles, curls)

    thr = 0.50
    curls = np.array(curls)
    n1 = curls[:, 0]
    n2 = curls[:, 1]
    n3 = curls[:, 2]

    print 'reading interferograms...'
    print 'Number of interferograms: ' + str(ifgram_num)
    data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for ni in range(ifgram_num):
        ifgram = ifgram_list[ni]
        d = h5[k][ifgram].get(ifgram)[:].flatten(1)
        data[ni, :] = d
        prog_bar.update(ni + 1)
    prog_bar.close()

    print 'reading curls ...'
    print 'number of curls: ' + str(curl_num)
    h5curl = h5py.File(curl_file, 'r')
    curl_list = sorted(h5curl[k].keys())
    curl_data = np.zeros((curl_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=curl_num)
    for ni in range(curl_num):
        d = h5curl[k][curl_list[ni]].get(curl_list[ni])[:].flatten(1)
        curl_data[ni, :] = d
        prog_bar.update(ni + 1)
    prog_bar.close()
    h5curl.close()

    print 'estimating unwrapping error pixel by pixel ...'
    EstUnwrap = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for ni in range(pixel_num):
        if mask[ni] == 1:
            dU = data[:, ni]
            unwCurl = np.array(curl_data[:, ni])

            ind = np.abs(unwCurl) >= thr
            N1 = n1[ind]
            N2 = n2[ind]
            N3 = n3[ind]
            indC = np.abs(unwCurl) < thr
            Nc1 = n1[indC]
            Nc2 = n2[indC]
            Nc3 = n3[indC]

            N = np.hstack([N1, N2, N3])
            UniN = np.unique(N)
            Nc = np.hstack([Nc1, Nc2, Nc3])
            UniNc = np.unique(Nc)

            inter = list(set(UniNc) & set(UniN))  # intersection
            UniNc = list(UniNc)
            for x in inter:
                UniNc.remove(x)

            D = np.zeros([len(UniNc), ifgram_num])
            for i in range(len(UniNc)):
                D[i, UniNc[i]] = 1

            AAA = np.vstack([-2 * np.pi * C, D])
            AAAA = np.vstack([AAA, 0.25 * np.eye(ifgram_num)])

            ##########
            # with Tikhonov regularization:
            LLL = list(np.dot(C, dU)) + list(np.zeros(
                np.shape(UniNc)[0])) + list(np.zeros(ifgram_num))
            ind = np.isnan(AAAA)
            M1 = pinv(AAAA)
            M = np.dot(M1, LLL)
            EstUnwrap[:, ni] = np.round(M[0:ifgram_num]) * 2.0 * np.pi
        prog_bar.update(ni + 1, suffix='%s/%d' % (ni, pixel_num))
    prog_bar.close()

    dataCor = data + EstUnwrap

    ##### Output
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0] + '_unwCor.h5'
    print 'writing >>> ' + ifgram_cor_file
    h5unwCor = h5py.File(ifgram_cor_file, 'w')
    gg = h5unwCor.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram,
                                    data=np.reshape(dataCor[i, :],
                                                    [width, length]).T,
                                    compression='gzip')
        for key, value in h5[k][ifgram].attrs.iteritems():
            group.attrs[key] = value
        prog_bar.update(i + 1)
    prog_bar.close()
    h5unwCor.close()
    h5.close()
    return ifgram_cor_file
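
For reference, the misclosure that drives this correction: for a triangle of interferograms between dates i, j and k, correctly unwrapped phases satisfy phi_ij + phi_jk - phi_ik ~ 0, so a residual near a multiple of 2*pi flags an unwrapping error. A single-pixel toy check with made-up values:

import numpy as np

phi_ij, phi_jk = 1.2, 0.7
phi_ik = 1.9 + 2. * np.pi               # one spurious extra cycle

closure = phi_ij + phi_jk - phi_ik
n_err = np.round(closure / (2. * np.pi))
print closure, n_err                    # ~ -2*pi and -1
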
Example #44
def geocode_file_geo_lut(fname, lookup_file, fname_out, inps):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file generated by ROI_PAC or Gamma,
                      i.e. geomap_4rlks.trans           from ROI_PAC
                           sim_150911-150922.UTM_TO_RDC from Gamma
        inps        : Namespace, with the interpolation/resampling method (nearest
                      or linear) in inps.interp_method and the value used for points
                      outside of the interpolation domain in inps.fill_value
        fname_out   : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ##### Interpolate value on irregular radar coordinates (from lookup table file value)
    ##### with known value on regular radar coordinates (from radar file attribute)
    ## Grid/regular coordinates from row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: '+fname
    atr_rdr = readfile.read_attribute(fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_old = (np.arange(len_rdr), np.arange(wid_rdr))

    ## Irregular coordinates from data value in lookup table
    print 'reading lookup table file: '+lookup_file
    atr_lut = readfile.read_attribute(lookup_file)
    rg = readfile.read(lookup_file, epoch='range')[0]
    az = readfile.read(lookup_file, epoch='azimuth')[0]
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az>0.0)*(az<=len_rdr)*(rg>0.0)*(rg<=wid_rdr)
    pts_new = np.hstack((az[idx].reshape(-1,1), rg[idx].reshape(-1,1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.empty((len_geo, wid_geo))
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of datasets: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_geo_lut(atr_rdr, atr_lut)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_geo_lut(h5[k][ifgram].attrs, atr_lut, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                       bounds_error=False, fill_value=inps.fill_value)
        data_geo[idx] = RGI_func(pts_new)

        print 'update attributes'
        atr = update_attribute_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
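
The resampling pattern used above, reduced to a self-contained sketch: values on a regular radar grid are interpolated at the irregular (azimuth, range) positions that a lookup table would provide (the grid and points below are synthetic):

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

data = np.arange(12, dtype=np.float32).reshape(3, 4)   # radar grid, 3 lines x 4 cols
pts_old = (np.arange(3), np.arange(4))                 # regular (az, rg) axes

pts_new = np.array([[0.5, 0.5],                        # irregular (az, rg) targets,
                    [1.2, 2.8],                        # one per geocoded pixel
                    [2.0, 0.0]])

RGI_func = RGI(pts_old, data, method='linear', bounds_error=False, fill_value=0.)
print RGI_func(pts_new)
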
Example #45
def mask_file(in_file,M,out_file=''):
    ## Mask input file with mask matrix M

    atr = readfile.read_attributes(in_file)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    if out_file == '':
        ext      = os.path.splitext(in_file)[1]
        out_file = os.path.basename(in_file).split('.')[0]+'_masked'+ext

    if k in ['timeseries','interferograms','wrapped','coherence']:
        h5file = h5py.File(in_file,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        print 'number of epochs: '+str(len(epochList))

        h5out = h5py.File(out_file,'w')
        print 'writing >>> '+out_file

    ##### Multiple Dataset File
    if k == 'timeseries':
        group = h5out.create_group(k)
        for d in epochList:
            print d
            unwset = h5file[k].get(d)
            unw=unwset[0:unwset.shape[0],0:unwset.shape[1]]

            unw = mask_data(unw,M)

            dset = group.create_dataset(d, data=unw, compression='gzip')
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        gg = h5out.create_group(k)
        for igram in epochList:
            print igram
            unwset = h5file[k][igram].get(igram)
            unw=unwset[0:unwset.shape[0],0:unwset.shape[1]]

            unw = mask_data(unw,M)

            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=unw, compression='gzip')
            for key, value in h5file[k][igram].attrs.iteritems():
                group.attrs[key] = value
        try:
            mask = h5file['mask'].get('mask')
            gm = h5out.create_group('mask')
            dset = gm.create_dataset('mask', data=mask, compression='gzip')
        except: print 'no mask group found.'

    ##### Single Dataset File
    else:
        import pysar._writefile as writefile
        unw,atr = readfile.read(in_file)
        unw     = mask_data(unw,M)
        writefile.write(unw,atr,out_file)

    try:
        h5file.close()
        h5out.close()
    except: pass
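
mask_data() is defined elsewhere in PySAR; a minimal stand-in consistent with how it is used here (invalidate pixels where the mask is zero) could look like the following sketch, though the real implementation may differ:

import numpy as np

def mask_data(data, M):
    ## Set pixels where mask M == 0 to NaN (sketch only)
    data = np.array(data, np.float32)
    data[M == 0] = np.nan
    return data
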
Example #46
def main(argv):
    inps = cmdLineParse()
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    ##### Prepare Metadata
    pysar_meta_dict = readfile.read_attribute(inps.timeseries_file)
    k = pysar_meta_dict['FILE_TYPE']
    length = int(pysar_meta_dict['FILE_LENGTH'])
    width = int(pysar_meta_dict['WIDTH'])
    h5_timeseries = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5_timeseries[k].keys())
    dateNum = len(dateList)
    dateListStr = str(dateList).translate(None, "[],u'")
    pysar_meta_dict['DATE_TIMESERIES'] = dateListStr

    unavco_meta_dict = metadata_pysar2unavco(pysar_meta_dict, dateList)
    print '## UNAVCO Metadata:'
    print '-----------------------------------------'
    info.print_attributes(unavco_meta_dict)

    meta_dict = pysar_meta_dict.copy()
    meta_dict.update(unavco_meta_dict)
    print '-----------------------------------------'

    ##### Open HDF5 File
    #####Get output filename
    SAT = meta_dict['mission']
    SW = meta_dict['beam_mode']
    if meta_dict['beam_swath']:
        SW += str(meta_dict['beam_swath'])
    RELORB = "%03d" % (int(meta_dict['relative_orbit']))

    ##Frist and/or Last Frame
    frame1 = int(meta_dict['frame'])
    key = 'first_frame'
    if key in meta_dict.keys():
        frame1 = int(meta_dict[key])
    FRAME = "%04d" % (frame1)
    key = 'last_frame'
    if key in meta_dict.keys():
        frame2 = int(meta_dict[key])
        if frame2 != frame1:
            FRAME += "_%04d" % (frame2)

    TBASE = "%04d" % (0)
    BPERP = "%05d" % (0)
    DATE1 = dt.datetime.strptime(meta_dict['first_date'],
                                 '%Y-%m-%d').strftime('%Y%m%d')
    DATE2 = dt.datetime.strptime(meta_dict['last_date'],
                                 '%Y-%m-%d').strftime('%Y%m%d')
    #end_date = dt.datetime.strptime(meta_dict['last_date'], '%Y-%m-%d')
    #if inps.update and (dt.datetime.utcnow() - end_date) < dt.timedelta(days=365):
    if inps.update:
        print 'Update mode is enabled, put endDate as XXXXXXXX.'
        DATE2 = 'XXXXXXXX'

    #outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'-'+DATE2+'_'+TBASE+'_'+BPERP+'.he5'
    outName = SAT + '_' + SW + '_' + RELORB + '_' + FRAME + '_' + DATE1 + '_' + DATE2 + '.he5'

    if inps.subset:
        print 'Subset mode is enabled, put subset range info in output filename.'
        lat1 = float(meta_dict['Y_FIRST'])
        lon0 = float(meta_dict['X_FIRST'])
        lat0 = lat1 + float(meta_dict['Y_STEP']) * length
        lon1 = lon0 + float(meta_dict['X_STEP']) * width

        lat0Str = 'N%05d' % (round(lat0 * 1e3))
        lat1Str = 'N%05d' % (round(lat1 * 1e3))
        lon0Str = 'E%06d' % (round(lon0 * 1e3))
        lon1Str = 'E%06d' % (round(lon1 * 1e3))
        if lat0 < 0.0: lat0Str = 'S%05d' % (round(abs(lat0) * 1e3))
        if lat1 < 0.0: lat1Str = 'S%05d' % (round(abs(lat1) * 1e3))
        if lon0 < 0.0: lon0Str = 'W%06d' % (round(abs(lon0) * 1e3))
        if lon1 < 0.0: lon1Str = 'W%06d' % (round(abs(lon1) * 1e3))

        SUB = '_%s_%s_%s_%s' % (lat0Str, lat1Str, lon0Str, lon1Str)
        outName = os.path.splitext(outName)[0] + SUB + os.path.splitext(
            outName)[1]

    ##### Open HDF5 File
    print 'writing >>> ' + outName
    f = h5py.File(outName, 'w')
    hdfeos = f.create_group('HDFEOS')
    if 'Y_FIRST' in meta_dict.keys():
        gg_coord = hdfeos.create_group('GRIDS')
    else:
        gg_coord = hdfeos.create_group('SWATHS')
    group = gg_coord.create_group('timeseries')

    ##### Write Attributes to the HDF File
    print 'write metadata to ' + str(f)
    for key, value in meta_dict.iteritems():
        f.attrs[key] = value

    ##### Write Observation - Displacement
    groupObs = group.create_group('observation')
    print 'write data to ' + str(groupObs)

    disDset = np.zeros((dateNum, length, width), np.float32)
    for i in range(dateNum):
        sys.stdout.write('\rreading 3D displacement from file %s: %d/%d ...' %
                         (inps.timeseries_file, i + 1, dateNum))
        sys.stdout.flush()
        disDset[i] = h5_timeseries[k].get(dateList[i])[:]
    print ' '

    dset = groupObs.create_dataset('displacement',
                                   data=disDset,
                                   dtype=np.float32)
    dset.attrs['DATE_TIMESERIES'] = dateListStr
    dset.attrs['Title'] = 'Displacement time-series'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'meters'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Quality
    groupQ = group.create_group('quality')
    print 'write data to ' + str(groupQ)

    ## 1 - temporalCoherence
    print 'reading coherence       from file: ' + inps.coherence_file
    data = readfile.read(inps.coherence_file)[0]
    dset = groupQ.create_dataset('temporalCoherence',
                                 data=data,
                                 compression='gzip')
    dset.attrs['Title'] = 'Temporal Coherence'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = '1'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ## 2 - mask
    print 'reading mask            from file: ' + inps.mask_file
    data = readfile.read(inps.mask_file, epoch='mask')[0]
    dset = groupQ.create_dataset('mask', data=data, compression='gzip')
    dset.attrs['Title'] = 'Mask'
    dset.attrs['MissingValue'] = BOOL_ZERO
    dset.attrs['Units'] = '1'
    dset.attrs['_FillValue'] = BOOL_ZERO

    ##### Write Geometry
    ## Required: height, incidenceAngle
    ## Optional: rangeCoord, azimuthCoord, headingAngle, slantRangeDistance, waterMask, shadowMask
    groupGeom = group.create_group('geometry')
    print 'write data to ' + str(groupGeom)

    ## 1 - height
    print 'reading height          from file: ' + inps.dem_file
    data = readfile.read(inps.dem_file, epoch='height')[0]
    dset = groupGeom.create_dataset('height', data=data, compression='gzip')
    dset.attrs['Title'] = 'Digital elevation model'
    dset.attrs['MissingValue'] = INT_ZERO
    dset.attrs['Units'] = 'meters'
    dset.attrs['_FillValue'] = INT_ZERO

    ## 2 - incidenceAngle
    print 'reading incidence angle from file: ' + inps.inc_angle_file
    data = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0]
    dset = groupGeom.create_dataset('incidenceAngle',
                                    data=data,
                                    compression='gzip')
    dset.attrs['Title'] = 'Incidence angle'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'degrees'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ## 3 - rangeCoord
    try:
        data = readfile.read(inps.rg_coord_file,
                             epoch='rangeCoord',
                             print_msg=False)[0]
        print 'reading range coord     from file: ' + inps.rg_coord_file
        dset = groupGeom.create_dataset('rangeCoord',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Range Coordinates'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No rangeCoord found in file %s' % (inps.rg_coord_file)

    ## 4 - azimuthCoord
    try:
        data = readfile.read(inps.az_coord_file,
                             epoch='azimuthCoord',
                             print_msg=False)[0]
        print 'reading azimuth coord   from file: ' + inps.az_coord_file
        dset = groupGeom.create_dataset('azimuthCoord',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Azimuth Coordinates'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No azimuthCoord found in file %s' % (inps.az_coord_file)

    ## 5 - headingAngle
    try:
        data = readfile.read(inps.head_angle_file,
                             epoch='headingAngle',
                             print_msg=False)[0]
        print 'reading heading angle   from file: ' + inps.head_angle_file
        dset = groupGeom.create_dataset('headingAngle',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Heading Angle'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'degrees'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No headingAngle found in file %s' % (inps.head_angle_file)

    ## 6 - slantRangeDistance
    try:
        data = readfile.read(inps.slant_range_dist_file,
                             epoch='slantRangeDistance',
                             print_msg=False)[0]
        print 'reading slant range distance from file: ' + inps.slant_range_dist_file
        dset = groupGeom.create_dataset('slantRangeDistance',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Slant Range Distance'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No slantRangeDistance found in file %s' % (
            inps.slant_range_dist_file)

    ## 7 - waterMask
    try:
        data = readfile.read(inps.water_mask_file,
                             epoch='waterMask',
                             print_msg=False)[0]
        print 'reading water mask      from file: ' + inps.water_mask_file
        dset = groupGeom.create_dataset('waterMask',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Water Mask'
        dset.attrs['MissingValue'] = BOOL_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = BOOL_ZERO
    except:
        print 'No waterMask found in file %s' % (inps.water_mask_file)

    ## 8 - shadowMask
    try:
        data = readfile.read(inps.shadow_mask_file,
                             epoch='shadowMask',
                             print_msg=False)[0]
        print 'reading shadow mask     from file: ' + inps.shadow_mask_file
        dset = groupGeom.create_dataset('shadowMask',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Shadow Mask'
        dset.attrs['MissingValue'] = BOOL_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = BOOL_ZERO
    except:
        print 'No shadowMask found in file %s' % (inps.shadow_mask_file)

    f.close()
    print 'Done.'
    return
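Each geometry block above repeats one pattern: read a single epoch with readfile.read(), create a gzip-compressed dataset, and attach the Title / MissingValue / Units / _FillValue attributes. A minimal sketch of a helper that would factor this out; the name write_geom_dset and its argument order are illustrative, not part of the original module:

def write_geom_dset(group, dsetName, fname, title, units, fill_value):
    ## Read one epoch from fname and load it into the geometry group,
    ## mirroring the try/except blocks above: if the epoch is missing,
    ## report it and return None instead of raising.
    try:
        data = readfile.read(fname, epoch=dsetName, print_msg=False)[0]
    except:
        print 'No %s found in file %s' % (dsetName, fname)
        return None
    print 'reading %-20s from file: %s' % (dsetName, fname)
    dset = group.create_dataset(dsetName, data=data, compression='gzip')
    dset.attrs['Title'] = title
    dset.attrs['MissingValue'] = fill_value
    dset.attrs['Units'] = units
    dset.attrs['_FillValue'] = fill_value
    return dset

## e.g. write_geom_dset(groupGeom, 'waterMask', inps.water_mask_file,
##                      'Water Mask', '1', BOOL_ZERO)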
Example #47
0
def main(argv):

    ## Default settings
    contour_step = 200.0
    contour_sigma = 3.0
    demShade = "yes"
    demContour = "yes"

    global markerSize, markerSize2, markerColor, markerColor2, rectColor
    global lineWidth, lineWidth2, edgeWidth, fontSize
    # global markerColor_ref, markerColor_ref2

    markerSize = 16
    markerSize2 = 16
    markerColor = "crimson"  # g
    markerColor2 = "lightgray"
    markerColor_ref = "white"
    markerColor_ref2 = "lightgray"
    rectColor = "black"
    lineWidth = 0
    lineWidth2 = 0
    edgeWidth = 1.5
    fontSize = 16

    global unit, radius, saveFig, dispFig, fig_dpi

    fig_dpi = 300
    radius = 0
    saveFig = "no"
    dispFig = "yes"
    unit = "cm"

    dispDisplacement = "no"
    dispOpposite = "no"
    dispContour = "only"
    smoothContour = "no"
    contour_step = 200
    showRef = "yes"
    vel_alpha = 1.0
    zero_start = "yes"

    global ref_xsub, ref_ysub, ref_date
    global h5timeseries_2, dates_2, dateList_2
    global lbound, hbound

    ############### Check Inputs ##################
    if len(sys.argv) < 2:
        Usage()
        sys.exit(1)
    elif len(sys.argv) == 2:
        if argv[0] == "-h":
            Usage()
            sys.exit(1)
        elif os.path.isfile(argv[0]):
            timeSeriesFile = argv[0]
            h5timeseries = h5py.File(timeSeriesFile)
            k = h5timeseries.keys()
            if not "timeseries" in k:
                print "ERROR: Input file is " + k[0] + ".\n\tOnly timeseries is supported.\n"
                sys.exit(1)
        else:
            Usage()
            sys.exit(1)

    elif len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(
                argv,
                "f:F:v:a:b:s:m:c:w:u:l:h:D:V:t:T:d:r:x:y:X:Y:o:E:",
                [
                    "save",
                    "nodisplay",
                    "unit=",
                    "exclude=",
                    "ref-date=",
                    "rect-color=",
                    "zero-start=",
                    "zoom-x=",
                    "zoom-y=",
                    "zoom-lon",
                    "zoom-lat",
                    "lalo=",
                    "opposite",
                    "dem-nocontour",
                    "dem-noshade",
                    "displacement",
                    "contour-step=",
                    "contour-smooth=",
                    "LALO=",
                ],
            )
        except getopt.GetoptError:
            Usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt == "-f":
                timeSeriesFile = arg
            elif opt == "-F":
                timeSeriesFile_2 = arg
            elif opt == "-v":
                velocityFile = arg
            elif opt == "-a":
                vmin = float(arg)
            elif opt == "-b":
                vmax = float(arg)
            elif opt == "-s":
                fontSize = int(arg)
            elif opt == "-m":
                markerSize = int(arg)
                markerSize2 = int(arg)
            elif opt == "-c":
                markerColor = arg
            elif opt == "-w":
                lineWidth = int(arg)
            elif opt == "-u":
                unit = arg
            elif opt == "-l":
                lbound = float(arg)
            elif opt == "-h":
                hbound = float(arg)
            elif opt == "-D":
                demFile = arg
            elif opt == "-V":
                contour_step = float(arg)
            elif opt == "-t":
                minDate = arg
            elif opt == "-T":
                maxDate = arg
            elif opt == "-r":
                radius = abs(int(arg))
            elif opt == "-x":
                xsub = [int(i) for i in arg.split(":")]
                xsub.sort()
                # dispVelFig='no'
            elif opt == "-y":
                ysub = [int(i) for i in arg.split(":")]
                ysub.sort()
                # dispVelFig='no'
            elif opt == "-X":
                ref_xsub = [int(i) for i in arg.split(":")]
                ref_xsub.sort()
            elif opt == "-Y":
                ref_ysub = [int(i) for i in arg.split(":")]
                ref_ysub.sort()
                # dispVelFig='no'

            elif opt == "--contour-step":
                contour_step = float(arg)
            elif opt == "--contour-smooth":
                contour_sigma = float(arg)
            elif opt == "--dem-nocontour":
                demContour = "no"
            elif opt == "--dem-noshade":
                demShade = "no"
            elif opt == "--displacement":
                dispDisplacement = "yes"
            elif opt in ["-E", "--exclude"]:
                datesNot2show = arg.split(",")
            elif opt in "--lalo":
                lalosub = [float(i) for i in arg.split(",")]
            elif opt in "--LALO":
                ref_lalosub = [float(i) for i in arg.split(",")]
            elif opt in ["--rect-color"]:
                rectColor = arg
            elif opt in ["--ref-date"]:
                ref_date = ptime.yyyymmdd(arg)
            elif opt in ["-u", "--unit"]:
                unit = arg.lower()
            elif opt == "--save":
                saveFig = "yes"
            elif opt == "--nodisplay":
                dispFig = "no"
                saveFig = "yes"
            elif opt == "--opposite":
                dispOpposite = "yes"
            elif opt == "--zero-start":
                zero_start = arg.lower()
            elif opt == "--zoom-x":
                win_x = [int(i) for i in arg.split(":")]
                win_x.sort()
            elif opt == "--zoom-y":
                win_y = [int(i) for i in arg.split(":")]
                win_y.sort()
            elif opt == "--zoom-lon":
                win_lon = [float(i) for i in arg.split(":")]
                win_lon.sort()
            elif opt == "--zoom-lat":
                win_lat = [float(i) for i in arg.split(":")]
                win_lat.sort()

    ##############################################################
    ## Read time series file info
    if not os.path.isfile(timeSeriesFile):
        print "\nERROR: Input time series file does not exist: " + timeSeriesFile + "\n"
        sys.exit(1)
    h5timeseries = h5py.File(timeSeriesFile)
    k = h5timeseries.keys()
    # read h5 file and its group type
    if not "timeseries" in k:
        print "ERROR: Input file is " + k[0] + ".\n\tOnly timeseries is supported.\n"
        sys.exit(1)

    atr = readfile.read_attributes(timeSeriesFile)
    dateList1 = h5timeseries["timeseries"].keys()
    dateList1 = sorted(dateList1)
    dates1, datevector1 = ptime.date_list2vector(dateList1)
    print "\n************ Time Series Display - Point *************"

    ##### Select Check
    try:
        lalosub
        xsub = subset.coord_geo2radar([lalosub[1]], atr, "longitude")
        ysub = subset.coord_geo2radar([lalosub[0]], atr, "latitude")
        xsub = [xsub]
        ysub = [ysub]
        if radius == 0:
            radius = 3
    except:
        pass

    try:
        ref_lalosub
        ref_xsub = subset.coord_geo2radar([ref_lalosub[1]], atr, "longitude")
        ref_ysub = subset.coord_geo2radar([ref_lalosub[0]], atr, "latitude")
        ref_xsub = [ref_xsub]
        ref_ysub = [ref_ysub]
        if radius == 0:
            radius = 3
    except:
        pass

    ##############################################################
    global dates, dateList, datevector_all, dateListMinMax

    print "*******************"
    print "All dates existed:"
    print dateList1
    print "*******************"

    ## Check exclude date input
    try:
        datesNot2show
        if os.path.isfile(datesNot2show[0]):
            try:
                datesNot2show = ptime.read_date_list(datesNot2show[0])
            except:
                print "Can not read date list file: " + datesNot2show[0]
        print "dates not to show: " + str(datesNot2show)
    except:
        datesNot2show = []

    ## Check Min / Max Date
    dateListMinMax = []
    try:
        minDate
        minDate = ptime.yyyymmdd(minDate)
        dateListMinMax.append(minDate)
        minDateyy = ptime.yyyymmdd2years(minDate)
        print "minimum date: " + minDate
        for date in dateList1:
            yy = ptime.yyyymmdd2years(date)
            if yy < minDateyy:
                datesNot2show.append(date)
    except:
        pass
    try:
        maxDate
        maxDate = ptime.yyyymmdd(maxDate)
        dateListMinMax.append(maxDate)
        maxDateyy = ptime.yyyymmdd2years(maxDate)
        print "maximum date: " + maxDate
        for date in dateList1:
            yy = ptime.yyyymmdd2years(date)
            if yy > maxDateyy:
                datesNot2show.append(date)
    except:
        pass

    dateListMinMax = sorted(dateListMinMax)
    if not dateListMinMax:
        print "no min/max date input."
    else:
        datesMinMax, dateVecMinMax = ptime.date_list2vector(dateListMinMax)

    ## Finalize Date List
    try:
        dateList = []
        for date in dateList1:
            if date not in datesNot2show:
                dateList.append(date)
        print "--------------------------------------------"
        print "dates used to show time series displacements:"
        print dateList
        print "--------------------------------------------"
    except:
        dateList = dateList1
        print "using all dates to show time series displacement"

    ## Read Date Info (x axis for time series display)
    dates, datevector = ptime.date_list2vector(dateList)
    datevector_all = list(datevector)

    ## Check reference date input
    try:
        ref_date
        if ref_date not in dateList:
            print "Reference date - " + ref_date + " - is not included in date list to show."
            sys.exit(1)
        else:
            print "reference date: " + ref_date
    except:
        if zero_start == "yes":
            ref_date = dateList[0]
            print "set the 1st date as reference for displacement display."
        else:
            pass

    ##############################################################
    ##### Plot Fig 1 - Velocity / last epoch of time series / DEM
    fig = plt.figure(1)
    ax = fig.add_subplot(111)

    ##### Check subset range
    width = int(atr["WIDTH"])
    length = int(atr["FILE_LENGTH"])
    print "file size: " + str(length) + ", " + str(width)
    try:
        win_y = subset.coord_geo2radar(win_lat, atr, "latitude")
    except:
        try:
            win_y
        except:
            win_y = [0, length]
    try:
        win_x = subset.coord_geo2radar(win_lon, atr, "longitude")
    except:
        try:
            win_x
        except:
            win_x = [0, width]
    win_y, win_x = subset.check_subset_range(win_y, win_x, atr)

    try:
        velocityFile
        try:
            vel, vel_atr = readfile.read(velocityFile)
        except:
            vel, vel_atr = readfile.read(timeSeriesFile, velocityFile)
        ax.set_title(velocityFile)
        print "display: " + velocityFile
    except:
        vel, vel_atr = readfile.read(timeSeriesFile, dateList1[-1])
        ax.set_title("epoch: " + dateList1[-1])
        print "display last epoch"

    ##### show displacement instead of phase
    if vel_atr["FILE_TYPE"] in ["interferograms", ".unw"] and dispDisplacement == "yes":
        print "show displacement"
        phase2range = -float(vel_atr["WAVELENGTH"]) / (4 * np.pi)
        vel *= phase2range
    else:
        dispDisplacement = "no"

    ## Reference Point
    if showRef == "yes":
        try:
            ax.plot(int(atr["ref_x"]), int(atr["ref_y"]), "ks", ms=6)
        except:
            pass

    if dispOpposite == "yes":
        print "show opposite value in figure/map 1"
        vel *= -1

    ## Flip
    try:
        flip_lr
    except:
        try:
            flip_ud
        except:
            flip_lr, flip_ud = view.auto_flip_check(atr)

    ## Status bar
    ## Geo coordinate
    try:
        ullon = float(atr["X_FIRST"])
        ullat = float(atr["Y_FIRST"])
        lon_step = float(atr["X_STEP"])
        lat_step = float(atr["Y_STEP"])
        lon_unit = atr["Y_UNIT"]
        lat_unit = atr["X_UNIT"]
        geocoord = "yes"
        print "Input file is Geocoded"
    except:
        geocoord = "no"

    def format_coord(x, y):
        col = int(x + 0.5)
        row = int(y + 0.5)
        if 0 <= col < width and 0 <= row < length:
            z = vel[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return "x=%.1f, y=%.1f, value=%.4f, lon=%.4f, lat=%.4f" % (x, y, z, lon, lat)
            except:
                return "x=%.1f, y=%.1f, value=%.4f" % (x, y, z)

    ax.format_coord = format_coord

    ## DEM
    try:
        demFile
        dem, demRsc = readfile.read(demFile)
        ax = view.plot_dem_yx(ax, dem, demShade, demContour, contour_step, contour_sigma)
        vel_alpha = 0.8
    except:
        print "No DEM file"

    try:
        img = ax.imshow(vel, vmin=vmin, vmax=vmax, alpha=vel_alpha)
    except:
        img = ax.imshow(vel, alpha=vel_alpha)
    plt.colorbar(img)

    ## Zoom In (subset)
    if flip_lr == "yes":
        ax.set_xlim(win_x[1], win_x[0])
    else:
        ax.set_xlim(win_x[0], win_x[1])
    if flip_ud == "yes":
        ax.set_ylim(win_y[0], win_y[1])
    else:
        ax.set_ylim(win_y[1], win_y[0])

    ## Flip
    # if flip_lr == 'yes':  fig.gca().invert_xaxis()
    # if flip_ud == 'yes':  fig.gca().invert_yaxis()

    ##########################################
    ##### Plot Fig 2 - Time series plot
    # fig2 = plt.figure(num=2,figsize=(12,6))
    fig2 = plt.figure(2, figsize=(12, 6))
    ax2 = fig2.add_subplot(111)

    try:
        timeSeriesFile_2
        h5timeseries_2 = h5py.File(timeSeriesFile_2)
        dateList_2 = h5timeseries_2["timeseries"].keys()
        dateList_2 = sorted(dateList_2)
        dates_2, datevector_2 = ptime.date_list2vector(dateList_2)
        datevector_all += list(set(datevector_2) - set(datevector_all))
        datevector_all = sorted(datevector_all)
    except:
        pass

    ################################  Plot Code Package <start> #################################
    def plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries):
        ax2.cla()
        print "\n-------------------------------------------------------------------------------"
        disp_min = 0
        disp_max = 0

        ############################# Plot Time Series ##############################
        global ref_xsub, ref_ysub
        ##### 1.1 Plot Reference time series
        try:
            ref_xsub
            ref_ysub
            ref_xsub, ref_ysub = check_yx(ref_xsub, ref_ysub, radius, ax, rectColor)
            print "----------------------------------------------------"
            print "Reference Point:"
            print "ref_x=" + str(ref_xsub[0]) + ":" + str(ref_xsub[1])
            print "ref_y=" + str(ref_ysub[0]) + ":" + str(ref_ysub[1])

            print "-----------------------------"
            print "Time series with all dates:"
            dis1, dis1_mean, dis1_std, dis1_vel = read_dis(ref_xsub, ref_ysub, dateList1, h5timeseries, unit)
            (_, caps, _) = ax2.errorbar(
                dates1,
                dis1_mean,
                yerr=dis1_std,
                fmt="-ks",
                ms=markerSize2,
                lw=0,
                alpha=1,
                mfc=markerColor_ref,
                mew=edgeWidth,
                elinewidth=edgeWidth,
                ecolor="black",
                capsize=markerSize * 0.5,
            )
            for cap in caps:
                cap.set_markeredgewidth(edgeWidth)
            disp_min, disp_max = update_lim(disp_min, disp_max, dis1_mean, dis1_std)

            if not len(dateList) == len(dateList1):
                print "-----------------------------"
                print "Time series with dates of interest:"
                dis12, dis12_mean, dis12_std, dis12_vel = read_dis(ref_xsub, ref_ysub, dateList, h5timeseries, unit)
                (_, caps, _) = ax2.errorbar(
                    dates,
                    dis12_mean,
                    yerr=dis12_std,
                    fmt="-ks",
                    ms=markerSize2,
                    lw=0,
                    alpha=1,
                    mfc=markerColor_ref2,
                    mew=edgeWidth,
                    elinewidth=edgeWidth,
                    ecolor="black",
                    capsize=markerSize * 0.5,
                )
                for cap in caps:
                    cap.set_markeredgewidth(edgeWidth)
                disp_min, disp_max = update_lim(disp_min, disp_max, dis12_mean, dis12_std)

        except:
            pass

        ##### 1.2.0 Read y/x
        print "\n----------------------------------------------------"
        print "Point of Interest:"
        xsub, ysub = check_yx(xsub, ysub, radius, ax, rectColor)
        print "x=" + str(xsub[0]) + ":" + str(xsub[1])
        print "y=" + str(ysub[0]) + ":" + str(ysub[1])

        ##### 1.2.1 Plot 2nd time series
        try:
            timeSeriesFile_2
            print "-----------------------------"
            print "2nd Time Series:"
            dis2, dis2_mean, dis2_std, dis2_vel = read_dis(xsub, ysub, dateList_2, h5timeseries_2, unit)
            (_, caps, _) = ax2.errorbar(
                dates_2,
                dis2_mean,
                yerr=dis2_std,
                fmt="-ko",
                ms=markerSize2,
                lw=0,
                alpha=1,
                mfc=markerColor2,
                elinewidth=0,
                ecolor="black",
                capsize=0,
            )
            for cap in caps:
                cap.set_markeredgewidth(edgeWidth)
            disp_min, disp_max = update_lim(disp_min, disp_max, dis2_mean, dis2_std)
        except:
            pass

        ##### 1.2.2 Plot 1st time series
        print "-----------------------------"
        print "Time Series:"
        dis, dis_mean, dis_std, dis_vel = read_dis(xsub, ysub, dateList, h5timeseries, unit)
        (_, caps, _) = ax2.errorbar(
            dates,
            dis_mean,
            yerr=dis_std,
            fmt="-ko",
            ms=markerSize,
            lw=lineWidth,
            alpha=1,
            mfc=markerColor,
            elinewidth=edgeWidth,
            ecolor="black",
            capsize=markerSize * 0.5,
        )
        for cap in caps:
            cap.set_markeredgewidth(edgeWidth)
        disp_min, disp_max = update_lim(disp_min, disp_max, dis_mean, dis_std)

        ####################### Figure Format #######################
        ## x axis format
        try:
            ax2 = ptime.adjust_xaxis_date(ax2, dateVecMinMax, fontSize)
        except:
            ax2 = ptime.adjust_xaxis_date(ax2, datevector_all, fontSize)

        ## y axis format
        ax2.set_ylabel("Displacement [" + unit + "]", fontsize=fontSize)
        try:
            lbound
            hbound
            ax2.set_ylim(lbound, hbound)
        except:
            disp_buf = 0.2 * (disp_max - disp_min)
            ax2.set_ylim(disp_min - disp_buf, disp_max + disp_buf)
        for tick in ax2.yaxis.get_major_ticks():
            tick.label.set_fontsize(fontSize)

        ## title
        figTitle = "x=" + str(xsub[0]) + ":" + str(xsub[1]) + ", y=" + str(ysub[0]) + ":" + str(ysub[1])
        try:
            lonc = ullon + (xsub[0] + xsub[1]) / 2.0 * lon_step
            latc = ullat + (ysub[0] + ysub[1]) / 2.0 * lat_step
            figTitle += ", lalo=" + "%.4f,%.4f" % (latc, lonc)
        except:
            pass
        ax2.set_title(figTitle)

        ################## Save and Output #####################
        if saveFig == "yes":
            print "-----------------------------"
            Delay = {}
            Delay["displacement"] = dis
            Delay["unit"] = unit
            Delay["time"] = datevector
            Delay["velocity"] = dis_vel[0]
            Delay["velocity_unit"] = unit + "/yr"
            Delay["velocity_std"] = dis_vel[4]
            figBase = "x" + str(xsub[0]) + "_" + str(xsub[1] - 1) + "y" + str(ysub[0]) + "_" + str(ysub[1] - 1)
            sio.savemat(figBase + "_ts.mat", {"displacement": Delay})
            print "saved " + figBase + "_ts.mat"
            fig2.savefig(figBase + "_ts.pdf", bbox_inches="tight", transparent=True, dpi=fig_dpi)
            print "saved " + figBase + "_ts.pdf"
            if dispFig == "no":
                fig.savefig(figBase + "_vel.png", bbox_inches="tight", transparent=True, dpi=fig_dpi)
                print "saved " + figBase + "_vel.png"

    ################################  Plot Code Package <end> #################################

    ########### 1. Plot Time Series with x/y ##########
    try:
        xsub
        ysub
        plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries)
    except:
        print "No x/y input"
        pass

    ########### 2. Plot Time Series with Click ##########
    ## similar to 1. Plot Time Series with x/y

    def onclick(event):
        ax2.cla()
        xsub = [int(event.xdata)]
        ysub = [int(event.ydata)]
        plot_ts(ax, ax2, fig2, xsub, ysub, h5timeseries)

        if dispFig == "yes":
            plt.show()

    try:
        cid = fig.canvas.mpl_connect("button_press_event", onclick)
    except:
        pass

    if dispFig == "yes":
        plt.show()
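The interactive half of this example rests on a single matplotlib call: fig.canvas.mpl_connect('button_press_event', handler), which invokes the handler with an event whose xdata/ydata give the click position in data coordinates. A stripped-down sketch of the same click-to-replot pattern, with a random array standing in for the velocity map:

import numpy as np
import matplotlib.pyplot as plt

vel = np.random.rand(100, 100)          # stand-in for the velocity map
fig, (ax, ax2) = plt.subplots(1, 2)
ax.imshow(vel)

def onclick(event):
    if event.inaxes is not ax:          # ignore clicks outside the map axes
        return
    y, x = int(event.ydata), int(event.xdata)
    ax2.cla()
    ax2.plot(vel[y, :], 'k-')           # replot profile at the clicked row
    ax2.set_title('y=%d, x=%d' % (y, x))
    fig.canvas.draw()

cid = fig.canvas.mpl_connect('button_press_event', onclick)
plt.show()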
Example #48
0
def main(argv):

    try:  
        File = argv[0]
        alks = int(argv[1])
        rlks = int(argv[2])
    except:
        Usage();sys.exit(1)
  
    ext = os.path.splitext(File)[1]
    try:     outName = argv[3]
    except:  outName = File.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks'+ext
  
    ################################################################################
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    print '\n***************** Multilooking *********************'
    print 'number of multilooking in azimuth / latitude  direction: '+str(alks)
    print 'number of multilooking in range   / longitude direction: '+str(rlks)
    print 'input file: '+k
  
    if k in ['interferograms','coherence','wrapped','timeseries']:
        h5file     = h5py.File(File,'r')
        h5file_mli = h5py.File(outName,'w')
  
        print 'writing >>> '+outName 
  
        if k in ['interferograms','coherence','wrapped']:
            gg = h5file_mli.create_group(k)
            igramList = h5file[k].keys()
            igramList = sorted(igramList)
  
            for igram in igramList:
                print igram
                unw = h5file[k][igram].get(igram)[:]
                unwlks = multilook(unw,alks,rlks)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unwlks, compression='gzip')
  
                atr = h5file[k][igram].attrs
                atr = multilook_attributes(atr,alks,rlks)
                for key, value in atr.iteritems():   group.attrs[key] = value
  
        elif k == 'timeseries':
            dateList=h5file[k].keys()
            dateList = sorted(dateList)
  
            group = h5file_mli.create_group(k)
            for d in dateList:
                print d
                unw = h5file[k].get(d)[:]
                unwlks=multilook(unw,alks,rlks)
                dset = group.create_dataset(d, data=unwlks, compression='gzip')
  
            ## Update attributes
            atr = h5file[k].attrs
            atr = multilook_attributes(atr,alks,rlks)
            for key, value in atr.iteritems():   group.attrs[key] = value
  
        h5file.close()
        h5file_mli.close()

    ################################################################################
    else:
        ####### To multilook a geomap*.trans file, both its size and its values need to be reduced.
        if k == '.trans':
            rg,az,atr = readfile.read(File)
            rgmli = multilook(rg,alks,rlks);    #rgmli = rgmli/float(rlks)
            azmli = multilook(az,alks,rlks);    #azmli = azmli/float(alks)
            atr = multilook_attributes(atr,alks,rlks)
            writefile.write(rgmli,azmli,atr,outName)
        else:
            data,atr = readfile.read(File)
            data_mli = multilook(data,alks,rlks)
            atr = multilook_attributes(atr,alks,rlks)
            writefile.write(data_mli,atr,outName)
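multilook() itself is not defined in this example; in this codebase it presumably averages the array over alks x rlks windows (hence the attribute update that shrinks the file size). A minimal sketch under that assumption:

import numpy as np

def multilook(data, alks, rlks):
    ## Average data over alks looks in azimuth (rows) and rlks looks in
    ## range (columns); trailing rows/columns that do not fill a complete
    ## window are discarded.
    rows, cols = data.shape
    rows_lks, cols_lks = rows // alks, cols // rlks
    data = data[:rows_lks * alks, :cols_lks * rlks]
    return data.reshape(rows_lks, alks, cols_lks, rlks).mean(axis=(1, 3))

## e.g. multilook(np.arange(16.).reshape(4, 4), 2, 2)
## -> array([[ 2.5,  4.5], [10.5, 12.5]])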
Example #49
0
def main(argv):
  
    try:
        File = argv[0]
        demFile=argv[1]
        p=int(argv[2])
    except:
        Usage() ; sys.exit(1)
  
    try:    baseline_error=argv[3]
    except: baseline_error='range_and_azimuth'
    print baseline_error  
    ##################################
    h5file = h5py.File(File)
    dateList = h5file['timeseries'].keys()
    ##################################
  
    try: maskFile=argv[4]
    except:
        if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
        else: print 'No mask found!'; sys.exit(1)
    try:  Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Cannot open mask file: '+maskFile; sys.exit(1)
  
    #try:
    #  maskFile=argv[4]
    #  h5Mask = h5py.File(maskFile,'r')
    #  kMask=h5Mask.keys()
    #  dset1 = h5Mask[kMask[0]].get(kMask[0])
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
    #except:
    #  dset1 = h5file['mask'].get('mask')
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
  
    ##################################
    sy,sx = np.shape(Mask)   # 2D data shape; assumes the mask matches the time series dimensions
    Mask=Mask.flatten(1)
    ndx= Mask !=0
    ##################################
    # h5file = h5py.File(File)
    # dateList = h5file['timeseries'].keys() 
    ##################################
    nt=float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft=float(h5file['timeseries'].attrs['LOOK_REF2'])
    npixel=sx*sy
    lookangle=np.tile(np.linspace(nt,ft,sx),[sy,1])
    lookangle=lookangle.flatten(1)*np.pi/180.0
    Fh=-np.sin(lookangle)
    Fv=-np.cos(lookangle)  
  
    print 'Looking for azimuth pixel size'
    try:
        daz=float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print '''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        '''   
        sys.exit(1)
  
    lines=np.tile(np.arange(0,sy,1),[1,sx])
    lines=lines.flatten(1)
    rs=lines*daz
   
    if baseline_error=='range_and_azimuth': 
        A = np.zeros([npixel,4])
   
        A[:,0]=Fh
        A[:,1]=Fh*rs
        A[:,2]=Fv
        A[:,3]=Fv*rs 
        num_base_par=4
    elif baseline_error=='range':
        A = np.zeros([npixel,2])
   
        A[:,0]=Fh
        A[:,1]=Fv
        num_base_par=2

    ###########################################
    yref=int(h5file['timeseries'].attrs['ref_y'])
    xref=int(h5file['timeseries'].attrs['ref_x'])
    ###########################################
    if os.path.basename(demFile).split('.')[1]=='hgt':
        amp,dem,demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1]=='dem':
        dem,demRsc = readfile.read_real_int16(demFile)
  
    dem=dem-dem[yref][xref]
    dem=dem.flatten(1)
    ###################################################
    if p==1:
        # A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem,np.ones(len(dem)))).T
    elif p==2:
        # A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2,dem,np.ones(len(dem)))).T
    elif p==3:
        #  A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3,dem**2,dem,np.ones(len(dem)))).T
    print np.shape(A)
  
    Ainv=np.linalg.pinv(A)
    ###################################################
 

    Bh=[]
    Bv=[]
    Bhrate=[]
    Bvrate=[]
    Be=np.zeros([len(dateList),num_base_par+p+1])  
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    for i in range(1,len(dateList)):
        dset = h5file['timeseries'].get(dateList[i])
        data = dset[0:dset.shape[0],0:dset.shape[1]]
        L = data.flatten(1)
        M=np.hstack((A,B))
        Berror=np.dot(np.linalg.pinv(M[ndx]),L[ndx])
        Bh.append(Berror[0])
        Bhrate.append(Berror[1])
        Bv.append(Berror[2])
        Bvrate.append(Berror[3])
        Be[i,:]=Berror
        print Berror
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%' 
    print 'baseline error           mean                          std'   
    print '       bh     :  ' +str(np.mean(Bh)) + '     ,  '+str(np.std(Bh))
    print '     bh rate  :  ' +str(np.mean(Bhrate)) + '     ,  '+str(np.std(Bhrate))
    print '       bv     :  ' +str(np.mean(Bv)) + '     ,  '+str(np.std(Bv))
    print '     bv rate  :  ' +str(np.mean(Bvrate)) + '     ,  '+str(np.std(Bvrate))
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'       
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)    
    # plt.show()
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    # print 'Estimating Baseline error from each differences ...'

    orbEffect=np.zeros([len(dateList),sy,sx])
    for i in range(1,len(dateList)):
        effect=np.dot(M,Be[i,:])
        effect=np.reshape(effect,[sx,sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect     
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i,:,:]=effect - effect[yref,xref]
        del effect
  
    print 'Correcting the time series'
    outName=File.replace('.h5','')+'_BaseTropCor.h5'
    h5orbCor=h5py.File(outName,'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0],0:dset1.shape[1]] - orbEffect[i,:,:]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')      
  
    for key,value in h5file['timeseries'].attrs.iteritems():
        group.attrs[key] = value
  
  
    dset1 = h5file['mask'].get('mask')
    group=h5orbCor.create_group('mask')
    dset = group.create_dataset('mask', data=dset1, compression='gzip')
  
    h5file.close()
    h5orbCor.close()
Example #50
0
def main(argv):

    try:
        File = argv[0]
        demFile = argv[1]
        p = int(argv[2])
    except:
        usage()
        sys.exit(1)

    try:
        baseline_error = argv[3]
    except:
        baseline_error = 'range_and_azimuth'
    print baseline_error
    ##################################
    h5file = h5py.File(File)
    dateList = h5file['timeseries'].keys()
    ##################################

    try:
        maskFile = argv[4]
    except:
        if os.path.isfile('Modified_Mask.h5'): maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'): maskFile = 'Mask.h5'
        else:
            print 'No mask found!'
            sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print 'mask: ' + maskFile
    except:
        print 'Cannot open mask file: ' + maskFile
        sys.exit(1)

    #try:
    #  maskFile=argv[4]
    #  h5Mask = h5py.File(maskFile,'r')
    #  kMask=h5Mask.keys()
    #  dset1 = h5Mask[kMask[0]].get(kMask[0])
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
    #except:
    #  dset1 = h5file['mask'].get('mask')
    #  Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]

    ##################################
    sy, sx = np.shape(Mask)  # 2D data shape; assumes the mask matches the time series dimensions
    Mask = Mask.flatten(1)
    ndx = Mask != 0
    ##################################
    # h5file = h5py.File(File)
    # dateList = h5file['timeseries'].keys()
    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten(1) * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    print 'Looking for azimuth pixel size'
    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print '''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        '''
        sys.exit(1)

    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten(1)
    rs = lines * daz

    if baseline_error == 'range_and_azimuth':
        A = np.zeros([npixel, 4])

        A[:, 0] = Fh
        A[:, 1] = Fh * rs
        A[:, 2] = Fv
        A[:, 3] = Fv * rs
        num_base_par = 4
    elif baseline_error == 'range':
        A = np.zeros([npixel, 2])

        A[:, 0] = Fh
        A[:, 1] = Fv
        num_base_par = 2

    ###########################################
    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])
    ###########################################
    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_real_int16(demFile)

    dem = dem - dem[yref][xref]
    dem = dem.flatten(1)
    ###################################################
    if p == 1:
        # A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem, np.ones(len(dem)))).T
    elif p == 2:
        # A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2, dem, np.ones(len(dem)))).T
    elif p == 3:
        #  A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(len(dem)))).T
    print np.shape(A)

    Ainv = np.linalg.pinv(A)
    ###################################################

    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), num_base_par + p + 1])
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    for i in range(1, len(dateList)):
        dset = h5file['timeseries'].get(dateList[i])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        L = data.flatten(1)
        M = np.hstack((A, B))
        Berror = np.dot(np.linalg.pinv(M[ndx]), L[ndx])
        Bh.append(Berror[0])
        Bhrate.append(Berror[1])
        Bv.append(Berror[2])
        Bvrate.append(Berror[3])
        Be[i, :] = Berror
        print Berror
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'baseline error           mean                          std'
    print '       bh     :  ' + str(np.mean(Bh)) + '     ,  ' + str(np.std(Bh))
    print '     bh rate  :  ' + str(np.mean(Bhrate)) + '     ,  ' + str(
        np.std(Bhrate))
    print '       bv     :  ' + str(np.mean(Bv)) + '     ,  ' + str(np.std(Bv))
    print '     bv rate  :  ' + str(np.mean(Bvrate)) + '     ,  ' + str(
        np.std(Bvrate))
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)
    # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    # print 'Estimating Baseline error from each differences ...'

    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(M, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print 'Correcting the time series'
    outName = File.replace('.h5', '') + '_baseTropCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')

    for key, value in h5file['timeseries'].attrs.iteritems():
        group.attrs[key] = value

    dset1 = h5file['mask'].get('mask')
    group = h5orbCor.create_group('mask')
    dset = group.create_dataset('mask', data=dset1, compression='gzip')

    h5file.close()
    h5orbCor.close()
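The estimation loop above fits each epoch's unwrapped phase to a joint model L ≈ M·x with M = [A | B], where A holds the baseline-geometry columns (horizontal/vertical components and their along-track rates) and B a degree-p DEM polynomial plus offset, solved by pseudo-inverse over the masked pixels only. A toy sketch of that masked least-squares step on synthetic data (all sizes and values here are illustrative):

import numpy as np

np.random.seed(0)
npixel, num_base_par, p = 1000, 4, 1
A = np.random.rand(npixel, num_base_par)         # baseline geometry columns
dem = np.random.rand(npixel)
B = np.vstack((dem, np.ones(npixel))).T          # DEM polynomial (p=1) + offset
M = np.hstack((A, B))

x_true = np.arange(1., num_base_par + p + 2)     # parameters to recover
L = np.dot(M, x_true)                            # simulated phase, one epoch
ndx = np.random.rand(npixel) > 0.3               # mask: keep ~70% of pixels

Berror = np.dot(np.linalg.pinv(M[ndx]), L[ndx])  # masked least-squares solve
print Berror                                     # recovers x_true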
Example #51
0
def subset_file(File,sub_x,sub_y,outfill=np.nan,outName=''):

    ##### Overlap between subset and data range
    atr = readfile.read_attributes(File)
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    box1 = (0,0,width,length)
    box2 = (sub_x[0],sub_y[0],sub_x[1],sub_y[1])
    idx1,idx2 = box_overlap_index(box1,box2)
    print 'data   range:'
    print box1
    print 'subset range:'
    print box2

    ###########################  Data Read and Write  ######################
    k = atr['FILE_TYPE']
    print 'file type: '+k
    if outName == '':  outName = 'subset_'+os.path.basename(File)

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        print 'number of epochs: '+str(len(epochList))

        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName

    ## Loop
    if k == 'timeseries':
        for epoch in epochList:
            print epoch
            dset = h5file[k].get(epoch)
            data_overlap = dset[idx1[1]:idx1[3],idx1[0]:idx1[2]]

            data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
            data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr  = subset_attributes(atr,sub_y,sub_x)
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        for epoch in epochList:
            print epoch
            dset = h5file[k][epoch].get(epoch)
            atr  = h5file[k][epoch].attrs
            data_overlap = dset[idx1[1]:idx1[3],idx1[0]:idx1[2]]

            data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
            data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

            atr  = subset_attributes(atr,sub_y,sub_x)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():    gg.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.jpeg','.jpg','.png','.ras','.bmp']:
        data, atr = readfile.read(File,box2)
        writefile.write(data,atr,outName)

    elif k == '.trans':
        rg_overlap,az_overlap,atr = readfile.read(File,idx1)

        rg = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        rg[idx2[1]:idx2[3],idx2[0]:idx2[2]] = rg_overlap

        az = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        az[idx2[1]:idx2[3],idx2[0]:idx2[2]] = az_overlap

        atr = subset_attributes(atr,sub_y,sub_x)
        writefile.write(rg,az,atr,outName)
    else:
        data_overlap,atr = readfile.read(File,idx1)

        data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

        atr = subset_attributes(atr,sub_y,sub_x)
        writefile.write(data,atr,outName)

    ##### End Cleaning
    try:
        h5file.close()
        h5out.close()
    except: pass
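box_overlap_index() is not shown here; from how idx1 and idx2 are used above, it evidently returns the overlap of two (x0, y0, x1, y1) boxes expressed once in each box's own local coordinates, so that idx1 slices the source data and idx2 the padded output array. A sketch consistent with that usage:

def box_overlap_index(box1, box2):
    ## box = (x0, y0, x1, y1) in a shared pixel coordinate system.
    x0 = max(box1[0], box2[0])
    y0 = max(box1[1], box2[1])
    x1 = min(box1[2], box2[2])
    y1 = min(box1[3], box2[3])
    if x0 >= x1 or y0 >= y1:
        raise ValueError('No overlap between data range and subset range!')
    ## Shift the overlap into each box's own local coordinates.
    idx1 = (x0 - box1[0], y0 - box1[1], x1 - box1[0], y1 - box1[1])
    idx2 = (x0 - box2[0], y0 - box2[1], x1 - box2[0], y1 - box2[1])
    return idx1, idx2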