Example #1
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'rangeDistance.h5'

    # Calculate range distance
    range_dis = ut.range_distance(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print 'Input file is geocoded, only the center range distance is calculated: '
        print range_dis
        return range_dis

    # Radar coord
    else:
        print 'writing >>> ' + outFile
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'm'
        writefile.write(range_dis, atr, outFile)
        return outFile
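
A note on the helper: ut.range_distance is defined elsewhere in PySAR. As a rough sketch of what its 2-D output contains, assuming the standard ROI_PAC attributes STARTING_RANGE and RANGE_PIXEL_SIZE and ignoring multilook factors (the helper below is an illustration, not PySAR's implementation):

import numpy as np

def range_distance_sketch(atr):
    # slant range grows linearly across columns and is constant along rows
    near_range = float(atr['STARTING_RANGE'])    # meters to the first pixel
    dR = float(atr['RANGE_PIXEL_SIZE'])          # meters per range pixel
    length, width = int(atr['FILE_LENGTH']), int(atr['WIDTH'])
    rg = near_range + dR * np.arange(width)
    return np.tile(rg, (length, 1)).astype(np.float32)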
Example #2
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'incidenceAngle.h5'

    # Calculate incidence angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print 'Input file is geocoded, only the center incidence angle is calculated: '
        print angle
        return angle

    # Radar coord
    else:
        print 'writing >>> ' + outFile
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'degree'
        writefile.write(angle, atr, outFile)
        return outFile
Example #3
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'incidenceAngle.h5'

    # Calculate incidence angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print 'Input file is geocoded, only the center incidence angle is calculated: '
        print angle
        length = int(atr['FILE_LENGTH'])
        width = int(atr['WIDTH'])
        angle_mat = np.zeros((length, width), np.float32)
        angle_mat[:] = angle
        angle = angle_mat

    print 'writing >>> ' + outFile
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'degree'
    try:
        atr.pop('ref_date')
    except:
        pass
    writefile.write(angle, atr, outFile)
    return outFile
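
The geocoded branch above expands the scalar angle into a constant matrix by hand with np.zeros plus a fill; an equivalent single call, for comparison:

angle_mat = np.full((length, width), angle, np.float32)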
Example #4
def remove_multiple_surface(File, surf_type, Mask, ysub, outName):
    start = time.time()
    ##### Output File Name
    if outName == "":
        ext = os.path.splitext(File)[1].lower()
        outName = os.path.basename(File).split(ext)[0] + "_" + surf_type + ext

    atr = readfile.read_attributes(File)
    k = atr["FILE_TYPE"]
    print "Input file is " + atr["PROCESSOR"] + " " + k

    if k == "timeseries":
        h5file = h5py.File(File, "r")
        ifgramList = h5file[k].keys()
        ifgramList = sorted(ifgramList)
        print "number of epochs: " + str(len(ifgramList))

        h5flat = h5py.File(outName, "w")
        group = h5flat.create_group(k)
        print "writing >>> " + outName

        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            dataIn = h5file[k].get(ifgram)[:]

            dataOut = remove_data_multiple_surface(dataIn, surf_type, Mask, ysub)

            dset = group.create_dataset(ifgram, data=dataOut, compression="gzip")
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    else:
        try:
            dataIn, atr = readfile.read(File)
        except:
            print "Input file type is not supported: " + atr["FILE_TYPE"]

        dataOut = remove_data_multiple_surface(dataIn, surf_type, Mask, ysub)
        ramp = dataIn - dataOut

        writefile.write(dataOut, atr, outName)
        # atr['FILE_TYPE']='mask'
        # writefile.write(ramp,atr,'2quadratic.h5')

    print "Remove " + surf_type + " took " + str(time.time() - start) + " secs"
Example #5
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage();  sys.exit(1)
    
    try:    outFile = argv[1]
    except: outFile = 'incidence_angle.h5'
    
    #print '\n*************** Generate Incidence Angle *****************'
    ##### Calculate incidence angle
    angle = ut.incidence_angle(atr)
    
    ##### Output
    print 'writing >>> '+outFile
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'degree'
    writefile.write(angle, atr, outFile)
Example #6
def main(argv):
    try:  opts, args = getopt.getopt(argv,"f:h")
    except getopt.GetoptError:  Usage() ; sys.exit(1)
  
    if  opts==[]:  Usage() ; sys.exit(1)
    for opt,arg in opts:
        if opt in ("-h","--help"):   Usage();  sys.exit()
        elif opt == '-f':            File = arg
    
    ##### Read attributes
    atr = readfile.read_attributes(File)
    print '\n*************** Generate Incidence Angle *****************'

    ##### Calculate look angle
    angle = look_angle(atr)

    ##### Output
    atr['FILE_TYPE'] = 'mask'
    outName = 'incidence_angle.h5'
    writefile.write(angle,atr,outName)
Example #7
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
        epoch = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[2]
    except:
        outFile = None

    # Calculate perpendicular baseline
    pbase = ut.perp_baseline_timeseries(atr, dimension=1)

    if pbase.shape[1] == 1:
        print pbase
        return pbase

    k = atr['FILE_TYPE']
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    h5 = h5py.File(File, 'r')
    epochList = sorted(h5[k].keys())
    epoch = ptime.yyyymmdd(epoch)
    epoch_idx = epochList.index(epoch)

    pbase_y = pbase[epoch_idx, :].reshape(length, 1)
    pbase_xy = np.tile(pbase_y, (1, width))

    if not outFile:
        outFile = 'perpBaseline_' + epoch + '.h5'

    print 'writing >>> ' + outFile
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'm'
    writefile.write(pbase_xy, atr, outFile)
    return outFile
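
The per-row baseline column is expanded to the full grid with reshape plus np.tile; a tiny demo of that pattern:

import numpy as np
pbase_y = np.array([[1.0], [2.0], [3.0]])   # one value per row (length = 3)
print np.tile(pbase_y, (1, 4))              # repeated across 4 columns
# [[ 1.  1.  1.  1.]
#  [ 2.  2.  2.  2.]
#  [ 3.  3.  3.  3.]]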
Example #8
def main(argv):
    try:
        ifgramFile = argv[0]
        timeseriesFile = argv[1]
    except:
        usage()
        sys.exit()

    temp_coherence = temporal_coherence(timeseriesFile, ifgramFile)

    try:
        tempCohFile = argv[2]
    except:
        tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile

    atr = readfile.read_attribute(timeseriesFile)
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coherence, atr, tempCohFile)
    print 'Done.'
    return tempCohFile
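
temporal_coherence is defined elsewhere. Conventionally it measures how well the interferograms fit the inverted time series: the magnitude of the averaged unit phasors of the phase residuals, 1 for a perfect fit and near 0 for noise. A minimal sketch of that definition, assuming residual phases resid of shape (num_ifgram, num_pixel):

import numpy as np

def temporal_coherence_sketch(resid):
    # |mean of exp(j*residual)| per pixel, in [0, 1]
    return np.abs(np.sum(np.exp(1j * resid), axis=0)) / resid.shape[0]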
Example #9
def main(argv):

    method    = 'triangular_consistency'    ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'
  
    ##### Check Inputs
    if len(sys.argv)>2:
        try: opts, args = getopt.getopt(argv,'h:f:m:x:y:o:t:',['ramp=','no-ramp-save'])
        except getopt.GetoptError:  print 'Error while getting args';  Usage(); sys.exit(1)
  
        for opt,arg in opts:
            if   opt in ['-h','--help']:    Usage(); sys.exit()
            elif opt in '-f':    File     = arg
            elif opt in '-m':    maskFile = arg
            elif opt in '-o':    outName  = arg
            elif opt in '-x':    x = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-y':    y = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-t':    templateFile = arg
            elif opt in '--ramp'         :  ramp_type    = arg.lower()
            elif opt in '--no-ramp-save' :  save_rampCor = 'no'
  
    elif len(sys.argv)==2:
        if argv[0] in ['-h','--help']:    Usage();  sys.exit()
        elif os.path.isfile(argv[0]):     File = argv[0];  maskFile = argv[1]
        else:    print 'Input file does not exist: '+argv[0];  sys.exit(1)
  
    else:  Usage(); sys.exit(1)
  
    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass
  
    try:
        yx = [int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except: pass

    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:       maskFile
    except:
        try:    maskFile = templateContents['pysar.mask.file']
        except:
            if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
            else: print 'No mask found!'; sys.exit(1)
    try:    Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Cannot open mask file: '+maskFile; sys.exit(1)
  
    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:    outName
    except: outName = File.split('.')[0]+'_unwCor'+ext
  
    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'
  
        h5file=h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls,Triangles,C=ut.get_triangles(h5file)
        A,B = ut.design_matrix(h5file)   
        ligram,lv=np.shape(B)
        lcurls=np.shape(curls)[0]
        print 'Number of all triangles: '+  str(lcurls)
        print 'Number of interferograms: '+ str(ligram)
        #print curls
  
        curlfile='curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile,h5file,Triangles,curls)
         
        thr=0.50
        curls=np.array(curls);   n1=curls[:,0];   n2=curls[:,1];   n3=curls[:,2]
  
        numPixels=sy*sx
        print 'reading interferograms...'   
        data = np.zeros((ligram,numPixels),np.float32)
        for ni in range(ligram):
            dset=h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            data[ni] = d.flatten(1)   
  
        print np.shape(data)
        print 'reading curls ...' 
        h5curl=h5py.File(curlfile)
        curlList=h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls,numPixels),np.float32)
        for ni in range(lcurls):
            dset=h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi=np.pi
        EstUnwrap=np.zeros((ligram,numPixels),np.float32)
  
        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        
        Mask=Mask.flatten(1)

        from scipy.linalg import pinv
        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])
  
            if Mask[ni]==1:
                dU = data[:,ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:,ni])
                #print unwCurl
  
                ind  = np.abs(unwCurl)>=thr;      N1 =n1[ind];      N2 =n2[ind];      N3 =n3[ind]
                indC = np.abs(unwCurl)< thr;      Nc1=n1[indC];     Nc2=n2[indC];     Nc3=n3[indC]
  
                N =np.hstack([N1, N2, N3]);       UniN =np.unique(N)
                Nc=np.hstack([Nc1,Nc2,Nc3]);      UniNc=np.unique(Nc)
  
                inter=list(set(UniNc) & set(UniN)) # intersection
                UniNc= list(UniNc)
                for x in inter:
                    UniNc.remove(x)
  
                D=np.zeros([len(UniNc),ligram])
                for i in range(len(UniNc)):
                    D[i,UniNc[i]]=1
  
                AAA=np.vstack([-2*pi*C,D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B]) 
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
  
                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA) 
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'
  
                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)      
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
  
                ##########
                # with Tikhonov regularization:
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
                LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind=np.isnan(AAAA)
                M1=pinv(AAAA)
                M=np.dot(M1,LLL)
                EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])
  
            else:
                EstUnwrap[:,ni]=np.zeros([ligram])
                if not np.remainder(ni,10000): print 'Processing point: %7d of %7d ' % (ni,numPixels)

        ##### Output
        dataCor = data+EstUnwrap
        unwCorFile=File.replace('.h5','')+'_unwCor.h5';  print 'writing >>> '+unwCorFile
        h5unwCor=h5py.File(unwCorFile,'w') 
        gg = h5unwCor.create_group('interferograms') 
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i], data=np.reshape(dataCor[i,:],[sx,sy]).T, compression='gzip')
            for key, value in h5file['interferograms'][ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value
  
        try:
            MASK=h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except: pass
  
        h5unwCor.close()
        h5file.close()
        h5curl.close() 


    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'
  
        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x),2) != 0:
                print 'Wrong number of bridge points input: '+str(len(x))+' for x, '+str(len(y))+' for y'
                Usage();  sys.exit(1)
        except: print 'Error in reading bridge points info!';  Usage();  sys.exit(1)
        for i in range(0,len(x)):
            if Mask[y[i],x[i]] == 0:
                print '\nERROR: Connecting point ('+str(y[i])+','+str(x[i])+') is outside the masked area! Select the points again!\n'
                sys.exit(1)
  
        print 'Number of bonding point pairs: '+str(len(x)/2)
        print 'Bonding points coordinates:\nx: '+str(x)+'\ny: '+str(y)
  
        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx  = ''
            n_bridge = len(x)/2
            for i in range(n_bridge):
                pair_yx = str(y[2*i])+','+str(x[2*i])+','+str(y[2*i+1])+','+str(x[2*i+1])
                if not i == n_bridge-1:
                    point_yx += pair_yx+','
                    line_yx  += pair_yx+';'
                else:
                    point_yx += pair_yx
                    line_yx  += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except: pass


        ##### Ramp Info
        ramp_mask = Mask==1
        print 'estimate phase ramp during the correction'
        print 'ramp type: '+ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(ext)[0]+'_'+ramp_type+ext
  
        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:     h5file=h5py.File(File,'r')
            except:  print 'ERROR: Cannot open input file: '+File; sys.exit(1)
            k=h5file.keys()
            if 'interferograms' in k: k[0] = 'interferograms';  print 'Input file is '+k[0]
            else: print 'Input file - '+File+' - is not interferograms.';  Usage();  sys.exit(1)
            igramList = h5file[k[0]].keys()
            igramList = sorted(igramList)
  
            #### Write
            h5out = h5py.File(outName,'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> '+outName
  
            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp,'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> '+outName_ramp
  
            ##### Loop
            print 'Number of interferograms: '+str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]
  
                data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp,Mask,x,y)
                dataCor = data_rampCor - ramp
  
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=dataCor, compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key]=value
  
                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram, data=data_rampCor, compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key]=value
  
            try:
                mask = h5file['mask'].get('mask');
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask', data=mask[0:mask.shape[0],0:mask.shape[1]], compression='gzip')
            except: print 'no mask group found.'
  
            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is '+ext
            a,data,atr = readfile.read_float32(File);
  
            data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp,Mask,x,y)
            dataCor = data_rampCor - ramp
  
            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor,atr,outName_ramp)
  
        else: print 'Unsupported file type: '+ext;  Usage();  sys.exit(1)
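
Background for the triangular-consistency branch: phase closure says that for three unwrapping-error-free interferograms sharing dates i, j, k, phi_ij + phi_jk - phi_ik is (numerically) zero, and an unwrapping error shows up as a non-zero integer number of 2*pi cycles. A toy check of that identity:

import numpy as np
phi_ij, phi_jk, phi_ik = 1.3, 0.9, 2.2                  # consistent triplet
print phi_ij + phi_jk - phi_ik                          # ~0.0
phi_ij += 2 * np.pi                                     # inject a one-cycle unwrapping error
print round((phi_ij + phi_jk - phi_ik) / (2 * np.pi))   # 1.0 cycle detected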
Example #10
def main(argv):

    global method_default
    ##### Referencing methods
    method_default = 'max_coherence'
    #method = 'manual'
    #method = 'max_coherence'        ## Use phase on point with max coherence [default]
    #method = 'global_average'       ## Use Nan Mean of phase on all pixels
    #method = 'random'
    #maskFile = 'Mask.h5'

    global SeedingDone
    
    ############################## Check Inputs ##############################
    if len(sys.argv) > 2:
        try:  opts, args = getopt.getopt(argv,"h:c:f:m:y:x:l:L:t:o:r:",\
                                         ['manual','max-coherence','global-average','random'])
        except getopt.GetoptError:  Usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt in ("-h","--help"):   Usage();  sys.exit()
            elif opt == '-f':        File     = arg
            elif opt == '-m':        maskFile = arg
            elif opt == '-c':        corFile  = arg
            elif opt == '-o':        outFile  = arg

            elif opt == '-y':        ry       = int(arg)
            elif opt == '-x':        rx       = int(arg)
            elif opt == '-l':        rlat     = float(arg)
            elif opt == '-L':        rlon     = float(arg)
            elif opt == '-r':        refFile  = arg
            elif opt == '-t':        templateFile = arg

            elif opt == '--global-average' :  method = 'global_average'
            elif opt == '--manual'         :  method = 'manual'
            elif opt == '--max-coherence'  :  method = 'max_coherence'
            elif opt == '--random'         :  method = 'random'

    elif len(sys.argv)==2:
        if   argv[0]=='-h':            Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):  File = argv[0]
        else:  print 'Input file does not exist: '+argv[0];  sys.exit(1)
    elif len(sys.argv)<2:             Usage(); sys.exit(1)

    ##### Input File Info
    try:
        File
        atr = readfile.read_attributes(File)
        k = atr['FILE_TYPE']
        length = int(atr['FILE_LENGTH'])
        width  = int(atr['WIDTH'])
    except:  Usage() ; sys.exit(1)
    ext = os.path.splitext(File)[1].lower()

    try:    outFile
    except: outFile = 'Seeded_'+File
  
    ############################## Reference Point Input ####################
    try:
        refFile
        atr_ref = readfile.read_attributes(refFile)
    except: pass
  
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass

    ### Priority
    ## lat/lon > y/x
    ## Direct Input > Reference File > Template File
    try:
        rlat
        rlon
    except:
        try:
            rlat = float(atr_ref['ref_lat'])
            rlon = float(atr_ref['ref_lon'])
        except:
            try: rlat,rlon = [float(i) for i in templateContents['pysar.seed.lalo'].split(',')]
            except: pass

    try:
        ry
        rx
    except:
        try:
            ry = int(atr_ref['ref_y'])
            rx = int(atr_ref['ref_x'])
        except:
            try: ry,rx       = [int(i)   for i in templateContents['pysar.seed.yx'].split(',')]
            except: pass

    ##### Check lalo / YX
    print '\n************** Reference Point ******************'
    try:
        rlat
        rlon
        y = sub.coord_geo2radar(rlat,atr,'lat')
        x = sub.coord_geo2radar(rlon,atr,'lon')
        0<= x <= width
        0<= y <= length
        rx = x
        ry = y
        print 'Reference point: lat = %.4f,   lon = %.4f'%(rlat,rlon)
        print '                 y   = %d,     x   = %d'%(ry,rx)
    except:
        print 'Skip input lat/lon reference point.'
        print 'Continue with the y/x reference point.'


    ######################### a. Read Mask File #########################
    ## Priority: Input mask file > pysar.mask.file 
    try:     maskFile
    except:
        try: maskFile = templateContents['pysar.mask.file']
        except:  print 'No mask found!';
    try:
        M,Matr = readfile.read(maskFile);
        print 'mask: '+maskFile
    except:
        print '---------------------------------------------------------'
        print 'WARNING: No mask, using the whole area as mask'
        print '---------------------------------------------------------'
        M = np.ones((length,width))

    ## Message
    try:
        rx
        ry
        0<= rx <= width
        0<= ry <= length
        if M[ry,rx] == 0:
            print 'Input point has 0 value in mask.'
    except: pass

    ######################### b. Stack ##################################
    stackFile = os.path.basename(File).split(ext)[0] + '_stack.h5'
    stack_file_exist = 'no'
    try:
        os.path.isfile(stackFile)
        stack,atrStack = readfile.read(stackFile)
        if width == int(atrStack['WIDTH']) and length == int(atrStack['FILE_LENGTH']):
            stack_file_exist = 'yes'
            print 'read stack from file: '+stackFile
    except: pass

    if stack_file_exist == 'no':
        print 'calculating stack of input file ...'
        stack = ut.stacking(File)
        atrStack = atr.copy()
        atrStack['FILE_TYPE'] = 'mask'
        writefile.write(stack,atrStack,stackFile)

    ## Message
    try:
        rx
        ry
        if stack[ry,rx] == 0:
            print 'Input point has nan value in data.'
    except: pass

    stack[M==0] = 0
    if np.nansum(M) == 0.0:
        print '\n*****************************************************'
        print   'ERROR:'
        print   'There is no pixel that has valid phase value in all datasets.' 
        print   'Check the file!'
        print   'Seeding failed'
        sys.exit(1)

    ######################### Check Method ##############################
    try:
        not stack[ry,rx] == 0
        method = 'input_coord'
    except:
        try:    method
        except: method = method_default
        print 'Skip input y/x reference point.'
        print 'Continue with '+method

    #h5file = h5py.File(File)

    ######################### Seeding ###################################
    ##### Sub-function
    def seed_method(method,File,stack,outFile,corFile=''):
        SeedingDone = 'no'
        next_method = method_default
        M = stack != 0

        if   method == 'manual':
            SeedingDone = seed_manual(File,stack,outFile)
            if SeedingDone == 'no':
                next_method = method_default
                print_warning(next_method)

        elif method == 'max_coherence':
            try:    SeedingDone = seed_max_coherence(File,M,outFile,corFile)
            except: SeedingDone = seed_max_coherence(File,M,outFile)
            if SeedingDone == 'no':
                next_method = 'random'
                print_warning(next_method)

        elif method == 'random':
            y,x = random_selection(stack)
            seed_xy(File,x,y,outFile)
            SeedingDone = 'yes'

        elif method == 'global_average':
            print '\n---------------------------------------------------------'
            print 'Automatically Seeding using Global Spatial Average Value '
            print '---------------------------------------------------------'
            print 'Calculating the global spatial average value for each epoch'+\
                  ' of all valid pixels ...'
            box = (0,0,width,length)
            meanList = ut.spatial_mean(File,M,box)
            seed_file(File,outFile,meanList,'','')
            SeedingDone = 'yes'

        return SeedingDone, next_method

    ##### Seeding
    SeedingDone = 'no'

    if method == 'input_coord':
        seed_xy(File,rx,ry,outFile)
        SeedingDone = 'yes'

    else:
        i = 0
        while SeedingDone == 'no' and i < 5:
            try:    SeedingDone,method = seed_method(method,File,stack,outFile,corFile)
            except: SeedingDone,method = seed_method(method,File,stack,outFile)
            i += 1
        if i >= 5:
            print 'ERROR: Seeding failed after '+str(i)+' attempts ...'
            sys.exit(1)
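
random_selection and the seed_* helpers are defined elsewhere. A plausible sketch of random_selection, under the assumption that it returns the y/x of a randomly chosen pixel whose stacked value is non-zero (nan pixels would need masking first):

import random
import numpy as np

def random_selection_sketch(stack):
    ys, xs = np.nonzero(stack)          # candidate pixels with non-zero stack
    i = random.randrange(len(ys))
    return ys[i], xs[i]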
Example #11
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print 'input is '+k+' file: '+fname
    print 'operation: file %s %f' % (operator, operand)

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k == 'timeseries':
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Dual Dataset non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print 'writing >>> '+fname_out
        writefile.write(data_out, atr, fname_out)

    return fname_out
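
data_operation is not shown here. A minimal sketch, assuming it maps the accepted operator aliases onto element-wise NumPy arithmetic (alias spellings kept exactly as in the function above):

def data_operation_sketch(data, operator, operand):
    if   operator in ['+','plus',  'add',      'addition']:        return data + operand
    elif operator in ['-','minus', 'substract','substraction']:    return data - operand
    elif operator in ['*','times', 'multiply', 'multiplication']:  return data * operand
    elif operator in ['/','obelus','divide',   'division']:        return data / operand
    elif operator in ['^','pow','power']:                          return data ** operand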
Example #12
def main(argv):
    try:
        File = argv[0]
    except:
        usage()
        sys.exit(1)

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']

    h5file = h5py.File(File, 'r')
    print '\n************* Output to ROI_PAC format ***************'

    if k == 'velocity':
        dset = h5file['velocity'].get('velocity')
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        print "converting velocity to a 1 year interferogram."
        wvl = float(h5file[k].attrs['WAVELENGTH'])
        data = (-4 * pi / wvl) * data

        outname = File.split('.')[0] + '.unw'
        writefile.write(data, atr, outname)

    elif k == 'timeseries':
        dateList = h5file['timeseries'].keys()
        ## Input
        if len(sys.argv) == 2:
            print 'No input date specified >>> continue with the last date'
            dateList = h5file['timeseries'].keys()
            d = dateList[-1]
        elif len(sys.argv) == 3:
            d = sys.argv[2]
        elif len(sys.argv) == 4:
            ds = sorted(sys.argv[2:4])
            d_ref = ds[0]
            d = ds[1]
        else:
            usage()
            sys.exit(1)
        d = ptime.yyyymmdd(d)
        try:
            d_ref = ptime.yyyymmdd(d_ref)
        except:
            pass

        ## Data
        print 'reading ' + d + ' ... '
        data = h5file['timeseries'].get(d)[:]
        try:
            print 'reading ' + d_ref + ' ... '
            data_ref = h5file['timeseries'].get(d_ref)[:]
            data = data - data_ref
        except:
            pass
        wvl = float(atr['WAVELENGTH'])
        data *= -4 * pi / wvl

        ## outName
        try:
            master_d = d_ref
        except:
            try:
                master_d = atr['ref_date']
            except:
                master_d = atr['DATE']
        if len(master_d) == 8: master_d = master_d[2:8]
        if len(d) == 8: d = d[2:8]
        outname = master_d + '_' + d + '.unw'

        ## Attributes
        atr['FILE_TYPE'] = '.unw'
        atr['P_BASELINE_TIMESERIES'] = '0.0'
        atr['UNIT'] = 'radian'
        atr['DATE'] = master_d
        atr['DATE12'] = master_d + '-' + d

        ## Writing
        writefile.write(data, atr, outname)

    elif k in ['interferograms', 'coherence', 'wrapped']:
        ## Check input
        igramList = h5file[k].keys()
        try:
            d = sys.argv[2]
            for i in range(len(igramList)):
                if d in igramList[i]:
                    igram = igramList[i]
        except:
            igram = igramList[-1]
            print 'No input date specified >>> continue with the last date'
        ## Read and Write
        print 'reading ' + igram + ' ... '
        data = h5file[k][igram].get(igram)[:]
        atr = h5file[k][igram].attrs
        outname = igram

        print 'writing >>> ' + outname
        writefile.write(data, atr, outname)

    else:
        dset = h5file[k].get(k)
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        if k == 'temporal_coherence': outname = File.split('.')[0] + '.cor'
        else: outname = File.split('.')[0] + '.unw'

        writefile.write(data, atr, outname)

    h5file.close()
    return
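
The velocity branch converts displacement to phase with the standard two-way factor -4*pi/wavelength; a quick numeric check with illustrative values:

import numpy as np
wvl = 0.0562                      # C-band wavelength in meters (illustrative)
disp = 0.01                       # 1 cm of line-of-sight displacement
print -4 * np.pi * disp / wvl     # about -2.24 radians of phase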
Example #13
def main(argv):

    method = 'triangular_consistency'  ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'

    ##### Check Inputs
    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(argv, 'h:f:m:x:y:o:t:',
                                       ['ramp=', 'no-ramp-save'])
        except getopt.GetoptError:
            print 'Error while getting args'
            usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                usage()
                sys.exit()
            elif opt in '-f':
                File = arg
            elif opt in '-m':
                maskFile = arg
            elif opt in '-o':
                outName = arg
            elif opt in '-x':
                x = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-y':
                y = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-t':
                templateFile = arg
            elif opt in '--ramp':
                ramp_type = arg.lower()
            elif opt in '--no-ramp-save':
                save_rampCor = 'no'

    elif len(sys.argv) == 2:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit()
        elif os.path.isfile(argv[0]):
            File = argv[0]
            maskFile = argv[1]
        else:
            print 'Input file does not exist: ' + argv[0]
            sys.exit(1)

    else:
        usage()
        sys.exit(1)

    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except:
        pass

    try:
        yx = [
            int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')
        ]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except:
        pass

    ##### Read Mask File
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:
        maskFile
    except:
        try:
            maskFile = templateContents['pysar.mask.file']
        except:
            if os.path.isfile('Modified_Mask.h5'):
                maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):
                maskFile = 'Mask.h5'
            else:
                print 'No mask found!'
                sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print 'mask: ' + maskFile
    except:
        print 'Cannot open mask file: ' + maskFile
        sys.exit(1)

    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:
        outName
    except:
        outName = File.split('.')[0] + '_unwCor' + ext

    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'

        h5file = h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls, Triangles, C = ut.get_triangles(h5file)
        A, B = ut.design_matrix(h5file)
        ligram, lv = np.shape(B)
        lcurls = np.shape(curls)[0]
        print 'Number of all triangles: ' + str(lcurls)
        print 'Number of interferograms: ' + str(ligram)
        #print curls

        curlfile = 'curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile, h5file, Triangles, curls)

        thr = 0.50
        curls = np.array(curls)
        n1 = curls[:, 0]
        n2 = curls[:, 1]
        n3 = curls[:, 2]

        numPixels = sy * sx
        print 'reading interferograms...'
        data = np.zeros((ligram, numPixels), np.float32)
        for ni in range(ligram):
            dset = h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            data[ni] = d.flatten(1)

        print np.shape(data)
        print 'reading curls ...'
        h5curl = h5py.File(curlfile)
        curlList = h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls, numPixels), np.float32)
        for ni in range(lcurls):
            dset = h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi = np.pi
        EstUnwrap = np.zeros((ligram, numPixels), np.float32)

        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]

        Mask = Mask.flatten(1)

        from scipy.linalg import pinv
        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])

            if Mask[ni] == 1:
                dU = data[:, ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:, ni])
                #print unwCurl

                ind = np.abs(unwCurl) >= thr
                N1 = n1[ind]
                N2 = n2[ind]
                N3 = n3[ind]
                indC = np.abs(unwCurl) < thr
                Nc1 = n1[indC]
                Nc2 = n2[indC]
                Nc3 = n3[indC]

                N = np.hstack([N1, N2, N3])
                UniN = np.unique(N)
                Nc = np.hstack([Nc1, Nc2, Nc3])
                UniNc = np.unique(Nc)

                inter = list(set(UniNc) & set(UniN))  # intersection
                UniNc = list(UniNc)
                for x in inter:
                    UniNc.remove(x)

                D = np.zeros([len(UniNc), ligram])
                for i in range(len(UniNc)):
                    D[i, UniNc[i]] = 1

                AAA = np.vstack([-2 * pi * C, D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B])
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])

                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA)
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'

                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi

                ##########
                # with Tikhonov regularization:
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])
                LLL = list(np.dot(C, dU)) + list(np.zeros(
                    np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind = np.isnan(AAAA)
                M1 = pinv(AAAA)
                M = np.dot(M1, LLL)
                EstUnwrap[:, ni] = np.round(M[0:ligram]) * 2.0 * np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])

            else:
                EstUnwrap[:, ni] = np.zeros([ligram])
                if not np.remainder(ni, 10000):
                    print 'Processing point: %7d of %7d ' % (ni, numPixels)

        ##### Output
        dataCor = data + EstUnwrap
        unwCorFile = File.replace('.h5', '') + '_unwCor.h5'
        print 'writing >>> ' + unwCorFile
        h5unwCor = h5py.File(unwCorFile, 'w')
        gg = h5unwCor.create_group('interferograms')
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i],
                                        data=np.reshape(
                                            dataCor[i, :], [sx, sy]).T,
                                        compression='gzip')
            for key, value in h5file['interferograms'][
                    ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value

        try:
            MASK = h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except:
            pass

        h5unwCor.close()
        h5file.close()
        h5curl.close()

    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'

        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x), 2) != 0:
                print 'Wrong number of bridge points input: ' + str(
                    len(x)) + ' for x, ' + str(len(y)) + ' for y'
                usage()
                sys.exit(1)
        except:
            print 'Error in reading bridge points info!'
            usage()
            sys.exit(1)
        for i in range(0, len(x)):
            if Mask[y[i], x[i]] == 0:
                print '\nERROR: Connecting point (' + str(y[i]) + ',' + str(
                    x[i]) + ') is outside the masked area! Select the points again!\n'
                sys.exit(1)

        print 'Number of bonding point pairs: ' + str(len(x) / 2)
        print 'Bonding points coordinates:\nx: ' + str(x) + '\ny: ' + str(y)

        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx = ''
            n_bridge = len(x) / 2
            for i in range(n_bridge):
                pair_yx = str(y[2 * i]) + ',' + str(x[2 * i]) + ',' + str(
                    y[2 * i + 1]) + ',' + str(x[2 * i + 1])
                if not i == n_bridge - 1:
                    point_yx += pair_yx + ','
                    line_yx += pair_yx + ';'
                else:
                    point_yx += pair_yx
                    line_yx += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except:
                pass

        ##### Ramp Info
        ramp_mask = Mask == 1
        print 'estimate phase ramp during the correction'
        print 'ramp type: ' + ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(
                ext)[0] + '_' + ramp_type + ext

        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:
                h5file = h5py.File(File, 'r')
            except:
                print 'ERROR: Cannot open input file: ' + File
                sys.exit(1)
            k = h5file.keys()
            if 'interferograms' in k:
                k[0] = 'interferograms'
                print 'Input file is ' + k[0]
            else:
                print 'Input file - ' + File + ' - is not interferograms.'
                usage()
                sys.exit(1)
            igramList = sorted(h5file[k[0]].keys())

            #### Write
            h5out = h5py.File(outName, 'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> ' + outName

            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp, 'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> ' + outName_ramp

            ##### Loop
            print 'Number of interferograms: ' + str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]

                data_ramp, ramp = rm.remove_data_surface(
                    data, ramp_mask, ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp, Mask, x, y)
                dataCor = data_rampCor - ramp

                group = gg.create_group(igram)
                dset = group.create_dataset(igram,
                                            data=dataCor,
                                            compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key] = value

                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram,
                                                     data=data_rampCor,
                                                     compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key] = value

            try:
                mask = h5file['mask'].get('mask')
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask',
                                         data=mask[0:mask.shape[0],
                                                   0:mask.shape[1]],
                                         compression='gzip')
            except:
                print 'no mask group found.'

            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is ' + ext
            a, data, atr = readfile.read_float32(File)

            data_ramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                     ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp, Mask, x, y)
            dataCor = data_rampCor - ramp

            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor, atr, outName_ramp)

        else:
            print 'Unsupported file type: ' + ext
            usage()
            sys.exit(1)
Example #14
def main(argv):

    try:
        file=argv[0]
        geomap=argv[1]
    except:
        Usage();sys.exit(1)
 
    ######################################################################################
    fileName=os.path.basename(file).split('.')[0]
    h5file=h5py.File(file,'r')
    atr = readfile.read_attributes(file)
    k = atr['FILE_TYPE']
    print '\n***************** Geocoding *******************'
    print 'input file: '+k
 
    #### Subsetted radar coded file
    try:
        x0 = float(atr['subset_x0'])
        y0 = float(atr['subset_y0'])
        print '\nSubsetted radar coded file:\n    creating temporary geomap file for it...'
        rg,az,rsc = readfile.read_float32(geomap)
        rg = rg - x0
        az = az - y0
        geomap = 'temp_'+geomap
        print '    writing '+geomap+'\n'
        writefile.write_float32(rg,az,geomap)
        fg = open(geomap+'.rsc','w')
        for kg in rsc.keys():    fg.write(kg+'    '+rsc[kg]+'\n')
        fg.close()
    except: pass


    ######################################################################################
    if k in ['timeseries']:
        outname='epoch_temp.unw'
 
        f = h5py.File('geo_'+file,'w')
        group = f.create_group('timeseries')
        epochList = h5file['timeseries'].keys()
        epochList = sorted(epochList)
        for epoch in epochList:
            print 'geocoding '+epoch
            data = h5file['timeseries'].get(epoch)[:]
 
            amp,unw,unwrsc = geocode_one(data,geomap,outname)
            dset = group.create_dataset(epoch, data=unw, compression='gzip')
 
        atr = geocode_attributes(atr,unwrsc)
        for key,value in atr.iteritems():
            group.attrs[key] = value

    ######################################################################################
    elif k in ['interferograms','coherence','wrapped']:
        if   k == 'interferograms': outname = k[0]+'_temp.unw'
        elif k == 'coherence'     : outname = k[0]+'_temp.cor'
        else:                       outname = k[0]+'_temp.int'
 
        f = h5py.File('geo_'+file,'w')
        gg = f.create_group('interferograms')
        igramList = h5file[k].keys()
        igramList = sorted(igramList)
        for igram in igramList:
            print 'geocoding '+igram
            data = h5file[k][igram].get(igram)[:]
 
            amp,unw,unwrsc = geocode_one(data,geomap,outname)
 
            group = gg.create_group('geo_'+igram)
            dset = group.create_dataset('geo_'+igram, data=unw, compression='gzip')
 
            atr = geocode_attributes(h5file[k][igram].attrs, unwrsc)
            for key,value in atr.iteritems():
                group.attrs[key] = value
 
        #######################  support of old format  #######################
        ### mask
        try:
            data = h5file['mask'].get('mask')[:]
            amp,unw,unwrsc = geocode_one(data,geomap,'mask_'+outname)
            gm = f.create_group('mask')
            dset = gm.create_dataset('mask', data=unw, compression='gzip')
        except:  print 'No group for mask found in the file.'
        ### meanCoherence
        try:
            data = h5file['meanCoherence'].get('meanCoherence')[:]
            amp,unw,unwrsc = geocode_one(data,geomap,'meanCoherence_'+outname)
            gm = f.create_group('meanCoherence')
            dset = gm.create_dataset('meanCoherence', data=unw, compression='gzip')
        except:  print 'No group for meanCoherence found in the file'

    ######################################################################################
    else:
        data,atr = readfile.read(file)
        outname=fileName+'.unw'
 
        amp,unw,unwrsc = geocode_one(data,geomap,outname)
        atr = geocode_attributes(atr,unwrsc)
 
        writefile.write(unw,atr,'geo_'+file)
 
 
    ######################################################################################
    try:
        atr['subset_x0']
        rmCmd='rm '+geomap;            os.system(rmCmd);       print rmCmd
        rmCmd='rm '+geomap+'.rsc';     os.system(rmCmd);       print rmCmd
    except: pass
 
    try:
        f.close()
        h5file.close()
    except: pass
Example #15
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROI_PAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print 'reading lookup table file: ' + lut_file
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.empty((len_geo, wid_geo)) * fill_value
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print 'update attributes'
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> ' + fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
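
The core resampling step above can be tried in isolation. Below is a minimal sketch (synthetic arrays only; no readfile/writefile) of interpolating values known on a regular radar grid at the irregular radar coordinates stored in a lookup table, the same pattern as the loop bodies above:

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

# regular radar grid: 100 lines x 200 samples with a smooth synthetic signal
len_rdr, wid_rdr = 100, 200
data = np.add.outer(np.arange(len_rdr), np.arange(wid_rdr)).astype(np.float32)
pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

# fake lookup table: each geo pixel stores its radar (azimuth, range) coordinate,
# including some invalid values outside the radar file coverage
az = np.random.uniform(-5, len_rdr + 5, size=(50, 60))
rg = np.random.uniform(-5, wid_rdr + 5, size=(50, 60))
idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))

fill_value = np.nan
RGI_func = RGI(pts_rdr, data, method='nearest', bounds_error=False, fill_value=fill_value)
data_geo = np.full((50, 60), fill_value)
data_geo[idx] = RGI_func(pts_geo)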
Example #16
0
def main(argv):
    inps = cmdLineParse()

    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print 'input ' + k + ' file: ' + inps.timeseries_file
    if not k == 'timeseries':
        sys.exit('ERROR: input file is not timeseries!')
    h5file = h5py.File(inps.timeseries_file, 'r')

    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    print '--------------------------------------------'
    print 'Dates from input file: ' + str(len(dateListAll))
    print dateListAll

    inps.ex_date = get_exclude_date(inps, dateListAll)

    dateList = sorted(list(set(dateListAll) - set(inps.ex_date)))
    print '--------------------------------------------'
    if len(dateList) == len(dateListAll):
        print 'using all dates to calculate the velocity'
    else:
        print 'Dates used to estimate the velocity: ' + str(len(dateList))
        print dateList
    print '--------------------------------------------'

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix
    B = np.ones([len(datevector), 2])
    B[:, 0] = datevector
    #B_inv = np.linalg.pinv(B)
    B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
    B_inv = np.array(B_inv, np.float32)

    # Loading timeseries
    print "Loading time series file: " + inps.timeseries_file + ' ...'
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum, length * width], np.float32)
    prog_bar = ptime.progress_bar(maxValue=dateNum, prefix='loading: ')
    for i in range(dateNum):
        date = dateList[i]
        timeseries[i, :] = h5file[k].get(date)[:].flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5file.close()

    # Velocity Inversion
    print 'Calculating velocity ...'
    X = np.dot(B_inv, timeseries)
    velocity = np.reshape(X[0, :], [length, width])

    print 'Calculating rmse ...'
    timeseries_linear = np.dot(B, X)
    timeseries_residual = timeseries - timeseries_linear
    rmse = np.reshape(np.sqrt((np.sum((timeseries_residual)**2, 0)) / dateNum),
                      [length, width])

    print 'Calculating the standard deviation of the estimated velocity ...'
    s1 = np.sqrt(np.sum(timeseries_residual**2, 0) / (dateNum - 2))
    s2 = np.sqrt(np.sum((datevector - np.mean(datevector))**2))
    std = np.reshape(s1 / s2, [length, width])

    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'

    inps.outfile_rmse = os.path.splitext(
        inps.outfile)[0] + 'Rmse' + os.path.splitext(inps.outfile)[1]
    inps.outfile_std = os.path.splitext(
        inps.outfile)[0] + 'Std' + os.path.splitext(inps.outfile)[1]
    inps.outfile_r2 = os.path.splitext(
        inps.outfile)[0] + 'R2' + os.path.splitext(inps.outfile)[1]

    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum - 1]

    # File Writing
    print '--------------------------------------'
    atr['FILE_TYPE'] = 'velocity'
    print 'writing >>> ' + inps.outfile
    writefile.write(velocity, atr, inps.outfile)

    #atr['FILE_TYPE'] = 'rmse'
    print 'writing >>> ' + inps.outfile_rmse
    writefile.write(rmse, atr, inps.outfile_rmse)

    #atr['FILE_TYPE'] = 'std'
    print 'writing >>> ' + inps.outfile_std
    writefile.write(std, atr, inps.outfile_std)

    print 'Done.\n'
    return inps.outfile
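
The inversion above is plain least squares with design matrix B = [t, 1], solved through the normal equations. A minimal sketch on a synthetic single-pixel time series (decimal years, as returned by ptime.date_list2vector) reproducing the velocity and its standard deviation:

import numpy as np

datevector = np.array([2015.0, 2015.2, 2015.4, 2015.6, 2015.8])          # decimal years
ts = 0.03 * (datevector - datevector[0]) + np.random.normal(0, 2e-3, 5)  # 3 cm/yr + noise

B = np.ones((len(datevector), 2))
B[:, 0] = datevector
B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)    # pseudo-inverse via normal equations

X = np.dot(B_inv, ts)
velocity = X[0]                                       # slope: displacement unit per year
residual = ts - np.dot(B, X)
s1 = np.sqrt(np.sum(residual**2) / (len(ts) - 2))     # residual std with 2 fitted parameters
s2 = np.sqrt(np.sum((datevector - np.mean(datevector))**2))
velocity_std = s1 / s2                                # standard error of the slope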
Example #17
0
def diff_file(file1, file2, outName=None, force=False):
    '''Subtraction/difference of two input files'''
    if not outName:
        outName = os.path.splitext(file1)[0]+'_diff_'+os.path.splitext(os.path.basename(file2))[0]+\
                  os.path.splitext(file1)[1]

    print file1 + ' - ' + file2
    # Read basic info
    atr = readfile.read_attribute(file1)
    print 'Input first file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    k = atr['FILE_TYPE']

    # Multi-dataset/group file
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        # Check input files type for multi_dataset/group files
        atr2 = readfile.read_attribute(file2)
        k2 = atr2['FILE_TYPE']

        h5_1 = h5py.File(file1, 'r')
        h5_2 = h5py.File(file2, 'r')
        epochList = sorted(h5_1[k].keys())
        epochList2 = sorted(h5_2[k2].keys())
        if not all(i in epochList2 for i in epochList):
            print 'ERROR: ' + file2 + ' does not contain all epochs of ' + file1
            if force and k in ['timeseries']:
                print 'Continue and enforce the differencing for their shared dates only!'
            else:
                sys.exit(1)

        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName

        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        # check reference date
        if atr['ref_date'] == atr2['ref_date']:
            ref_date = None
        else:
            ref_date = atr['ref_date']
            data2_ref = h5_2[k2].get(ref_date)[:]
            print 'consider different reference date'
        # check reference pixel
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        if ref_y == int(atr2['ref_y']) and ref_x == int(atr2['ref_x']):
            ref_y = None
            ref_x = None
        else:
            print 'consider different reference pixel'

        # calculate difference in loop
        for i in range(epoch_num):
            date = epochList[i]
            data1 = h5_1[k].get(date)[:]
            try:
                data2 = h5_2[k2].get(date)[:]
                if ref_date:
                    data2 -= data2_ref
                if ref_x is not None and ref_y is not None:
                    data2 -= data2[ref_y, ref_x]
                data = diff_data(data1, data2)
            except:
                data = data1
            dset = group.create_dataset(date, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        for key, value in atr.iteritems():
            group.attrs[key] = value

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    elif k in ['interferograms', 'coherence', 'wrapped']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch1 = epochList[i]
            epoch2 = epochList2[i]
            data1 = h5_1[k][epoch1].get(epoch1)[:]
            data2 = h5_2[k2][epoch2].get(epoch2)[:]
            data = diff_data(data1, data2)
            gg = group.create_group(epoch1)
            dset = gg.create_dataset(epoch1, data=data, compression='gzip')
            for key, value in h5_1[k][epoch1].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    # Single dataset file
    else:
        data1, atr1 = readfile.read(file1)
        data2, atr2 = readfile.read(file2)
        data = diff_data(data1, data2)
        print 'writing >>> ' + outName
        writefile.write(data, atr1, outName)

    return outName
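
diff_data itself is not shown in this listing; a plausible minimal version, assuming it is an element-wise subtraction of two equally shaped arrays that keeps NaN where either input is invalid:

import numpy as np

def diff_data(data1, data2):
    # hypothetical helper: element-wise difference, NaN in either input stays NaN
    data = data1 - data2
    data[np.isnan(data1) + np.isnan(data2)] = np.nan
    return data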
Example #18
0
def main(argv):

    outName = 'mask.h5'
    method  = 'threshold'

    ##### Check Inputs
    if len(sys.argv)>2:
        try:   opts, args = getopt.getopt(argv,'h:f:m:M:x:y:o:d:e:',['nonzero'])
        except getopt.GetoptError:      Usage() ; sys.exit(1)
  
        for opt,arg in opts:
            if opt in ("-h","--help"):   Usage();   sys.exit()
            elif opt == '-f':         File = arg
            elif opt == '-m':         minV = float(arg)
            elif opt == '-M':         maxV = float(arg)
            elif opt == '-y':         ysub = [int(i) for i in arg.split(':')];        ysub.sort()
            elif opt == '-x':         xsub = [int(i) for i in arg.split(':')];        xsub.sort()
            elif opt == '-o':         outName    = arg
            elif opt == '-d':         epoch_date = arg
            elif opt == '-e':         epoch_num  = int(arg) - 1
            elif opt == '--nonzero':  method     = 'nonzero'

    elif len(sys.argv)==2:
        if   argv[0] in ['-h','--help']:    Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):       File = argv[0]
        else:    print 'Input file does not exist: '+argv[0];  sys.exit(1)
    else:                                   Usage(); sys.exit(1)

    ##### Input File Info
    atr = readfile.read_attributes(File)
    print '\n****************** Generate Mask *******************'
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']+': '+File
    mask = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
    print 'Create initial mask with the same size as the input file and all = 1'

    ##### Non-zero Mask #######
    if method == 'nonzero':
        k = atr['FILE_TYPE']
        MaskZero = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
  
        ext = os.path.splitext(File)[1].lower()
        if ext == '.h5' and k in ['interferograms','coherence','wrapped','timeseries']:
            h5file = h5py.File(File,'r')
            epochList = h5file[k].keys()
  
            for epoch in epochList:
                print epoch
                if k in ['interferograms','coherence','wrapped']:
                    data = h5file[k][epoch].get(epoch)[:]
                elif k in ['timeseries']:
                    data = h5file[k].get(epoch)[:]
                MaskZero *= data
                MaskZero[np.isnan(data)] = 0
            h5file.close()
  
        else:
            data,atr = readfile.read(File)
            MaskZero *= data
            MaskZero[np.isnan(data)] = 0
  
        mask = np.ones([int(atr['FILE_LENGTH']),int(atr['WIDTH'])])
        mask[MaskZero==0] = 0


    ##### Threshold ##########
    else:
        ##### Read and Initiate Mask
        try:        V, atr = readfile.read(File,epoch_date)
        except:
            try:    V, atr = readfile.read(File,epoch_num)
            except: V, atr = readfile.read(File)
  
        ##### Calculating Mask
        ## threshold
        try:
            mask[V<minV]=0
            print 'all value < '+str(minV)+' = 0'
        except:  print 'No min threshold'
        try:
            mask[V>maxV]=0
            print 'all value > '+str(maxV)+' = 0'
        except:  print 'No max threshold'  
        ## nan value
        mask[np.isnan(V)]=0
  
    ## subset
    try:
        mask[0:ysub[0],:]=0
        mask[ysub[1]:mask.shape[0],:]=0
        print 'all y in [0,'+str(ysub[0])+'] and ['+str(ysub[1])+',end] = 0'
    except:  print 'No subset in y direction'
    try:
        mask[:,0:xsub[0]]=0
        mask[:,xsub[1]:mask.shape[1]]=0
        print 'all x in [0,'+str(xsub[0])+'] and ['+str(xsub[1])+',end] = 0'
    except:  print 'No subset in x direction'
   
  
    ##### Writing mask file
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask,atr,outName)
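
Stripped of the option parsing and HDF5 I/O, the threshold branch reduces to boolean indexing on the data array. A minimal sketch:

import numpy as np

V = np.random.rand(5, 5)        # stand-in for the data read from File
minV, maxV = 0.2, 0.8

mask = np.ones(V.shape)
mask[V < minV] = 0              # below minimum threshold
mask[V > maxV] = 0              # above maximum threshold
mask[np.isnan(V)] = 0           # invalid pixels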
Example #19
0
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    '''Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, optional, output file name/path
    '''

    # Basic info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    try:    ref_yx = [int(atr['ref_y']), int(atr['ref_x'])]
    except: ref_yx = None

    filter_type = filter_type.lower()
    MSG = 'filtering '+k+' file: '+fname+' using '+filter_type+' filter'
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        MSG += ' with kernel size of %d' % int(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        MSG += ' with sigma of %.1f' % filter_par
    print MSG

    if not fname_out:
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+filter_type+ext

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k == 'timeseries':
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                dset = group.create_dataset(date, data=data_filt, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx and k in ['interferograms']:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_filt, compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_filt = filter_data(data, filter_type, filter_par)
        if ref_yx and k in ['.unw','velocity']:
            data_filt -= data_filt[ref_yx[0], ref_yx[1]]
        print 'writing >>> '+fname_out
        writefile.write(data_filt, atr, fname_out)

    return fname_out
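
filter_data is not shown in this listing; a plausible minimal version, assuming the four filter types named above map onto scipy.ndimage in the obvious way (moving-average and Gaussian kernels, high-pass as data minus low-pass):

import numpy as np
from scipy import ndimage

def filter_data(data, filter_type, filter_par):
    # hypothetical helper matching the filter names used above
    if filter_type == 'lowpass_avg':
        return ndimage.uniform_filter(data, size=int(filter_par))
    elif filter_type == 'highpass_avg':
        return data - ndimage.uniform_filter(data, size=int(filter_par))
    elif filter_type == 'lowpass_gaussian':
        return ndimage.gaussian_filter(data, sigma=float(filter_par))
    elif filter_type == 'highpass_gaussian':
        return data - ndimage.gaussian_filter(data, sigma=float(filter_par))
    raise ValueError('unsupported filter type: ' + filter_type)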
Example #20
0
    if save_mask == 'yes':
        mask_dis = np.zeros((length,width))
        if surfNum == 1:
            mask_dis = Mask
        else:
            i = 0
            mask_dis[ysub[2*i]:ysub[2*i+1],:] = Mask[ysub[2*i]:ysub[2*i+1],:]
            for i in range(1,surfNum):
                if ysub[2*i] < ysub[2*i-1]:
                    mask_dis[ysub[2*i]:ysub[2*i-1],:]  += Mask[ysub[2*i]:ysub[2*i-1],:]*(i+1)
                    mask_dis[ysub[2*i]:ysub[2*i-1],:]  /= 2
                    mask_dis[ysub[2*i-1]:ysub[2*i+1],:] = Mask[ysub[2*i-1]:ysub[2*i+1],:]*(i+1)
                else:
                    mask_dis[ysub[2*i]:ysub[2*i+1],:]   = Mask[ysub[2*i]:ysub[2*i+1],:]*(i+1)
        maskOutName = 'mask_'+str(surfNum)+surfType+'.h5'
        writefile.write(mask_dis,Matr,maskOutName)
        print 'save mask to '+maskOutName

    ############################## Removing Phase Ramp #######################################
    for file in fileList:
        print '------------------------------------------'
        print 'input file : '+file
        if surfNum == 1:
            rm.remove_surface(file,surfType,Mask,outName)
        else:
            rm.remove_multiple_surface(file,surfType,Mask,ysub,outName)


###########################################################################################
if __name__ == '__main__':
    main(sys.argv[1:])
Example #21
0
def main(argv):
    try:    File=argv[0]
    except: Usage();sys.exit(1)
  
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
  
    h5file=h5py.File(File,'r')
    print '\n************* Output to ROI_PAC format ***************'
  
    if k == 'velocity':
        dset = h5file['velocity'].get('velocity')
        data = dset[0:dset.shape[0],0:dset.shape[1]]
        print "converting velocity to a 1 year interferogram."
        wvl=float(h5file[k].attrs['WAVELENGTH'])
        data=(-4*pi/wvl)*data
    
        outname=File.split('.')[0]+'.unw'
        writefile.write(data,atr,outname)
  
    elif k == 'timeseries':
        dateList=h5file['timeseries'].keys() 
        ## Input
        if   len(sys.argv)==2:
            print 'No input date specified >>> continue with the last date'
            d=dateList[-1]
        elif len(sys.argv)==3:
            d=sys.argv[2]
        elif len(sys.argv)==4:
            ds=sys.argv[2:4]; ds.sort()
            d_ref = ds[0]
            d     = ds[1]
        else: Usage(); sys.exit(1)
        d = ptime.yyyymmdd(d)
        try: d_ref = ptime.yyyymmdd(d_ref)
        except: pass
    
        ## Data
        print 'reading '+d+' ... '
        data = h5file['timeseries'].get(d)[:]
        try:
            print 'reading '+d_ref+' ... '
            data_ref = h5file['timeseries'].get(d_ref)[:]
            data = data - data_ref
        except: pass
        wvl=float(atr['WAVELENGTH'])
        data *= -4*pi/wvl
    
        ## outName
        try:      master_d = d_ref
        except:
            try:    master_d = atr['ref_date']
            except: master_d = atr['DATE']
        if len(master_d)==8:  master_d=master_d[2:8]
        if len(d)==8:         d=d[2:8]
        outname = master_d+'_'+d+'.unw'
    
        ## Attributes
        atr['FILE_TYPE']             = '.unw'
        atr['P_BASELINE_TIMESERIES'] = '0.0'
        atr['UNIT']                  = 'radian'
        atr['DATE']                  = master_d
        atr['DATE12']                = master_d+'-'+d
        
        ## Writing
        writefile.write(data,atr,outname)

    elif k in ['interferograms','coherence','wrapped']:
        ## Check input
        igramList=h5file[k].keys()
        try:
            d = sys.argv[2]
            for i in range(len(igramList)):
                if d in igramList[i]:
                    igram = igramList[i]
        except:
            igram = igramList[-1];   print 'No input date specified >>> continue with the last date'
        ## Read and Write
        print 'reading '+igram+' ... '
        dset = h5file[k][igram].get(igram)
        data = dset[0:dset.shape[0],0:dset.shape[1]]
        outname = igram
        print 'writing >>> '+ outname
        writefile.write_float32(data,outname)
        f = open(outname+'.rsc','w')
        for key , value in h5file[k][igram].attrs.iteritems():
            f.write(key+'    '+str(value)+'\n')
        f.close()    
  
  
    else:
        dset = h5file[k].get(k)
        data = dset[0:dset.shape[0],0:dset.shape[1]]
        if k == 'temporal_coherence': outname=File.split('.')[0]+'.cor'
        else:                         outname=File.split('.')[0]+'.unw'
    
        writefile.write(data,atr,outname)
  
  
    h5file.close()
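
Both the velocity and time-series branches rescale displacement in meters to ROI_PAC phase in radians with the two-way factor -4*pi/wavelength. A quick numeric check, with a C-band wavelength assumed:

import numpy as np

wvl = 0.0562                         # C-band wavelength in meters (assumed)
disp = 0.01                          # 1 cm of range increase
phase = (-4 * np.pi / wvl) * disp    # about -2.24 rad (two-way path, ROI_PAC sign convention)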
Example #22
0
def seed_file(File,outName,refList,ref_x='',ref_y=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        epochNum  = len(epochList)
        print 'number of epochs: '+str(epochNum)
        
        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has different epoch number '+\
                  'from input file.'
            print 'Reference List epoch number: '+str(len(refList))
            print 'Input file     epoch number: '+str(epochNum)
            sys.exit(1)
  
        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            print epoch
            data = h5file[k].get(epoch)[:]
            
            data -= refList[i]
  
            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr  = seed_attributes(atr,ref_x,ref_y)
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr  = h5file[k][epoch].attrs

            data -= refList[i]
            atr  = seed_attributes(atr,ref_x,ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():    gg.attrs[key] = value

            ut.printProgress(i+1,epochNum,'seeding:',epoch)
  
    ##### Single Dataset File
    else:
        data,atr = readfile.read(File)

        data -= refList
        atr  = seed_attributes(atr,ref_x,ref_y)

        writefile.write(data,atr,outName)
  
    ##### End & Cleaning
    try:
        h5file.close()
        h5out.close()
    except: pass

    return 1
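
Seeding is nothing more than subtracting a per-epoch reference value, usually the value at the chosen reference pixel, so that this pixel becomes exactly zero. A minimal sketch:

import numpy as np

data = np.random.rand(4, 5)
ref_y, ref_x = 2, 3
ref_value = data[ref_y, ref_x]

data_seeded = data - ref_value
assert data_seeded[ref_y, ref_x] == 0.0   # the reference pixel is now the zero point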
Example #23
0
def correct_lod_file(File, outFile=None):
    # Check Sensor Type
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    range_resolution = float(atr['RANGE_PIXEL_SIZE'])

    r = np.linspace(0, width - 1, width)
    R = range_resolution * r * (3.87e-7)
    Ramp = np.tile(R, [length, 1])

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    Ramp -= Ramp[yref][xref]

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())

        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'wrapped']:
            print 'number of interferograms: ' + str(len(epochList))
            wvl = float(atr['WAVELENGTH'])
            Ramp *= -4 * np.pi / wvl
            for epoch in epochList:
                print epoch
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(len(epochList)):
                epoch = epochList[i]
                print epoch
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)

        h5.close()
        h5out.close()

    else:
        data, atr = readfile.read(File)
        data -= Ramp
        writefile.write(data, atr, outFile)

    return outFile
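
The empirical local-oscillator-drift model above is linear in range and scales with the time separation between acquisitions. A worked example with an assumed Envisat range pixel size shows the magnitude of the effect:

import numpy as np

width, length = 1000, 800
range_pixel_size = 7.8                   # meters, assumed Envisat value
r = np.arange(width)
R = range_pixel_size * r * 3.87e-7       # drift rate in meters per year per range column
Ramp = np.tile(R, [length, 1])

dt = 1.0                                 # one year between acquisitions
ramp_1yr = Ramp * dt                     # far range biased by ~3 mm relative to near range
print 'max LOD ramp after 1 year: %.4f m' % ramp_1yr.max()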
Example #24
0
def main(argv):
    inps = cmdLineParse()

    ##### 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1, atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print 'Input 1st file is ' + k1

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width = int(round((east - west) / lon_step))
    length = int(round((south - north) / lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incident angle
    u_los = np.zeros((2, width * length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print '---------------------'
        print 'reading ' + fname
        atr = readfile.read_attribute(fname)

        [x0, x1] = subset.coord_geo2radar([west, east], atr, 'lon')
        [y0, y1] = subset.coord_geo2radar([north, south], atr, 'lat')
        V = readfile.read(fname, (x0, y0, x1, y1))[0]
        u_los[i, :] = V.flatten()

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print 'heading angle: ' + str(heading_angle)
        heading_angle *= np.pi / 180.
        heading.append(heading_angle)

        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        #print 'incidence angle: '+str(inc_angle)
        inc_angle *= np.pi / 180.
        incidence.append(inc_angle)

    ##### 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could be easily modified to support multiple viewing geometries (e.g. two adjacent tracks from asc & desc) to resolve 3D

    # Design matrix
    A = np.zeros((2, 2))
    for i in range(len(inps.file)):
        A[i, 0] = np.cos(incidence[i])
        A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0, :], (length, width))
    u_h = np.reshape(u_vh[1, :], (length, width))

    ##### 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['FILE_LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print '---------------------'
    outname = inps.outfile[0]
    print 'writing   vertical component to file: ' + outname
    writefile.write(u_v, atr, outname)

    outname = inps.outfile[1]
    print 'writing horizontal component to file: ' + outname
    writefile.write(u_h, atr, outname)

    print 'Done.'
    return
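
Per pixel, the decomposition above solves a 2x2 linear system A * [Uv, Uh] = Ulos, with A built from the incidence and heading angles of the two tracks. A minimal numeric sketch with made-up geometry:

import numpy as np

incidence = np.deg2rad([34.0, 41.0])     # ascending / descending incidence angles (assumed)
heading = np.deg2rad([347.0, 193.0])     # ascending / descending heading angles (assumed)
azimuth = 0.0                            # azimuth of the horizontal component of interest

A = np.zeros((2, 2))
for i in range(2):
    A[i, 0] = np.cos(incidence[i])
    A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - azimuth)

u_los = np.array([-0.010, -0.004])       # LOS displacement from the two tracks, in meters
u_v, u_h = np.dot(np.linalg.pinv(A), u_los)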
Example #25
0
def main(argv):

    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print 'input file(s): ' + str(len(inps.file))
    print inps.file

    #print '\n*************** Phase Ramp Removal ***********************'
    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # check outfile and parallel option
    if len(inps.file) > 1:
        inps.outfile = None
    elif len(inps.file) == 1 and inps.parallel:
        inps.parallel = False
        print 'parallel processing is disabled for one input file'

    # Update mask for multiple surfaces
    if inps.ysub:
        # Read mask
        if inps.mask_file:
            Mask_temp = readfile.read(inps.mask_file)[0]
            Mask = np.zeros((length, width))
            Mask[Mask_temp != 0] = 1
        else:
            Mask = np.ones((length, width))

        # Update mask for multiple surface from inps.ysub
        mask_multiSurface = np.zeros((length, width))
        surfNum = len(inps.ysub) / 2
        if surfNum == 1:
            mask_multiSurface = Mask
        else:
            i = 0
            mask_multiSurface[inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] = Mask[
                inps.ysub[2 * i]:inps.ysub[2 * i + 1], :]
            for i in range(1, surfNum):
                if inps.ysub[2 * i] < inps.ysub[2 * i - 1]:
                    mask_multiSurface[
                        inps.ysub[2 * i]:inps.ysub[2 * i - 1], :] += Mask[
                            inps.ysub[2 * i]:inps.ysub[2 * i - 1], :] * (i + 1)
                    mask_multiSurface[inps.ysub[2 * i]:inps.ysub[2 * i -
                                                                 1], :] /= 2
                    mask_multiSurface[
                        inps.ysub[2 * i - 1]:inps.ysub[2 * i + 1], :] = Mask[
                            inps.ysub[2 * i - 1]:inps.ysub[2 * i +
                                                           1], :] * (i + 1)
                else:
                    mask_multiSurface[
                        inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] = Mask[
                            inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] * (i + 1)

        # Write updated mask for multiple surfaces into file
        outFile = 'mask_' + str(surfNum) + inps.surface_type + '.h5'
        atr['FILE_TYPE'] = 'mask'
        writefile.write(mask_multiSurface, atr, outFile)
        print 'saved mask to ' + outFile

    ############################## Removing Phase Ramp #######################################
    if inps.parallel:
        num_cores = multiprocessing.cpu_count()
        print 'parallel processing using %d cores ...' % (num_cores)
        Parallel(n_jobs=num_cores)(delayed(rm.remove_surface)(file, inps.surface_type, inps.mask_file, ysub=inps.ysub)\
                                   for file in inps.file)
    else:
        for File in inps.file:
            print '------------------------------------------'
            rm.remove_surface(File, inps.surface_type, inps.mask_file,
                              inps.outfile, inps.ysub)

    print 'Done.'
    return
Example #26
0
def main(argv):

    ####################### Inputs Check ########################
    try:
        opts, args = getopt.getopt(argv, "h:f:o:", ['help'])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    if len(sys.argv) > 4:
        for opt, arg in opts:
            if opt in ("-h", "--help"):
                usage()
                sys.exit()
            elif opt == '-f':
                fileList = arg.split(',')
            elif opt == '-o':
                outName = arg

    elif len(sys.argv) <= 4 and len(sys.argv) >= 3:
        fileList = [sys.argv[1], sys.argv[2]]
        try:
            outName = sys.argv[3]
        except:
            pass
    else:
        usage()
        sys.exit(1)

    print '\n****************** Add **********************'
    print 'Input files: '
    print fileList

    ext = os.path.splitext(fileList[0])[1].lower()
    try:
        outName
    except:
        outName = fileList[0].split('.')[0] + '_plus_' + fileList[1].split('.')[0] + ext

    ##### Read File Info / Attributes
    atr = readfile.read_attribute(fileList[0])
    print 'Input file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    k = atr['FILE_TYPE']

    ##### File Type Check
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        for i in range(1, len(fileList)):
            File = fileList[i]
            r = readfile.read_attribute(File)
            if not r['FILE_TYPE'] == k:
                print 'Input file type is not the same: ' + r['FILE_TYPE']
                sys.exit(1)

        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)

        h5in = h5py.File(fileList[0])
        epochList = sorted(h5in[k].keys())

    ########################### Add file by file ########################
    if k in ['timeseries']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File, 'r')
                d = h5file[k].get(epoch)[:]

                data = add(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value

        h5out.close()
        h5in.close()

    elif k in ['interferograms', 'coherence', 'wrapped']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File, 'r')
                d = h5file[k][epoch].get(epoch)[:]

                data = add(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5in[k][epoch].attrs.iteritems():
                gg.attrs[key] = value

        h5out.close()
        h5in.close()

    ## All the other file types
    else:
        data = np.zeros((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        for File in fileList:
            print 'loading ' + File
            d, r = readfile.read(File)
            data = add(data, d)
        writefile.write(data, atr, outName)
Example #27
0
def geocode_file_with_geo_lookup_table(fname,
                                       lookup_file=None,
                                       interp_method='nearest',
                                       fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Inputs:
        fname         : string, file to be geocoded
        lookup_file   : string, optional, lookup table file generated by ROIPAC or Gamma
                        i.e. geomap_4rlks.trans           from ROI_PAC
                             sim_150911-150922.UTM_TO_RDC from Gamma
        interp_method : string, optional, interpolation/resampling method, supporting nearest, linear, cubic
        fname_out : string, optional, output geocoded filename
    Output:
        fname_out

    A faster way is as below:
    https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    '''
    atr_rdr = readfile.read_attribute(fname)
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default values:
    if not lookup_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lookup_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lookup_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    # Check lookup table file
    try:
        lookup_file = ut.get_file_list(lookup_file)[0]
    except:
        lookup_file = None
    if not lookup_file:
        sys.exit(
            'ERROR: No lookup table file found! Can not geocoded without it.')

    ##### 1. Get Y/X coordinates in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    print 'getting Y/X coordinates from file in radar coordinates'
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    yy, xx = np.mgrid[0:len_rdr - 1:len_rdr * 1j, 0:wid_rdr - 1:wid_rdr * 1j]
    yx_rdr = np.hstack((yy.reshape(-1, 1), xx.reshape(-1, 1)))

    ##### 2. Get Y/X coordinates in geo*trans file
    print 'reading ' + lookup_file
    rg, az, atr_lut = readfile.read(lookup_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust value read from lookup table file'

    # extract pixels only available in radar file (get rid of invalid corners)
    az = az.flatten()
    rg = rg.flatten()
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    yx_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))

    print 'interpolation method: ' + interp_method
    k = atr_rdr['FILE_TYPE']

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                dset = group.create_dataset(date,
                                            data=data_geo.reshape(
                                                (len_geo, wid_geo)),
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo.reshape(
                                             (len_geo, wid_geo)),
                                         compression='gzip')
                atr = geocode_attribute_with_geo_lookup_table(
                    h5[k][ifgram].attrs, atr_lut, print_message=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0].flatten()
        print 'geocoding'
        data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
        data_geo[idx] = griddata(yx_rdr, data, yx_geo, method=interp_method)
        print 'update attributes'
        atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
        print 'writing >>> ' + fname_out
        writefile.write(data_geo.reshape((len_geo, wid_geo)), atr, fname_out)

    return fname_out
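
For comparison with the RegularGridInterpolator version earlier, the heart of this variant is a single griddata call; griddata re-triangulates the full set of source points on every call, which is what makes it slow for multi-epoch stacks. A minimal sketch:

import numpy as np
from scipy.interpolate import griddata

yy, xx = np.mgrid[0:9:10j, 0:19:20j]            # 10 x 20 regular radar grid
yx_rdr = np.hstack((yy.reshape(-1, 1), xx.reshape(-1, 1)))
data = (yy + xx).flatten()                      # synthetic values on the grid

yx_geo = np.array([[2.5, 3.5], [7.1, 15.9]])    # irregular query points from a lookup table
vals = griddata(yx_rdr, data, yx_geo, method='nearest')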
Example #28
0
def subset_file(File, subset_dict, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. fill value for areas outside the data coverage. default=None
                                   None/absent to subset within the data coverage only.
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print 'subset ' + k + ' file: ' + File + ' ...'

    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = bool(subset_dict.get('fill_value', None))
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print 'data   range in y/x: ' + str(data_box)
    print 'subset range in y/x: ' + str(pix_box)
    print 'data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict))
    print 'subset range in lat/lon: ' + str(geo_box)

    if pix_box == data_box:
        print 'Subset range == data coverage, no need to subset. Skip.'
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: ' + str(epochNum)
        else:
            print 'number of interferograms: ' + str(epochNum)

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.iteritems():
                gg.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
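
get_box_overlap_index is used above but not shown; a plausible minimal version, assuming boxes are (x0, y0, x1, y1) tuples and the returned boxes index the overlap in each input box's own pixel coordinates:

def get_box_overlap_index(data_box, subset_box):
    # hypothetical helper: intersection of two boxes in each box's local coordinates
    x0 = max(data_box[0], subset_box[0])
    y0 = max(data_box[1], subset_box[1])
    x1 = min(data_box[2], subset_box[2])
    y1 = min(data_box[3], subset_box[3])
    if x0 >= x1 or y0 >= y1:
        raise ValueError('input boxes do not overlap')
    box4data = (x0 - data_box[0], y0 - data_box[1], x1 - data_box[0], y1 - data_box[1])
    box4subset = (x0 - subset_box[0], y0 - subset_box[1], x1 - subset_box[0], y1 - subset_box[1])
    return box4data, box4subset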
Example #29
0
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print 'read mask file: ' + maskFile
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print 'use mask of the whole area'

    ##### Input File Info
    k = atr['FILE_TYPE']
    print 'Input file is ' + k
    print 'remove ramp type: ' + surf_type

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print 'writing >>> ' + outFile

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print 'Removing ' + surf_type + ' from ' + k

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print 'writing >>> ' + outFile
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print 'Remove ' + surf_type + ' took ' + str(time.time() - start) + ' secs'
    return outFile
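
remove_data_surface is not shown in this listing; for the 'plane' ramp type it amounts to a masked least-squares fit of z = a*y + b*x + c followed by subtraction everywhere. A minimal sketch under that assumption:

import numpy as np

def remove_plane(data, mask):
    # hypothetical 'plane' case: fit on masked pixels, subtract from the whole array
    length, width = data.shape
    yy, xx = np.mgrid[0:length, 0:width]
    G = np.column_stack((yy.flatten(), xx.flatten(), np.ones(length * width)))
    z = data.flatten()
    ok = (mask.flatten() != 0) * ~np.isnan(z)
    coeff = np.linalg.lstsq(G[ok], z[ok])[0]
    ramp = np.dot(G, coeff).reshape(length, width)
    return data - ramp, ramp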
Example #30
0
def geocode_file_geo_lut(fname, lookup_file, fname_out, inps):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lookup_file   : string, lookup table file generated by ROIPAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        inps       : Namespace with interp_method (string: nearest/linear) and
                     fill_value (value used for points outside of the interpolation domain)
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ##### Interpolate value on irregular radar coordinates (from lookup table file value)
    ##### with known value on regular radar coordinates (from radar file attribute)
    ## Grid/regular coordinates from row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: '+fname
    atr_rdr = readfile.read_attribute(fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_old = (np.arange(len_rdr), np.arange(wid_rdr))

    ## Irregular coordinates from data value in lookup table
    print 'reading lookup table file: '+lookup_file
    atr_lut = readfile.read_attribute(lookup_file)
    rg = readfile.read(lookup_file, epoch='range')[0]
    az = readfile.read(lookup_file, epoch='azimuth')[0]
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az>0.0)*(az<=len_rdr)*(rg>0.0)*(rg<=wid_rdr)
    pts_new = np.hstack((az[idx].reshape(-1,1), rg[idx].reshape(-1,1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.empty((len_geo, wid_geo))
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of datasets: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_geo_lut(atr_rdr, atr_lut)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_geo_lut(h5[k][ifgram].attrs, atr_lut, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                       bounds_error=False, fill_value=inps.fill_value)
        data_geo[idx] = RGI_func(pts_new)

        print 'update attributes'
        atr = update_attribute_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
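
# --- Illustrative sketch (not part of PySAR): how scipy.interpolate.
# RegularGridInterpolator, as used above, maps values known on a regular
# radar grid onto irregular lookup-table coordinates. All names and values
# here are hypothetical.
import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

data = np.arange(12, dtype=np.float32).reshape(3, 4)  # regular 3x4 radar grid
pts_old = (np.arange(3), np.arange(4))                # grid axes: rows, cols
pts_new = np.array([[0.5, 0.5],                       # irregular (az, rg)
                    [1.2, 2.7]])                      # points from a lookup table
func = RGI(pts_old, data, method='linear', bounds_error=False, fill_value=np.nan)
values = func(pts_new)  # interpolated values, written into data_geo[idx] above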
Beispiel #31
0
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print 'estimate phase ramp during the correction'
    print 'ramp type: ' + ramp_type

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i], x_list[i]] == 0:
            print '\nERROR: Connecting point (%d,%d) is outside the masked area! Select it again!\n' % (
                y_list[i], x_list[i])
            sys.exit(1)
    print 'Number of bridges: ' + str(len(x_list) / 2)
    print 'Bonding points coordinates:\nx: ' + str(x_list) + '\ny: ' + str(
        y_list)

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx = ''
        n_bridge = len(x_list) / 2
        for i in range(n_bridge):
            pair_yx = str(y_list[2 * i]) + ',' + str(x_list[2 * i]) + ',' + str(
                y_list[2 * i + 1]) + ',' + str(x_list[2 * i + 1])
            if not i == n_bridge - 1:
                point_yx += pair_yx + ','
                line_yx += pair_yx + ';'
            else:
                point_yx += pair_yx
                line_yx += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print plot_cmd
            os.system(plot_cmd)
        except:
            pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        print 'reference pixel in y/x: %d/%d' % (ref_y, ref_x)
    except:
        sys.exit(
            'ERROR: Cannot find ref_y/x value, input file is not referenced in space!'
        )

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0] + '_unwCor' + ext
    ifgram_cor_deramp_file = os.path.splitext(
        ifgram_cor_file)[0] + '_' + ramp_type + ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file, 'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + ifgram_cor_file

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file, 'w')
            group_deramp = h5out_deramp.create_group(k)
            print 'writing >>> ' + ifgram_cor_deramp_file

        ##### Loop
        print 'Number of interferograms: ' + str(ifgram_num)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                       ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            ramp[data == 0.] = 0.
            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram,
                                     data=data_derampCor + ramp,
                                     compression='gzip')
            for key, value in h5[k][ifgram].attrs.iteritems():
                gg.attrs[key] = value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram,
                                                data=data_derampCor,
                                                compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg_deramp.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try:
            h5out_deramp.close()
        except:
            pass

    #### .unw file
    elif ext == '.unw':
        print 'read ' + ifgram_file
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
        data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

        print 'writing >>> ' + ifgram_cor_file
        ramp[data == 0.] = 0.
        ifgram_cor_file = writefile.write(data_derampCor + ramp, atr,
                                          ifgram_cor_file)
        if save_cor_deramp_file:
            print 'writing >>> ' + ifgram_cor_deramp_file
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr,
                                                     ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: ' + ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
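
# --- Conceptual sketch of a single bridging step (a hypothetical stand-in
# for the bridging_data() helper called above, not its actual code): shift
# the patch holding the far end of a bridge by an integer number of 2*pi so
# both bridge endpoints agree.
import numpy as np

def bridge_one_pair(data, patch_mask, y0, x0, y1, x1):
    '''(y0,x0) lies in the reference patch; (y1,x1) in the patch to adjust;
    patch_mask is True over the patch containing (y1,x1).'''
    diff = data[y0, x0] - data[y1, x1]
    n_cycle = np.round(diff / (2. * np.pi))   # integer ambiguity estimate
    data[patch_mask] += 2. * np.pi * n_cycle  # shift the whole patch at once
    return data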
Beispiel #32
0
def main(argv):

    ####################### Inputs Check ########################
    try:    opts, args = getopt.getopt(argv,"hf:o:",['help'])
    except getopt.GetoptError:    Usage() ; sys.exit(1)
  
    if len(sys.argv) > 4:
        for opt,arg in opts:
            if opt in ("-h","--help"):  Usage();  sys.exit()
            elif opt == '-f':   fileList = arg.split(',')
            elif opt == '-o':   outName  = arg
  
    elif len(sys.argv) <= 4 and len(sys.argv) >= 3:
        fileList = [sys.argv[1],sys.argv[2]]
        try: outName = sys.argv[3]
        except: pass
    else: Usage();  sys.exit(1)
  
    print '\n****************** Add **********************'
    print 'Input files: '
    print fileList
  
    ext = os.path.splitext(fileList[0])[1].lower()
    try:     outName
    except:  outName = os.path.splitext(os.path.basename(fileList[0]))[0]+'_plus_'+\
                       os.path.splitext(os.path.basename(fileList[1]))[0]+ext
  
  
    ##### Read File Info / Attributes
    atr  = readfile.read_attributes(fileList[0])
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']
    k = atr['FILE_TYPE']
  
    ##### File Type Check
    if k in ['timeseries','interferograms','coherence','wrapped']:
        for i in range(1,len(fileList)):
            File = fileList[i]
            r = readfile.read_attributes(File)
            if not r['FILE_TYPE'] == k:
                print 'Input file type is not the same: '+r['FILE_TYPE']
                sys.exit(1)
  
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
  
        h5in  = h5py.File(fileList[0],'r')
        epochList = h5in[k].keys()

    ########################### Add file by file ########################
    if k in ['timeseries']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File,'r')
                d = h5file[k].get(epoch)[:]
  
                data = add(data,d)
  
            dset = group.create_dataset(epoch, data=data, compression='gzip')
        for key,value in atr.iteritems():   group.attrs[key] = value
  
        h5out.close()
        h5in.close()
  
    elif k in ['interferograms','coherence','wrapped']:
        for epoch in epochList:
            print epoch
            data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
            for File in fileList:
                print File
                h5file = h5py.File(File,'r')
                d = h5file[k][epoch].get(epoch)[:]
  
                data = add(data,d)
  
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5in[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
  
        h5out.close()
        h5in.close()
  
    ## All the other file types
    else:
        data = np.zeros((int(atr['FILE_LENGTH']),int(atr['WIDTH'])))
        for File in fileList:
            print 'loading '+File
            d,r = readfile.read(File)
            data = add(data,d)
        writefile.write(data,atr,outName)
Beispiel #33
0
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print 'multilooking ' + k + ' file ' + infile
    print 'number of looks in y / azimuth direction: %d' % lks_y
    print 'number of looks in x / range   direction: %d' % lks_x

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print 'writing >>> ' + outfile

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print 'number of interferograms: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.iteritems():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k in ['.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
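
# --- Minimal sketch of spatial multilooking (an assumed stand-in for the
# multilook_matrix() helper used above): average non-overlapping
# lks_y x lks_x windows by cropping to a multiple of the window size and
# reshaping.
import numpy as np

def multilook_sketch(data, lks_y, lks_x):
    length_mli = data.shape[0] // lks_y
    width_mli = data.shape[1] // lks_x
    data = data[:length_mli * lks_y, :width_mli * lks_x]
    return data.reshape(length_mli, lks_y, width_mli, lks_x).mean(axis=(1, 3))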
Beispiel #34
0
def mask_file(in_file,M,out_file=''):
    ## Mask input file with mask matrix M

    atr = readfile.read_attributes(in_file)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    if out_file == '':
        ext      = os.path.splitext(in_file)[1]
        out_file = os.path.basename(in_file).split('.')[0]+'_masked'+ext

    if k in ['timeseries','interferograms','wrapped','coherence']:
        h5file = h5py.File(in_file,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        print 'number of epochs: '+str(len(epochList))

        h5out = h5py.File(out_file,'w')
        print 'writing >>> '+out_file

    ##### Multiple Dataset File
    if k == 'timeseries':
        group = h5out.create_group(k)
        for d in epochList:
            print d
            unw = h5file[k].get(d)[:]

            unw = mask_data(unw,M)

            dset = group.create_dataset(d, data=unw, compression='gzip')
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        gg = h5out.create_group(k)
        for igram in epochList:
            print igram
            unw = h5file[k][igram].get(igram)[:]

            unw = mask_data(unw,M)

            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=unw, compression='gzip')
            for key, value in h5file[k][igram].attrs.iteritems():
                group.attrs[key] = value
        try:
            mask = h5file['mask'].get('mask')
            gm = h5out.create_group('mask')
            dset = gm.create_dataset('mask', data=mask, compression='gzip')
        except: print 'no mask group found.'

    ##### Single Dataset File
    else:
        import pysar._writefile as writefile
        unw,atr = readfile.read(in_file)
        unw     = mask_data(unw,M)
        writefile.write(unw,atr,out_file)

    try:
        h5file.close()
        h5out.close()
    except: pass
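
# --- Minimal sketch of the masking step (an assumed stand-in for the
# mask_data() helper used above): pixels where the mask is zero are set to
# NaN so they drop out of downstream statistics.
import numpy as np

def mask_data_sketch(data, M):
    data = np.array(data, dtype=np.float32)  # copy so the input stays intact
    data[M == 0] = np.nan
    return data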
Beispiel #35
0
#atr['XMAX'] = 999
#atr['YMAX'] = 999
#atr['WAVELENGTH'] = 0.0562356467937372
atr['FILE_TYPE'] = 'velocity'

for n in ts_list:
    spl = n.split('syn')
    pick_yr = spl[-1]
    year,test = pick_yr.split('-')
    year = int(year)
    summ = zeros((1000,1000))
    if year in velocities: pass
    else:
        print 'Working on '+str(year)+' year long time series'
        velocities[year] = zeros((1000,1000))
        spl_lst = glob.glob(directory+'/syn'+str(year)+'*')
        for i in spl_lst:
            velocity_file = i +'/velocity_simStd.h5'
            f = h5py.File(velocity_file,'r')
            dset = f['velocity'].get('velocity')
            vel = asarray(dset)
            velocities[year]=velocities[year]+vel
        average = velocities[year]/float(len(spl_lst))
#        average = velocities[year]
        filename = 'average_velStd_'+str(year)+'_years.h5'
        try:
            os.remove(filename)
        except OSError:
            pass
        write(average,atr,filename)
Beispiel #36
0
def correct_lod_file(File, rangeDistFile=None, outFile=None):
    # Check Sensor Type
    print 'correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)'
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    if not rangeDistFile:
        print 'calculate range distance from input file attributes'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        range_resolution = float(atr['RANGE_PIXEL_SIZE'])
        rangeDist1D = range_resolution * np.linspace(0, width - 1, width)
        rangeDist = np.tile(rangeDist1D, (length, 1))
    else:
        print 'read range distance from file: %s' % (rangeDistFile)
        rangeDist = readfile.read(rangeDistFile, epoch='slantRangeDistance')[0]

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    rangeDist -= rangeDist[yref][xref]
    Ramp = np.array(rangeDist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        epochNum = len(epochList)

        print 'writing >>> %s' % (outFile)
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        prog_bar = ptime.progress_bar(maxValue=epochNum)
        if k in ['interferograms', 'wrapped']:
            Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
            print 'number of interferograms: ' + str(epochNum)
            date12List = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12List[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)
        prog_bar.close()
        h5.close()
        h5out.close()

    elif k in ['.unw']:
        data, atr = readfile.read(File)
        Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
        dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
        dates = ptime.yyyymmdd2years(dates)
        dt = dates[1] - dates[0]
        data -= Ramp * dt
        print 'writing >>> %s' % (outFile)
        writefile.write(data, atr, outFile)
    else:
        print 'No need to correct for LOD for %s file' % (k)

    return outFile
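
# --- Worked sketch of the empirical LOD model applied above (Marinkovic and
# Larsen, 2013): the drift grows with slant-range distance at 3.87e-7 per
# year, and is mapped to interferometric phase with the usual
# -4*pi/wavelength factor. Numbers below are assumed for illustration.
import numpy as np

wavelength = 0.0562356  # m, Envisat C-band
range_dist = 50000.0    # m, slant-range offset from the reference pixel
dt = 1.5                # years between the two acquisitions

ramp_m = 3.87e-7 * range_dist * dt              # drift expressed in meters
ramp_rad = ramp_m * (-4. * np.pi / wavelength)  # the same drift as phase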
Beispiel #37
0
def geocode_file_radar_lut(fname, lookup_file, fname_out=None, inps=None):
    '''Geocode file using lookup table file in radar coordinates (isce).
    Two solutions:
    1) scipy.interpolate.griddata, with a speed up solution from Jaime and Jeff (Stack Overflow)
        https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    2) matplotlib.tri, interpolation from triangular grid to quad grid, which is much slower than 1).

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file, geometryRadar.h5
        fname_out   : string, optional, output geocoded filename
        inps        : namespace, object with the following items:
                      interp_method : string, interpolation/resampling method; only 'linear' is supported
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out  : string, optional, output geocoded filename
    '''
    start = time.time()
    ## Default Inputs and outputs
    if not inps:
        inps = cmdLineParse()

    if inps.interp_method != 'linear':
        print 'ERROR: only the linear interpolation method is supported'
        print 'Input method is '+inps.interp_method
        sys.exit(-1)

    if not fname_out:
        fname_out = geocode_output_filename(fname)

    ## Read lookup table file
    atr_rdr = readfile.read_attribute(fname)
    length = int(atr_rdr['FILE_LENGTH'])
    width = int(atr_rdr['WIDTH'])
    print 'reading lookup table file '+lookup_file
    lat = readfile.read(lookup_file, epoch='latitude')[0]
    lon = readfile.read(lookup_file, epoch='longitude')[0]

    #####Prepare output pixel grid: lat/lon range and step
    if os.path.isfile(inps.lalo_step):
        print 'use file %s as reference for output grid lat/lon range and step' % (inps.lalo_step)
        atr_ref = readfile.read_attribute(inps.lalo_step)
        inps.lat_step = float(atr_ref['Y_STEP'])
        inps.lon_step = float(atr_ref['X_STEP'])
        inps.lat_num = int(atr_ref['FILE_LENGTH'])
        inps.lon_num = int(atr_ref['WIDTH'])
        inps.lat0 = float(atr_ref['Y_FIRST'])
        inps.lon0 = float(atr_ref['X_FIRST'])
        inps.lat1 = inps.lat0 + inps.lat_step*inps.lat_num
        inps.lon1 = inps.lon0 + inps.lon_step*inps.lon_num
    else:
        try:
            inps.lat_step = -1*abs(float(inps.lalo_step))
            inps.lon_step = abs(float(inps.lalo_step))
            inps.lat0 = np.nanmax(lat)
            inps.lat1 = np.nanmin(lat)
            inps.lon0 = np.nanmin(lon)
            inps.lon1 = np.nanmax(lon)
            inps.lat_num = int((inps.lat1-inps.lat0)/inps.lat_step)
            inps.lon_num = int((inps.lon1-inps.lon0)/inps.lon_step)
            inps.lat_step = (inps.lat1 - inps.lat0)/inps.lat_num
            inps.lon_step = (inps.lon1 - inps.lon0)/inps.lon_num
        except ValueError:
            print 'Input lat/lon step is neither a float number nor a file in geo-coord, please try again.'
            sys.exit(1)

    print 'output lat range: %f - %f' % (inps.lat0, inps.lat1)
    print 'output lon range: %f - %f' % (inps.lon0, inps.lon1)
    print 'output lat_step : %f' % (inps.lat_step)
    print 'output lon_step : %f' % (inps.lon_step)
    print 'input  file size in   y/x  : %d/%d' % (length, width)
    print 'output file size in lat/lon: %d/%d' % (inps.lat_num, inps.lon_num)

    grid_lat, grid_lon = np.mgrid[inps.lat0:inps.lat1:inps.lat_num*1j,\
                                  inps.lon0:inps.lon1:inps.lon_num*1j]


    ##### Interpolate value on regular geo coordinates (from lookup table file attributes, 2D ndarray)
    ##### with known value on irregular geo coordinates (from lookup table file value, tuple of ndarray of float)

    ##Solution 1 - qhull
    print 'calculate triangulation and coordinates transformation using scipy.spatial.qhull.Delaunay ...'
    pts_old = np.hstack((lat.reshape(-1,1), lon.reshape(-1,1)))
    pts_new = np.hstack((grid_lat.reshape(-1,1), grid_lon.reshape(-1,1)))
    vtx, wts = interp_weights(pts_old, pts_new)
    del pts_old, pts_new, grid_lat, grid_lon

    ##Solution 2 - matplotlib.tri
    #triang = mtri.Triangulation(lat.flatten(),lon.flatten())

    data_geo = np.empty((inps.lat_num, inps.lon_num)).flatten()
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            try:    date12_list = ptime.list_ifgram2date12(epoch_list)
            except: date12_list = epoch_list
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_radar_lut(h5[k][ifgram].attrs, inps, lat, lon, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]

        ##Solution 1 - qhull
        data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

        ###Solution 2 - matplotlib.tri
        #interp_lin = mtri.LinearTriInterpolator(triang, data.flatten())
        #data_geo = interp_lin(grid_lat.flatten(), grid_lon.flatten())
        #interp_cubic = mtri.CubicTriInterpolator(triang, data, kind='geom')
        #data_geo = interp_cubic(grid_lat, grid_lon)

        print 'update attributes'
        atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo, vtx, wts
    print 'finished writing file: %s' % (fname_out)
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
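
# --- One possible implementation of the interp_weights()/interpolate() pair
# used above, following the Stack Overflow answer cited in the docstring:
# do the expensive Delaunay triangulation and barycentric-weight computation
# once, then reuse both for every epoch. A sketch only; the actual helpers
# may differ in detail.
import numpy as np
from scipy.spatial import Delaunay

def interp_weights(xy, uv, d=2):
    tri = Delaunay(xy)                             # triangulate input points
    simplex = tri.find_simplex(uv)                 # -1 for points outside hull
    vtx = np.take(tri.simplices, simplex, axis=0)  # vertex indices per point
    temp = np.take(tri.transform, simplex, axis=0)
    delta = uv - temp[:, d]
    bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta)
    return vtx, np.hstack((bary, 1. - bary.sum(axis=1, keepdims=True)))

def interpolate(values, vtx, wts, fill_value=np.nan):
    ret = np.einsum('nj,nj->n', np.take(values, vtx), wts)
    ret[np.any(wts < 0, axis=1)] = fill_value      # points outside the hull
    return ret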
Beispiel #38
0
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'file type: ' + k

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has a different epoch number '+\
                  'from the input file.'
            print 'Reference list epoch number: ' + str(len(refList))
            print 'Input file     epoch number: ' + str(epochNum)
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print 'number of acquisitions: ' + str(epochNum)
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(epochNum)
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print 'writing >>> ' + outName
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
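
# --- Minimal sketch of spatial referencing ("seeding"): subtract each
# epoch's value at the reference pixel so every epoch is zero there.
# Values below are assumed for illustration.
import numpy as np

data = np.random.rand(5, 5).astype(np.float32)
ref_y, ref_x = 2, 3         # reference pixel
data -= data[ref_y, ref_x]  # now data[ref_y, ref_x] == 0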
Beispiel #39
0
def main(argv):
    inps = cmdLineParse()

    # Input File Info
    atr = readfile.read_attribute(inps.file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    print 'Input file is '+k+': '+inps.file

    # default output filename
    if not inps.outfile:
        if k == 'temporal_coherence':
            inps.outfile = 'maskTempCoh.h5'
        else:
            inps.outfile = 'mask.h5'
        if inps.file.startswith('geo_'):
            inps.outfile = 'geo_'+inps.outfile

    ##### Mask: Non-zero
    if inps.nonzero and k == 'interferograms':
        print 'generate mask for all pixels with non-zero value'
        inps.outfile = ut.nonzero_mask(inps.file, inps.outfile)
        return inps.outfile

    ##### Mask: Threshold 
    print 'create initial mask with the same size as the input file and all = 1'
    mask = np.ones((length, width), dtype=np.float32)

    data, atr = readfile.read(inps.file, epoch=inps.epoch)

    if inps.nonzero:
        print 'all pixels with zero value = 0'
        mask[data == 0] = 0

    # min threshold
    if inps.vmin is not None:
        mask[data<inps.vmin] = 0
        print 'all pixels with value < %s = 0' % str(inps.vmin)

    # max threshold
    if inps.vmax is not None:
        mask[data>inps.vmax] = 0
        print 'all pixels with value > %s = 0' % str(inps.vmax)

    # nan value
    mask[np.isnan(data)] = 0
    print 'all pixels with nan value = 0'

    # subset in Y
    if inps.subset_y:
        y0,y1 = sorted(inps.subset_y)
        mask[0:y0,:] = 0
        mask[y1:length,:] = 0
        print 'all pixels with y OUT of [%d, %d] = 0' % (y0,y1)

    # subset in x
    if inps.subset_x:
        x0,x1 = sorted(inps.subset_x)
        mask[:,0:x0] = 0
        mask[:,x1:width] = 0
        print 'all pixels with x OUT of [%d, %d] = 0' % (x0,x1)
  
    ## Write mask file
    print 'writing >>> '+inps.outfile
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask, atr, inps.outfile)
    return inps.outfile
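
# --- Tiny demo of the thresholding logic above on a toy array (values
# assumed for illustration).
import numpy as np

data = np.array([[0.2, 0.8], [np.nan, 0.5]], dtype=np.float32)
mask = np.ones(data.shape, dtype=np.float32)
mask[data < 0.4] = 0       # min threshold (vmin = 0.4)
mask[np.isnan(data)] = 0   # nan pixels
# mask is now [[0., 1.], [0., 1.]]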
Beispiel #40
0
def main(argv):
    inps = cmdLineParse()

    atr = readfile.read_attribute(inps.file)
    k = atr['FILE_TYPE']
    atr['PROCESSOR'] = 'roipac'
    atr['INSAR_PROCESSOR'] = 'roipac'

    h5file = h5py.File(inps.file, 'r')

    if k == 'velocity':
        dset = h5file['velocity'].get('velocity')
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        print "converting velocity to a 1 year interferogram."
        wvl = float(h5file[k].attrs['WAVELENGTH'])
        data = (-4 * pi / wvl) * data

        inps.outfile = inps.file.split('.')[0] + '.unw'
        print 'writing >>> ' + inps.outfile
        writefile.write(data, atr, inps.outfile)

    elif k in multi_dataset_hdf5_file:
        dateList = sorted(h5file[k].keys())
        try:
            inps.epoch = [date for date in dateList if inps.epoch in date][0]
        except:
            print 'No input date specified >>> continue with the last date'
            inps.epoch = dateList[-1]
        if k in ['timeseries']:
            inps.epoch = ptime.yyyymmdd(inps.epoch)

        ## Data
        print 'reading %s and %s ...' % (inps.ref_date, inps.epoch)
        data = h5file[k].get(inps.epoch)[:]
        if inps.ref_date:
            inps.ref_date = ptime.yyyymmdd(inps.ref_date)
            data -= h5file[k].get(inps.ref_date)[:]

        ## Attributes
        if k in ['timeseries']:
            wvl = float(atr['WAVELENGTH'])
            data *= -4 * pi / wvl
            atr['FILE_TYPE'] = '.unw'
            atr['P_BASELINE_TIMESERIES'] = '0.0'
            atr['UNIT'] = 'radian'
        if inps.ref_date:
            atr['DATE'] = inps.ref_date[2:8]
            atr['DATE12'] = '%s-%s' % (inps.ref_date[2:8], inps.epoch[2:8])

        ## Writing
        if not inps.outfile:
            if k in ['timeseries']:
                inps.outfile = '%s_%s.unw' % (inps.ref_date[2:8],
                                              inps.epoch[2:8])
            else:
                inps.outfile = '%s.cor' % (inps.epoch)
        print 'writing >>> ' + inps.outfile
        writefile.write(data, atr, inps.outfile)

    elif k in ['interferograms', 'coherence', 'wrapped']:
        ## Check input
        igramList = sorted(h5file[k].keys())
        try:
            inps.epoch = [igram for igram in igramList
                          if inps.epoch in igram][0]
        except:
            print 'No input interferogram specified >>> continue with the last one'
            inps.epoch = igramList[-1]

        ## Read and Write
        print 'reading ' + inps.epoch + ' ... '
        atr = dict(h5file[k][inps.epoch].attrs)
        data = h5file[k][inps.epoch].get(inps.epoch)[:]
        if k == 'interferograms':
            try:
                ref_y = int(atr['ref_y'])
                ref_x = int(atr['ref_x'])
                data -= data[ref_y, ref_x]
                print 'consider the reference pixel in y/x: %d/%d' % (ref_y,
                                                                      ref_x)
            except:
                print 'No ref_y/x info found in attributes.'
        atr['PROCESSOR'] = 'roipac'
        atr['INSAR_PROCESSOR'] = 'roipac'

        inps.outfile = inps.epoch
        print 'writing >>> ' + inps.outfile
        writefile.write(data, atr, inps.outfile)

    else:
        data = h5file[k].get(k)[:]
        if not inps.outfile:
            if k in ['temporal_coherence']:
                inps.outfile = inps.file.split('.')[0] + '.cor'
            elif k in ['dem', '.hgt', '.dem']:
                atr['FILE_TYPE'] = '.dem'
                inps.outfile = os.path.splitext(inps.file)[0] + '.dem'
            else:
                inps.outfile = inps.file.split('.')[0] + '.unw'
        print 'writing >>> ' + inps.outfile
        writefile.write(data, atr, inps.outfile)

    h5file.close()
    return
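
# --- Worked conversion used above: a ROI_PAC .unw file stores phase in
# radians, so line-of-sight displacement in meters maps to phase via
# phase = -4*pi/wavelength * displacement. Numbers assumed for illustration.
import numpy as np

wavelength = 0.056    # m, C-band
displacement = 0.01   # m of line-of-sight motion
phase = (-4. * np.pi / wavelength) * displacement  # about -2.24 radian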
Beispiel #41
0
def main(argv):

    try:  
        File = argv[0]
        alks = int(argv[1])
        rlks = int(argv[2])
    except:
        Usage();sys.exit(1)
  
    ext = os.path.splitext(File)[1]
    try:     outName = argv[3]
    except:  outName = File.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks'+ext
  
    ################################################################################
    atr = readfile.read_attributes(File)
    k = atr['FILE_TYPE']
    print '\n***************** Multilooking *********************'
    print 'number of multilooking in azimuth / latitude  direction: '+str(alks)
    print 'number of multilooking in range   / longitude direction: '+str(rlks)
    print 'input file: '+k
  
    if k in ['interferograms','coherence','wrapped','timeseries']:
        h5file     = h5py.File(File,'r')
        h5file_mli = h5py.File(outName,'w')
  
        print 'writing >>> '+outName 
  
        if k in ['interferograms','coherence','wrapped']:
            gg = h5file_mli.create_group(k)
            igramList = h5file[k].keys()
            igramList = sorted(igramList)
  
            for igram in igramList:
                print igram
                unw = h5file[k][igram].get(igram)[:]
                unwlks = multilook(unw,alks,rlks)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unwlks, compression='gzip')
  
                atr = h5file[k][igram].attrs
                atr = multilook_attributes(atr,alks,rlks)
                for key, value in atr.iteritems():   group.attrs[key] = value
  
        elif k == 'timeseries':
            dateList=h5file[k].keys()
            dateList = sorted(dateList)
  
            group = h5file_mli.create_group(k)
            for d in dateList:
                print d
                unw = h5file[k].get(d)[:]
                unwlks=multilook(unw,alks,rlks)
                dset = group.create_dataset(d, data=unwlks, compression='gzip')
  
            ## Update attributes
            atr = h5file[k].attrs
            atr = multilook_attributes(atr,alks,rlks)
            for key, value in atr.iteritems():   group.attrs[key] = value
  
        h5file.close()
        h5file_mli.close()

    ################################################################################
    else:
        ####### To multilook the geomap*.trans file, both its size and its values need to be reduced.
        if k == '.trans':
            rg,az,atr = readfile.read(File)
            rgmli = multilook(rg,alks,rlks);    #rgmli = rgmli/float(rlks)
            azmli = multilook(az,alks,rlks);    #azmli = azmli/float(alks)
            atr = multilook_attributes(atr,alks,rlks)
            writefile.write(rgmli,azmli,atr,outName)
        else:
            data,atr = readfile.read(File)
            data_mli = multilook(data,alks,rlks)
            atr = multilook_attributes(atr,alks,rlks)
            writefile.write(data_mli,atr,outName)
Beispiel #42
0
def remove_surface(File, surf_type, Mask, outName=""):
    start = time.time()
    ##### Output File Name
    if outName == "":
        ext = os.path.splitext(File)[1].lower()
        outName = os.path.basename(File).split(ext)[0] + "_" + surf_type + ext

    ##### Input File Info
    atr = readfile.read_attributes(File)
    k = atr["FILE_TYPE"]
    print "Input file is " + atr["PROCESSOR"] + " " + k

    ## Multiple Datasets File
    if k in ["interferograms", "coherence", "wrapped", "timeseries"]:
        h5file = h5py.File(File, "r")
        ifgramList = h5file[k].keys()
        ifgramList = sorted(ifgramList)
        print "number of epochs: " + str(len(ifgramList))

        h5flat = h5py.File(outName, "w")
        group = h5flat.create_group(k)
        print "writing >>> " + outName

    if k in ["timeseries"]:
        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            data = h5file[k].get(ifgram)[:]

            data_n, ramp = remove_data_surface(data, Mask, surf_type)

            dset = group.create_dataset(ifgram, data=data_n, compression="gzip")
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ["interferograms", "wrapped", "coherence"]:
        for ifgram in ifgramList:
            print "Removing " + surf_type + " from " + ifgram
            data = h5file[k][ifgram].get(ifgram)[:]

            data_n, ramp = remove_data_surface(data, Mask, surf_type)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_n, compression="gzip")
            for key, value in h5file[k][ifgram].attrs.iteritems():
                gg.attrs[key] = value

    ## Single Dataset File
    else:
        try:
            data, atr = readfile.read(File)
        except:
            print "Input file type is not supported: " + atr["FILE_TYPE"]
            sys.exit(1)
        print "Removing " + surf_type + " from " + k

        data_n, ramp = remove_data_surface(data, Mask, surf_type)

        writefile.write(data_n, atr, outName)

    try:
        h5file.close()
        h5flat.close()
    except:
        pass

    print "Remove " + surf_type + " took " + str(time.time() - start) + " secs"
Beispiel #43
0
def subset_file(File,sub_x,sub_y,outfill=np.nan,outName=''):

    ##### Overlap between subset and data range
    atr = readfile.read_attributes(File)
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    box1 = (0,0,width,length)
    box2 = (sub_x[0],sub_y[0],sub_x[1],sub_y[1])
    idx1,idx2 = box_overlap_index(box1,box2)
    print 'data   range:'
    print box1
    print 'subset range:'
    print box2

    ###########################  Data Read and Write  ######################
    k = atr['FILE_TYPE']
    print 'file type: '+k
    if outName == '':  outName = 'subset_'+os.path.basename(File)

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = h5file[k].keys()
        epochList = sorted(epochList)
        print 'number of epochs: '+str(len(epochList))

        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName

    ## Loop
    if k == 'timeseries':
        for epoch in epochList:
            print epoch
            dset = h5file[k].get(epoch)
            data_overlap = dset[idx1[1]:idx1[3],idx1[0]:idx1[2]]

            data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
            data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr  = subset_attributes(atr,sub_y,sub_x)
        for key,value in atr.iteritems():   group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        for epoch in epochList:
            print epoch
            dset = h5file[k][epoch].get(epoch)
            atr  = h5file[k][epoch].attrs
            data_overlap = dset[idx1[1]:idx1[3],idx1[0]:idx1[2]]

            data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
            data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

            atr  = subset_attributes(atr,sub_y,sub_x)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():    gg.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.jpeg','.jpg','.png','.ras','.bmp']:
        data, atr = readfile.read(File,box2)
        writefile.write(data,atr,outName)

    elif k == '.trans':
        rg_overlap,az_overlap,atr = readfile.read(File,idx1)

        rg = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        rg[idx2[1]:idx2[3],idx2[0]:idx2[2]] = rg_overlap

        az = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        az[idx2[1]:idx2[3],idx2[0]:idx2[2]] = az_overlap

        atr = subset_attributes(atr,sub_y,sub_x)
        writefile.write(rg,az,atr,outName)
    else:
        data_overlap,atr = readfile.read(File,idx1)

        data = np.ones((box2[3]-box2[1],box2[2]-box2[0]))*outfill
        data[idx2[1]:idx2[3],idx2[0]:idx2[2]] = data_overlap

        atr = subset_attributes(atr,sub_y,sub_x)
        writefile.write(data,atr,outName)

    ##### End Cleaning
    try:
        h5file.close()
        h5out.close()
    except: pass
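
# --- Minimal sketch of the box-overlap logic (an assumed stand-in for the
# box_overlap_index() helper used above): boxes are (x0, y0, x1, y1), and
# the shared region is returned in each box's own pixel coordinates.
def box_overlap_sketch(box1, box2):
    x0 = max(box1[0], box2[0]);  y0 = max(box1[1], box2[1])
    x1 = min(box1[2], box2[2]);  y1 = min(box1[3], box2[3])
    idx1 = (x0 - box1[0], y0 - box1[1], x1 - box1[0], y1 - box1[1])
    idx2 = (x0 - box2[0], y0 - box2[1], x1 - box2[0], y1 - box2[1])
    return idx1, idx2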
Beispiel #44
0
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        fname_out = add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print 'First input file is ' + atr['PROCESSOR'] + ' ' + k

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file
                    and ki in multi_group_hdf5_file):
                pass
            else:
                print 'Input file structures are not the same: ' + k + ' vs. ' + ki
                sys.exit(1)

        print 'writing >>> ' + fname_out
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print 'number of acquisitions: %d' % epoch_num
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                data = add_matrix(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print 'number of interferograms: %d' % epoch_num
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = h5file.keys()[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(
                    temp_epoch_list[i])[:]
                data = add_matrix(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print 'loading ' + fname
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print 'writing >>> ' + fname_out
        writefile.write(data, atr, fname_out)

    return fname_out
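
# --- Minimal sketch of a NaN-tolerant pixel-wise sum (an assumed stand-in
# for the add_matrix() helper used above): NaNs in either input are treated
# as zero before summing.
import numpy as np

def add_matrix_sketch(a, b):
    a = np.where(np.isnan(a), 0., a)
    b = np.where(np.isnan(b), 0., b)
    return a + b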