Example #1
def main():
    tstfile1 = auxil.select_infile(filt='.tst', title='Test results file, first classifier') 
    if not tstfile1:
        return
    tstfile2 = auxil.select_infile(filt='.tst', title='Test results file, second classifier') 
    if not tstfile2:
        return    
    print '========================='
    print '     McNemar test'
    print '========================='
    with open(tstfile1,'r') as f1:
        with open(tstfile2,'r') as f2:
            line = ''
            for i in range(4):
                line += f1.readline()
            print 'first classifier:\n'+line    
            line = f1.readline().split()
            n1 = int(line[0]) 
            K1 = int(line[1]) 
            line = ''               
            for i in range(4):
                line += f2.readline()
            print 'second classifier:\n'+line   
            line = f2.readline().split()    
            n2 = int(line[0]) 
            K2 = int(line[1])
            if (n1 != n2) or (K1 != K2):
                print 'test files are incompatible'
                return
            print 'test observations: %i'%n1
            print 'classes: %i'%K1
#  calculate the McNemar statistic
            y10 = 0.0
            y01 = 0.0
            for i in range(n1):
                line = f1.readline()
                k = map(int,line.split())
                k1A = k[0]
                k2A = k[1]
                line = f2.readline()
                k = map(int,line.split())
                k1B = k[0]
                k2B = k[1]
                if (k1A != k2A) and (k1B == k2B):
                    y10 += 1
                if (k1A == k2A) and (k1B != k2B):
                    y01 += 1        
    McN = (np.abs(y01-y10))**2/(y10+y01)
    print 'first classifier: %i'%int(y10)      
    print 'second classifier: %i'%int(y01)
    print 'McNemar statistic: %f'%McN
    print 'P-value: %f'%(1-stats.chi2.cdf(McN,1))
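
The statistic itself needs only the two discordant counts; a minimal sketch with hypothetical values for y10 and y01 (nothing here is read from a .tst file):

import numpy as np
from scipy import stats

y10, y01 = 37.0, 18.0                        # hypothetical discordant counts
McN = (np.abs(y01 - y10))**2/(y10 + y01)     # McNemar chi-square statistic
print 'McNemar statistic: %f'%McN
print 'P-value: %f'%(1 - stats.chi2.cdf(McN, 1))   # 1 degree of freedom
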
Example #2
def main():
    gdal.AllRegister()
    infile = auxil.select_infile()
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
#  spectral and spatial subsets
    pos = auxil.select_pos(bands)
    bands = len(pos)
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    #  data matrix for difference images
    D = zeros((rows * cols, bands))
    i = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        tmp = band.ReadAsArray(x0,y0,cols,rows)\
                              .astype(float)
        D[:,i] = (tmp-(roll(tmp,1,axis=0)+\
                 roll(tmp,1,axis=1))/2).ravel()
        i += 1


#  noise covariance matrix
    S_N = mat(D).T * mat(D) / (rows * cols - 1)
    print 'Noise covariance matrix, file %s' % infile
    print S_N
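
A minimal numpy sketch of the same estimator on synthetic white noise (one band, hypothetical size). For i.i.d. noise of variance sigma**2 the difference x - (up + left)/2 has variance 1.5*sigma**2, so the estimate is proportional to, not equal to, the noise variance:

import numpy as np

rows, cols, sigma = 200, 300, 2.0
tmp = sigma*np.random.randn(rows, cols)
d = (tmp - (np.roll(tmp, 1, axis=0) + np.roll(tmp, 1, axis=1))/2).ravel()
S_N = np.dot(d, d)/(rows*cols - 1)
print S_N, 1.5*sigma**2      # the two values should be close
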
Example #3
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  transposed data matrix
    m = rows*cols
    G = zeros((bands,m))                                  
    for b in range(bands):
        band = inDataset.GetRasterBand(b+1)
        tmp = band.ReadAsArray(0,0,cols,rows)\
                              .astype(float).ravel()
        G[b,:] = tmp - mean(tmp) 
    G = mat(G)           
#  covariance matrix
    S = G*G.T/(m-1)   
#  diagonalize and sort eigenvectors  
    lamda,W = linalg.eigh(S)
    idx = argsort(lamda)[::-1]
    lamda = lamda[idx]
    W = W[:,idx]                    
#  get principal components and reconstruct
    r = 3
    Y = W.T*G    
    G_r = W[:,:r]*Y[:r,:]
#  reconstruction error covariance matrix
    print  (G-G_r)*(G-G_r).T/(m-1) 
#  Equation (3.45)       
    print  W[:,r:]*diag(lamda[r:])*W[:,r:].T                       
    inDataset = None        
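
A minimal numpy check of the same identity on synthetic data (no GDAL, hypothetical dimensions): the reconstruction error covariance equals the discarded-eigenvalue term.

import numpy as np

bands, m, r = 5, 1000, 3
G = np.random.randn(bands, m)
G = G - G.mean(axis=1, keepdims=True)          # center each band
S = G.dot(G.T)/(m - 1)                         # covariance matrix
lamda, W = np.linalg.eigh(S)
idx = np.argsort(lamda)[::-1]                  # sort eigenvalues, descending
lamda, W = lamda[idx], W[:, idx]
Y = W.T.dot(G)                                 # principal components
G_r = W[:, :r].dot(Y[:r, :])                   # rank-r reconstruction
E = G - G_r
print np.allclose(E.dot(E.T)/(m - 1),
                  W[:, r:].dot(np.diag(lamda[r:])).dot(W[:, r:].T))
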
Example #4
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
    else:
        return   
#  read first image band
    rasterBand = inDataset.GetRasterBand(4)
    band = rasterBand.ReadAsArray(0,0,cols,rows)\
                                  .astype(uint8)       
#  sobel edge detection, kernel size 5x5
    sobelx = cv.Sobel(band, cv.CV_32F, 1, 0, ksize=5) 
    sobely = cv.Sobel(band, cv.CV_32F, 0, 1, ksize=5)       
#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,2,GDT_Byte)         
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(sobelx,0,0)
        outBand.FlushCache()
        outBand = outDataset.GetRasterBand(2)
        outBand.WriteArray(sobely,0,0) 
        outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
Example #5
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
    else:
        return   
#  read first image band
    rasterBand = inDataset.GetRasterBand(4)
    band = rasterBand.ReadAsArray(0,0,cols,rows)\
                                  .astype(uint8)       
#  canny edge detection, hysteresis thresholds 50, 150
    result = cv.Canny(band, 50, 150)        
#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,1,GDT_Byte)         
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(result,0,0) 
        outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
Example #6
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spectral and spatial subsets    
    pos =  auxil.select_pos(bands)
    bands = len(pos)    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])   
#  data matrix for difference images
    D = zeros((rows*cols,bands))
    i = 0                                   
    for b in pos:
        band = inDataset.GetRasterBand(b)
        tmp = band.ReadAsArray(x0,y0,cols,rows)\
                              .astype(float)
        D[:,i] = (tmp-(roll(tmp,1,axis=0)+\
                 roll(tmp,1,axis=1))/2).ravel()
        i += 1       
#  noise covariance matrix
    S_N = mat(D).T*mat(D)/(rows*cols-1)
    print 'Noise covariance matrix, file %s'%infile
    print S_N
Example #7
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
    else:
        return   
#  read first image band
    rasterBand = inDataset.GetRasterBand(1)
    band = rasterBand.ReadAsArray(0,0,cols,rows)\
                                  .astype(uint8)       
#  corner detection, window size 7x7
    result = cv.cornerMinEigenVal(band, 7)        
#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,1,GDT_Float32)         
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(result,0,0) 
        outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
Example #8
def main():

    gdal.AllRegister()
    infile = auxil.select_infile()
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return


#  spectral and spatial subsets
    pos = auxil.select_pos(bands)
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])

    #  BSQ array
    image = zeros((len(pos), rows, cols))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        image[k,:,:]=band.ReadAsArray(x0,y0,cols,rows)\
                                        .astype(float)
        k += 1
    inDataset = None

    #  display first band
    band0 = image[0, :, :]
    mn = amin(band0)
    mx = amax(band0)
    plt.imshow((band0 - mn) / (mx - mn), cmap='gray')
    plt.show()
Example #9
def main():

#   input directory    
    in_path = auxil.select_directory(title="Choosing the Image file directory")
    shp = auxil.select_infile()

#   imagery dataset
    lista = os.listdir(in_path)
    #print in_path
    GQ = []
    data_list=[]
    imageList = []
    outputName = ""
    i = 0
    for k in range(len(lista)):
        GQ.append(str(lista[k]))
#

    for k in GQ:
        #print k[-4:]
        try:
            if str(k[-4:]) == ".tif" or str(k[-4:]) == ".TIF":
                data_list.append(k)
        except StandardError, e:
            print "Something went wrong!"
Example #10
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile(filt='*.xml') 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return                               
    pos =  auxil.select_pos(bands) 
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])
#  BSQ array
    image = zeros((len(pos),rows,cols),dtype=complex64) 
    k = 0                                   
    for b in pos:
        band = inDataset.GetRasterBand(b)
        image[k,:,:]=band.ReadAsArray(x0,y0,cols,rows)\
                                       .astype(complex)
        k += 1
    inDataset = None
#  display magnitude in linear 2% stretch    
    band0 = abs(image[0,:,:]) 
    band0 = auxil.lin2pcstr(band0)
    mn = amin(band0)
    mx = amax(band0)
    plt.imshow((band0-mn)/(mx-mn), cmap='gray') 
    plt.show()                           
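
A minimal numpy sketch of a linear 2% stretch (clip at the 2nd and 98th percentiles, then scale to [0, 255]); auxil.lin2pcstr is assumed to do something along these lines:

import numpy as np

band0 = np.random.rand(100, 100)**3*1000.0     # synthetic magnitude image
lo, hi = np.percentile(band0, (2, 98))
stretched = np.clip(band0, lo, hi)
stretched = (stretched - lo)/(hi - lo)*255.0
print stretched.min(), stretched.max()         # 0.0 255.0
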
Example #11
def main():
    gdal.AllRegister()
    infile = auxil.select_infile()
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
    else:
        return


#  read first image band
    rasterBand = inDataset.GetRasterBand(1)
    band = rasterBand.ReadAsArray(0,0,cols,rows)\
                                  .astype(uint8)
    #  corner detection, window size 7x7
    result = cv.cornerMinEigenVal(band, 7)
    #  write to disk
    outfile, fmt = auxil.select_outfilefmt()
    if outfile:
        driver = gdal.GetDriverByName(fmt)
        outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(result, 0, 0)
        outBand.FlushCache()
        outDataset = None
    inDataset = None
Example #12
def main():  
        
    gdal.AllRegister() 
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return  
    
#  spectral and spatial subsets    
    pos =  auxil.select_pos(bands) 
    x0,y0,rows,cols = auxil.select_dims([0,0,rows,cols])

#  BSQ array
    image = zeros((len(pos),rows,cols)) 
    k = 0                                   
    for b in pos:
        band = inDataset.GetRasterBand(b)
        image[k,:,:]=band.ReadAsArray(x0,y0,cols,rows)\
                                        .astype(float)
        k += 1
    inDataset = None

#  display first band    
    band0 = image[0,:,:]   
    mn = amin(band0)
    mx = amax(band0)
    plt.imshow((band0-mn)/(mx-mn), cmap='gray' )  
    plt.show()                        
Example #13
def main():    
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return    
    pos =  auxil.select_pos(bands)
    bands = len(pos)    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])   
    K = auxil.select_integer(6,msg='Number clusters')        
    G = zeros((rows*cols,len(pos))) 
    k = 0                                   
    for b in pos:
        band = inDataset.GetRasterBand(b)
        G[:,k] = band.ReadAsArray(x0,y0,cols,rows)\
                              .astype(float).ravel()
        k += 1        
    centers, _ = kmeans(G,K)
    labels, _ = vq(G,centers)      
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,1,GDT_Byte)         
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(reshape(labels,(rows,cols))\
                                              ,0,0) 
        outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
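
A minimal sketch of the scipy.cluster.vq calls used above, on synthetic pixel vectors (hypothetical sizes; as above, the data are not whitened first):

import numpy as np
from scipy.cluster.vq import kmeans, vq

G = np.random.rand(1000, 4)           # 1000 pixel vectors, 4 bands
K = 6
centers, distortion = kmeans(G, K)    # cluster centers and mean distortion
labels, _ = vq(G, centers)            # nearest-center label for each vector
print centers.shape, labels.shape     # e.g. (6, 4) (1000,)
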
Example #14
def main():

    #   input directory
    in_path = auxil.select_directory(title="Choosing the Image file directory")
    shp = auxil.select_infile()

    #   imagery dataset
    lista = os.listdir(in_path)
    #print in_path
    GQ = []
    data_list = []
    imageList = []
    outputName = ""
    i = 0
    for k in range(len(lista)):
        GQ.append(str(lista[k]))


#

    for k in GQ:
        #print k[-4:]
        try:
            if str(k[-4:]) == ".tif" or str(k[-4:]) == ".TIF":
                data_list.append(k)
        except StandardError, e:
            print "Something went wrong!"
Example #15
def main(): 
    gdal.AllRegister()
#  read first band of an MS image   
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
    else:
        return   
    rasterBand = inDataset.GetRasterBand(1) 
    band = rasterBand.ReadAsArray(0,0,cols,rows)                              
#  find and display contours    
    edges = cv.Canny(band, 20, 80)    
    contours,hierarchy = cv.findContours(edges,\
             cv.RETR_LIST,cv.CHAIN_APPROX_NONE)
    arr = zeros((rows,cols),dtype=uint8)
    cv.drawContours(arr, contours, -1, 255)
    plt.imshow(arr,cmap='gray') ;  plt.show()
#  determine Hu moments        
    num_contours = len(hierarchy[0])    
    hus = zeros((num_contours,7),dtype=float32)
    for i in range(num_contours): 
        arr = arr*0  
        cv.drawContours(arr, contours, i, 1)                      
        m = cv.moments(arr)
        hus[i,:] = cv.HuMoments(m).ravel()
#  plot histograms of logarithms of the Hu moments        
    for i in range(7): 
        idx = where(hus[:,i]>0)  
        hist,_ = histogram(log(hus[idx,i]),50)    
        plt.plot(range(50), hist, 'k-'); plt.show()        
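
A minimal sketch of the moment calls on a synthetic binary shape (no image file; cv is assumed to be the cv2 module, as in the example above):

import numpy as np
import cv2 as cv

arr = np.zeros((100, 100), dtype=np.uint8)
arr[30:70, 40:60] = 1                 # a filled rectangle
m = cv.moments(arr)                   # raw, central and normalized moments
hus = cv.HuMoments(m).ravel()         # the seven Hu invariants
print hus
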
Example #16
def main(): 
    print '========================='
    print '     Register SAR'
    print '========================='
    print time.asctime()  
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file0=auxil.select_infile(title='Base image') 
    if not file0:                   
        return  
    file1=auxil.select_infile(title='Warp image') 
    if not file1:                  
        return       
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return  
    if registerSAR(file0,file1,outfile,fmt):
        print 'done' 
    else:
        print 'registerSAR failed'        
Example #17
def main():
    print '========================='
    print '     Register SAR'
    print '========================='
    print time.asctime()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)
    file0 = auxil.select_infile(title='Base image')
    if not file0:
        return
    file1 = auxil.select_infile(title='Warp image')
    if not file1:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    if registerSAR(file0, file1, outfile, fmt):
        print 'done'
    else:
        print 'registerSAR failed'
Example #18
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  data matrix
    n = rows*cols
    Z = zeros((n,bands))                                  
    for b in range(bands):
        band = inDataset.GetRasterBand(b+1)
        tmp = band.ReadAsArray(0,0,cols,rows)\
                              .astype(float).ravel()
        Z[:,b] = tmp - mean(tmp) 
    Z = mat(Z)           
#  covariance matrix
    S = Z.T*Z/n 
#  diagonalize and sort eigenvectors  
    lamda,V = linalg.eigh(S)
    idx = argsort(lamda)[::-1]
    lamda = lamda[idx]
    V = V[:,idx]                    
#  get principal components and reconstruct
    r = 2
    X = Z*V    
    Z_r = X[:,:r]*V[:,:r].T
    recon = reshape(array(Z_r),(rows,cols,bands))

#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,bands,GDT_Float32)
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform)  #geotransform is a tuple
            gt[0] = gt[0] + gt[1]
            gt[3] = gt[3] + gt[5]
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)        
        for k in range(bands):        
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(recon[:,:,k],0,0) 
            outBand.FlushCache() 
        outDataset = None    
Example #19
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)


#  input image, convert to ENVI format
    infile = auxil.select_infile(title='Image file')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        driver = gdal.GetDriverByName('ENVI')
        enviDataset=driver\
           .CreateCopy('entmp',inDataset)
        inDataset = None
        enviDataset = None
    else:
        return
    outfile, outfmt= \
           auxil.select_outfilefmt(title='Output file')
    #  RX-algorithm
    img = envi.open('entmp.hdr')
    arr = img.load()
    rx = RX(background=calc_stats(arr))
    res = rx(arr)
    #  output
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile,cols,rows,1,\
                                    GDT_Float32)
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(np.asarray(res, np.float32), 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
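
A minimal numpy sketch of the RX statistic itself (the Mahalanobis distance of each pixel from the global background mean), on synthetic data; the spectral package's RX class is assumed to compute essentially this quantity:

import numpy as np

rows, cols, bands = 50, 60, 4
arr = np.random.randn(rows, cols, bands)       # synthetic image cube
X = arr.reshape(-1, bands)
mu = X.mean(axis=0)
Sinv = np.linalg.inv(np.cov(X, rowvar=False))
d = X - mu
res = np.einsum('ij,jk,ik->i', d, Sinv, d).reshape(rows, cols)
print res.shape                                # one anomaly score per pixel
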
Example #20
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    
    band = inDataset.GetRasterBand(3)
    band3 = band.ReadAsArray(0,0,cols,rows)
    
    
    band = inDataset.GetRasterBand(4)
    band4 = band.ReadAsArray(0,0,cols,rows)
    band4 = array(band4,dtype=float32)
    
    NDVI = (band4-band3)/(band4+band3)
    
#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,1,GDT_Float32)
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            outDataset.SetGeoTransform(geotransform)
        if projection is not None:
            outDataset.SetProjection(projection)              
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(NDVI,0,0) 
        outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
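
A minimal numpy sketch of the same ratio on hypothetical red and near-infrared values, with a guard against division by zero (the example above would produce NaN wherever band3 + band4 == 0):

import numpy as np

band3 = np.array([[50., 60.], [70., 0.]])      # red
band4 = np.array([[90., 60.], [30., 0.]])      # near infrared
den = band4 + band3
NDVI = np.where(den != 0, (band4 - band3)/np.where(den != 0, den, 1), 0.0)
print NDVI
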
Example #21
def main():
    gdal.AllRegister()
    path = auxil.select_directory("Input directory")
    if path:
        os.chdir(path)
    #  input image, convert to ENVI format
    infile = auxil.select_infile(title="Image file")
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        driver = gdal.GetDriverByName("ENVI")
        enviDataset = driver.CreateCopy("entmp", inDataset)
        inDataset = None
        enviDataset = None
    else:
        return
    outfile, outfmt = auxil.select_outfilefmt(title="Output file")
    #  RX-algorithm
    img = envi.open("entmp.hdr")
    arr = img.load()
    rx = RX(background=calc_stats(arr))
    res = rx(arr)
    #  output
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(np.asarray(res, np.float32), 0, 0)
    outBand.FlushCache()
    outDataset = None
    print "Result written to %s" % outfile
Example #22
def main():
    gdal.AllRegister()
    #  read first band of an MS image
    infile = auxil.select_infile()
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
    else:
        return
    rasterBand = inDataset.GetRasterBand(1)
    band = rasterBand.ReadAsArray(0, 0, cols, rows)
    #  find and display contours
    edges = cv.Canny(band, 20, 80)
    contours,hierarchy = cv.findContours(edges,\
             cv.RETR_LIST,cv.CHAIN_APPROX_NONE)
    arr = zeros((rows, cols), dtype=uint8)
    cv.drawContours(arr, contours, -1, 255)
    plt.imshow(arr, cmap='gray')
    plt.show()
    #  determine Hu moments
    num_contours = len(hierarchy[0])
    hus = zeros((num_contours, 7), dtype=float32)
    for i in range(num_contours):
        arr = arr * 0
        cv.drawContours(arr, contours, i, 1)
        m = cv.moments(arr)
        hus[i, :] = cv.HuMoments(m).ravel()


#  plot histograms of logarithms of the Hu moments
    for i in range(7):
        idx = where(hus[:, i] > 0)
        hist, _ = histogram(log(hus[idx, i]), 50)
        plt.plot(range(50), hist, 'k-')
        plt.show()
Example #23
def main():        
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
    else:
        return
    band = inDataset.GetRasterBand(1)  
    image = band.ReadAsArray(0,0,cols,rows) \
                               .astype(float)
#  arrays of i and j values    
    a = reshape(range(rows*cols),(rows,cols))
    i = a % cols
    j = a / cols
#  shift Fourier transform to center    
    image = (-1)**(i+j)*image
#  compute power spectrum and display    
    image = log(abs(fft.fft2(image))**2)
    mn = amin(image)
    mx = amax(image)
    plt.imshow((image-mn)/(mx-mn), cmap='gray' )  
    plt.show()                        
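
A minimal check that the (-1)**(i+j) trick is the same frequency-domain centering provided by np.fft.fftshift (exact for even dimensions; synthetic data):

import numpy as np

rows, cols = 64, 64
image = np.random.rand(rows, cols)
a = np.reshape(range(rows*cols), (rows, cols))
i, j = a % cols, a // cols
centered = np.fft.fft2(((-1.0)**(i + j))*image)
shifted = np.fft.fftshift(np.fft.fft2(image))
print np.allclose(centered, shifted)           # True for even rows and cols
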
Example #24
def openNewFile(self):
    imagePath = auxil.select_infile(title='Choose Image.')
    imagePanel.updateImage(imagePath)
    imagePanel.listOfids = []
    imagePanel.listOfCoordinates = []
Example #25
def main(): 
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file0=auxil.select_infile(title='Base image') 
    if file0:                   
        inDataset0 = gdal.Open(file0,GA_ReadOnly)     
        cols0 = inDataset0.RasterXSize
        rows0 = inDataset0.RasterYSize
        print 'Base image: %s'%file0    
    else:
        return     
    rasterBand = inDataset0.GetRasterBand(1)
    span0 = rasterBand.ReadAsArray(0,0,cols0,rows0)
    rasterBand = inDataset0.GetRasterBand(4)
    span0 += 2*rasterBand.ReadAsArray(0,0,cols0,rows0)
    rasterBand = inDataset0.GetRasterBand(6)
    span0 += rasterBand.ReadAsArray(0,0,cols0,rows0)  
    span0 = log(real(span0))      
    inDataset0 = None   
    file1=auxil.select_infile(title='Warp image') 
    if file1:                  
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Warp image: %s'%file1    
    else:
        return   
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return   
    image1 = zeros((6,rows1,cols1),dtype=cfloat)                                   
    for k in range(6):
        band = inDataset1.GetRasterBand(k+1)
        image1[k,:,:]=band\
          .ReadAsArray(0,0,cols1,rows1).astype(cfloat)    
    inDataset1 = None 
    span1 = sum(image1[[0,3,5] ,:,:],axis=0)\
                                        +image1[3,:,:]                   
    span1 = log(real(span1))                
    scale,angle,shift = auxil.similarity(span0, span1)    
    tmp_real = zeros((6,rows0,cols0))
    tmp_imag = zeros((6,rows0,cols0))
    for k in range(6): 
        bn1 = real(image1[k,:,:])                   
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_real[k,:,:] = bn2[0:rows0,0:cols0] 
        bn1 = imag(image1[k,:,:])                   
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_imag[k,:,:] = bn2[0:rows0,0:cols0] 
    image2 = tmp_real + 1j*tmp_imag                  
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,
                    cols0,rows0,6,GDT_CFloat32)
    for k in range(6):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(image2[k,:,:],0,0) 
        outBand.FlushCache()
    outDataset = None
    print 'Warped image written to: %s'%outfile        
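
A minimal sketch of the zoom / rotate / shift chain applied to a single real-valued band, calling scipy.ndimage directly (hypothetical similarity parameters; ndii above is assumed to be scipy's interpolation module):

import numpy as np
from scipy import ndimage as ndi

bn1 = np.random.rand(128, 128)
scale, angle, shift = 1.02, 1.5, (3.0, -2.0)   # hypothetical scale, degrees, pixel offsets
bn2 = ndi.zoom(bn1, 1.0/scale)
bn2 = ndi.rotate(bn2, angle)
bn2 = ndi.shift(bn2, shift)
print bn2.shape
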
Example #26
import auxil.auxil as auxil
import numpy as np 
from osgeo import gdal   
from osgeo.gdalconst import GA_ReadOnly,GDT_Float32
import matplotlib.pyplot as plt
from pylab import *
import gc
import os


# Open a pre-classified image
in_path = auxil.select_infile(title="Choosing the input file directory")
gdal.AllRegister()
raw_image = gdal.Open(in_path,GA_ReadOnly)
try:
    cols = raw_image.RasterXSize
    rows = raw_image.RasterYSize
    bands = raw_image.RasterCount
except StandardError, e:
    print "Error: It is not an image"



# Get the spatial reference of the input image
projInfo = raw_image.GetProjection()
transInfo = raw_image.GetGeoTransform()


# Read the image as an array
pre_classified = raw_image.ReadAsArray(0, 0, cols, rows)
Example #27
def main():      
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)        
#  input image    
    infile = auxil.select_infile(title='Image file') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform) 
        else:
            print 'No geotransform available'
            return       
        imsr = osr.SpatialReference()  
        imsr.ImportFromWkt(projection)      
    else:
        return  
    pos =  auxil.select_pos(bands)   
    if not pos:
        return
    N = len(pos) 
    rasterBands = [] 
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b)) 
#  training algorithm
    trainalg = auxil.select_integer(1,msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM') 
    if not trainalg:
        return           
#  training data (shapefile)      
    trnfile = auxil.select_infile(filt='.shp',title='Train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile,0)
        trnLayer = trnDatasource.GetLayer() 
        trnsr = trnLayer.GetSpatialRef()             
    else:
        return     
    tstfile = auxil.select_outfile(filt='.tst', title='Test results file') 
    if not tstfile:
        print 'No test output'      
#  outfile
    outfile, outfmt = auxil.select_outfilefmt(title='Classification file')   
    if not outfile:
        return                   
    if trainalg in (2,3,4):
#      class probabilities file, hidden neurons
        probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file')
    else:
        probfile = None     
    if trainalg in (2,3):    
        L = auxil.select_integer(8,'Number of hidden neurons')    
        if not L:
            return                  
#  coordinate transformation from training to image projection   
    ct= osr.CoordinateTransformation(trnsr,imsr) 
#  number of classes    
    K = 1
    feature = trnLayer.GetNextFeature() 
    while feature:
        classid = feature.GetField('CLASS_ID')
        if int(classid)>K:
            K = int(classid)
        feature = trnLayer.GetNextFeature() 
    trnLayer.ResetReading()    
    K += 1       
    print '========================='
    print 'supervised classification'
    print '========================='
    print time.asctime()    
    print 'image:    '+infile
    print 'training: '+trnfile  
    if trainalg == 1:
        print 'Maximum Likelihood'
    elif trainalg == 2:
        print 'Neural Net (Backprop)'
    elif trainalg ==3:
        print 'Neural Net (Congrad)'
    else:
        print 'Support Vector Machine'               
#  loop through the polygons    
    Gs = [] # train observations
    ls = [] # class labels
    classnames = '{unclassified'
    classids = set()
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = str(feature.GetField('CLASS_ID'))
        classname  = feature.GetField('CLASS_NAME')
        if classid not in classids:
            classnames += ',   '+ classname
        classids = classids | set(classid)        
        l = [0 for i in range(K)]
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
#      transform to same projection as image        
        polygon.Transform(ct)  
#      convert to a Shapely object            
        poly = shapely.wkt.loads(polygon.ExportToWkt())
#      transform the boundary to pixel coords in numpy        
        bdry = np.array(poly.boundary) 
        bdry[:,0] = bdry[:,0]-gt[0]
        bdry[:,1] = bdry[:,1]-gt[3]
        GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]])
        bdry = bdry*np.linalg.inv(GT) 
#      polygon in pixel coords        
        polygon1 = asPolygon(bdry)
#      raster over the bounding rectangle        
        minx,miny,maxx,maxy = map(int,list(polygon1.bounds))  
        pts = [] 
        for i in range(minx,maxx+1):
            for j in range(miny,maxy+1): 
                pts.append((i,j))             
        multipt =  MultiPoint(pts)   
#      intersection as list              
        intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist()
#      cut out the bounded image cube               
        cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands)))
        k=0
        for band in rasterBands:
            cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1)
            k += 1
#      get the training vectors
        for (x,y) in intersection:         
            Gs.append(cube[y-miny,x-minx,:])
            ls.append(l)   
        polygon = None
        polygon1 = None            
        feature.Destroy()  
    trnDatasource.Destroy() 
    classnames += '}'
    m = len(ls)       
    print str(m) + ' training pixel vectors were read in' 
    Gs = np.array(Gs) 
    ls = np.array(ls)
#  stretch the pixel vectors to [-1,1] for ffn
    maxx = np.max(Gs,0)
    minx = np.min(Gs,0)
    for j in range(N):
        Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx,:] 
    ls = ls[idx,:]     
#  setup output datasets 
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) 
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1) 
    if probfile:
        driver = gdal.GetDriverByName(probfmt)    
        probDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) 
        if geotransform is not None:
            probDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            probDataset.SetProjection(projection)  
        probBands = [] 
        for k in range(K):
            probBands.append(probDataset.GetRasterBand(k+1))         
    if tstfile:
#  train on 2/3 training examples         
        Gstrn = Gs[0:2*m//3,:]
        lstrn = ls[0:2*m//3,:] 
        Gstst = Gs[2*m//3:,:]  
        lstst = ls[2*m//3:,:]    
    else:
        Gstrn = Gs
        lstrn = ls         
    if   trainalg == 1:
        classifier = sc.Maxlike(Gstrn,lstrn)
    elif trainalg == 2:
        classifier = sc.Ffnbp(Gstrn,lstrn,L)
    elif trainalg == 3:
        classifier = sc.Ffncg(Gstrn,lstrn,L)
    elif trainalg == 4:
        classifier = sc.Svm(Gstrn,lstrn)         
            
    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    result = classifier.train()
    print 'elapsed time %s' %str(time.time()-start) 
    if result:
        if trainalg in [2,3]:
            cost = np.log10(result)  
            ymax = np.max(cost)
            ymin = np.min(cost) 
            xmax = len(cost)      
            plt.plot(range(xmax),cost,'k')
            plt.axis([0,xmax,ymin-1,ymax])
            plt.title('Log(Cross entropy)')
            plt.xlabel('Epoch')              
#      classify the image           
        print 'classifying...'
        start = time.time()
        tile = np.zeros((cols,N))    
        for row in range(rows):
            for j in range(N):
                tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1)
                tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0               
            cls, Ms = classifier.classify(tile)  
            outBand.WriteArray(np.reshape(cls,(1,cols)),0,row)
            if probfile:
                Ms = np.byte(Ms*255)
                for k in range(K):
                    probBands[k].WriteArray(np.reshape(Ms[k,:],(1,cols)),0,row)
        outBand.FlushCache()
        print 'elapsed time %s' %str(time.time()-start)
        outDataset = None
        inDataset = None      
        if probfile:
            for probBand in probBands:
                probBand.FlushCache() 
            probDataset = None
            print 'class probabilities written to: %s'%probfile   
        K =  lstrn.shape[1]+1                     
        if (outfmt == 'ENVI') and (K<19):
#          try to make an ENVI classification header file            
            hdr = header.Header() 
            headerfile = outfile+'.hdr'
            f = open(headerfile)
            line = f.readline()
            envihdr = ''
            while line:
                envihdr += line
                line = f.readline()
            f.close()         
            hdr.read(envihdr)
            hdr['file type'] ='ENVI Classification'
            hdr['classes'] = str(K)
            classlookup = '{0'
            for i in range(1,3*K):
                classlookup += ', '+str(str(ctable[i]))
            classlookup +='}'    
            hdr['class lookup'] = classlookup
            hdr['class names'] = classnames
            f = open(headerfile,'w')
            f.write(str(hdr))
            f.close()             
        print 'thematic map written to: %s'%outfile
        if trainalg in [2,3]:
            print 'please close the cross entropy plot to continue'
            plt.show()
        if tstfile:
            with open(tstfile,'w') as f:
                print >>f, 'FFN test results for %s'%infile
                print >>f, time.asctime()
                print >>f, 'Classification image: %s'%outfile
                print >>f, 'Class probabilities image: %s'%probfile
                print >>f, lstst.shape[0],lstst.shape[1]
                classes, _ = classifier.classify(Gstst)
                labels = np.argmax(lstst,axis=1)+1
                for i in range(len(classes)):
                    print >>f, classes[i], labels[i]              
                f.close()
                print 'test results written to: %s'%tstfile
        print 'done'
    else:
        print 'an error occurred'
        return 
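
A minimal sketch of the [-1, 1] stretch and the 2/3 : 1/3 train/test split used above, on synthetic training vectors (hypothetical sizes):

import numpy as np

m, N = 90, 4
Gs = np.random.rand(m, N)*100.0
maxx = np.max(Gs, 0)
minx = np.min(Gs, 0)
for j in range(N):
    Gs[:, j] = 2*(Gs[:, j] - minx[j])/(maxx[j] - minx[j]) - 1.0
idx = np.random.permutation(m)                 # shuffle before splitting
Gs = Gs[idx, :]
Gstrn, Gstst = Gs[0:2*m//3, :], Gs[2*m//3:, :]
print Gstrn.shape, Gstst.shape                 # (60, 4) (30, 4)
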
Example #28
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:

        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    bands = len(pos)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    class_image = np.zeros((rows, cols), dtype=np.byte)
    K = auxil.select_integer(6, 'Number of clusters')
    max_scale = auxil.select_integer(2, 'Maximum scaling factor')
    max_scale = min((max_scale, 3))
    min_scale = auxil.select_integer(0, 'Minimum scaling factor')
    min_scale = min((max_scale, min_scale))
    T0 = auxil.select_float(0.5, 'Initial annealing temperature')
    beta = auxil.select_float(0.5, 'Spatial mixing parameter')
    outfile, outfmt = auxil.select_outfilefmt(
        'Select output classification file')
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt(
        'Select output probability file (optional)')
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s' % infile
    print 'clusters: %i' % K
    print 'T0:       %f' % T0
    print 'beta:     %f' % beta

    start = time.time()
    #  read in image and compress
    DWTbands = []
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(
            band.ReadAsArray(x0, y0, cols, rows).astype(float), cols, rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    rows, cols = DWTbands[0].get_quadrant(0).shape
    G = np.transpose(
        np.array([
            DWTbands[i].get_quadrant(0, float=True).ravel()
            for i in range(bands)
        ]))
    #  initialize membership matrix
    n = G.shape[0]
    U = np.random.random((K, n))
    den = np.sum(U, axis=0)
    for j in range(K):
        U[j, :] = U[j, :] / den
#  cluster at minimum scale
    try:
        U, Ms, Cs, Ps, pdens = em(G, U, T0, beta, rows, cols)
    except:
        print 'em failed'
        return
#  sort clusters wrt partition density
    idx = np.argsort(pdens)
    idx = idx[::-1]
    U = U[idx, :]
    #  clustering at increasing scales
    for i in range(max_scale - min_scale):
        #      expand U and renormalize
        U = np.reshape(U, (K, rows, cols))
        rows = rows * 2
        cols = cols * 2
        U = ndi.zoom(U, (1, 2, 2))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(
            np.array([
                DWTbands[i].get_quadrant(0, float=True).ravel()
                for i in range(bands)
            ]))
        #      cluster
        unfrozen = np.where(np.max(U, axis=0) < 0.90)
        try:
            U, Ms, Cs, Ps, pdens = em(G,
                                      U,
                                      0.0,
                                      beta,
                                      rows,
                                      cols,
                                      unfrozen=unfrozen)
        except:
            print 'em failed'
            return
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i' % k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale > 0:
        U = np.reshape(U, (K, rows, cols))
        f = 2**min_scale
        rows = rows * f
        cols = cols * f
        U = ndi.zoom(U, (1, f, f))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den


#  classify
    labels = np.byte(np.argmax(U, axis=0) + 1)
    class_image[0:rows, 0:cols] = np.reshape(labels, (rows, cols))
    rows1, cols1 = class_image.shape
    #  write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols1, rows1, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    #  write class membership probability file if desired
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        outDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)
        for k in range(K):
            probs = np.reshape(U[k, :], (rows, cols))
            probs = np.byte(probs * 255)
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(probs, 0, 0)
            outBand.FlushCache()
        outDataset = None
        print 'class probabilities written to: %s' % probfile
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        #  try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i' % i for i in range(K + 1)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'classification written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
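
A minimal sketch of the membership-matrix initialization used above: random entries, with each pixel's memberships then normalized to sum to one (hypothetical sizes):

import numpy as np

K, n = 6, 500
U = np.random.random((K, n))
U = U/np.sum(U, axis=0)                        # each column now sums to 1
print np.allclose(U.sum(axis=0), 1.0)          # True
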
Example #29
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)
#  input image
    infile = auxil.select_infile(title='Image file')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform)
        else:
            print 'No geotransform available'
            return
        imsr = osr.SpatialReference()
        imsr.ImportFromWkt(projection)
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    N = len(pos)
    rasterBands = []
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b))
#  training algorithm
    trainalg = auxil.select_integer(1,
                                    msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM')
    if not trainalg:
        return
#  training data (shapefile)
    trnfile = auxil.select_infile(filt='.shp', title='Train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile, 0)
        trnLayer = trnDatasource.GetLayer()
        trnsr = trnLayer.GetSpatialRef()
    else:
        return
    tstfile = auxil.select_outfile(filt='.tst', title='Test results file')
    if not tstfile:
        print 'No test output'
#  outfile
    outfile, outfmt = auxil.select_outfilefmt(title='Classification file')
    if not outfile:
        return
    if trainalg in (2, 3, 4):
        #      class probabilities file, hidden neurons
        probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file')
    else:
        probfile = None
    if trainalg in (2, 3):
        L = auxil.select_integer(8, 'Number of hidden neurons')
        if not L:
            return
#  coordinate transformation from training to image projection
    ct = osr.CoordinateTransformation(trnsr, imsr)
    #  number of classes
    K = 1
    feature = trnLayer.GetNextFeature()
    while feature:
        classid = feature.GetField('CLASS_ID')
        if int(classid) > K:
            K = int(classid)
        feature = trnLayer.GetNextFeature()
    trnLayer.ResetReading()
    K += 1
    print '========================='
    print 'supervised classification'
    print '========================='
    print time.asctime()
    print 'image:    ' + infile
    print 'training: ' + trnfile
    if trainalg == 1:
        print 'Maximum Likelihood'
    elif trainalg == 2:
        print 'Neural Net (Backprop)'
    elif trainalg == 3:
        print 'Neural Net (Congrad)'
    else:
        print 'Support Vector Machine'
#  loop through the polygons
    Gs = []  # train observations
    ls = []  # class labels
    classnames = '{unclassified'
    classids = set()
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = str(feature.GetField('CLASS_ID'))
        classname = feature.GetField('CLASS_NAME')
        if classid not in classids:
            classnames += ',   ' + classname
        classids = classids | set(classid)
        l = [0 for i in range(K)]
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
        #      transform to same projection as image
        polygon.Transform(ct)
        #      convert to a Shapely object
        poly = shapely.wkt.loads(polygon.ExportToWkt())
        #      transform the boundary to pixel coords in numpy
        bdry = np.array(poly.boundary)
        bdry[:, 0] = bdry[:, 0] - gt[0]
        bdry[:, 1] = bdry[:, 1] - gt[3]
        GT = np.mat([[gt[1], gt[2]], [gt[4], gt[5]]])
        bdry = bdry * np.linalg.inv(GT)
        #      polygon in pixel coords
        polygon1 = asPolygon(bdry)
        #      raster over the bounding rectangle
        minx, miny, maxx, maxy = map(int, list(polygon1.bounds))
        pts = []
        for i in range(minx, maxx + 1):
            for j in range(miny, maxy + 1):
                pts.append((i, j))
        multipt = MultiPoint(pts)
        #      intersection as list
        intersection = np.array(multipt.intersection(polygon1),
                                dtype=np.int).tolist()
        #      cut out the bounded image cube
        cube = np.zeros((maxy - miny + 1, maxx - minx + 1, len(rasterBands)))
        k = 0
        for band in rasterBands:
            cube[:, :, k] = band.ReadAsArray(minx, miny, maxx - minx + 1,
                                             maxy - miny + 1)
            k += 1
#      get the training vectors
        for (x, y) in intersection:
            Gs.append(cube[y - miny, x - minx, :])
            ls.append(l)
        polygon = None
        polygon1 = None
        feature.Destroy()
    trnDatasource.Destroy()
    classnames += '}'
    m = len(ls)
    print str(m) + ' training pixel vectors were read in'
    Gs = np.array(Gs)
    ls = np.array(ls)
    #  stretch the pixel vectors to [-1,1] for ffn
    maxx = np.max(Gs, 0)
    minx = np.min(Gs, 0)
    for j in range(N):
        Gs[:, j] = 2 * (Gs[:, j] - minx[j]) / (maxx[j] - minx[j]) - 1.0
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx, :]
    ls = ls[idx, :]
    #  setup output datasets
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        probDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            probDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            probDataset.SetProjection(projection)
        probBands = []
        for k in range(K):
            probBands.append(probDataset.GetRasterBand(k + 1))
    if tstfile:
        #  train on 2/3 training examples
        Gstrn = Gs[0:2 * m // 3, :]
        lstrn = ls[0:2 * m // 3, :]
        Gstst = Gs[2 * m // 3:, :]
        lstst = ls[2 * m // 3:, :]
    else:
        Gstrn = Gs
        lstrn = ls
    if trainalg == 1:
        classifier = sc.Maxlike(Gstrn, lstrn)
    elif trainalg == 2:
        classifier = sc.Ffnbp(Gstrn, lstrn, L)
    elif trainalg == 3:
        classifier = sc.Ffncg(Gstrn, lstrn, L)
    elif trainalg == 4:
        classifier = sc.Svm(Gstrn, lstrn)

    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    result = classifier.train()
    print 'elapsed time %s' % str(time.time() - start)
    if result:
        if trainalg in [2, 3]:
            cost = np.log10(result)
            ymax = np.max(cost)
            ymin = np.min(cost)
            xmax = len(cost)
            plt.plot(range(xmax), cost, 'k')
            plt.axis([0, xmax, ymin - 1, ymax])
            plt.title('Log(Cross entropy)')
            plt.xlabel('Epoch')


#      classify the image
        print 'classifying...'
        start = time.time()
        tile = np.zeros((cols, N))
        for row in range(rows):
            for j in range(N):
                tile[:, j] = rasterBands[j].ReadAsArray(0, row, cols, 1)
                tile[:, j] = 2 * (tile[:, j] - minx[j]) / (maxx[j] -
                                                           minx[j]) - 1.0
            cls, Ms = classifier.classify(tile)
            outBand.WriteArray(np.reshape(cls, (1, cols)), 0, row)
            if probfile:
                Ms = np.byte(Ms * 255)
                for k in range(K):
                    probBands[k].WriteArray(np.reshape(Ms[k, :], (1, cols)), 0,
                                            row)
        outBand.FlushCache()
        print 'elapsed time %s' % str(time.time() - start)
        outDataset = None
        inDataset = None
        if probfile:
            for probBand in probBands:
                probBand.FlushCache()
            probDataset = None
            print 'class probabilities written to: %s' % probfile
        K = lstrn.shape[1] + 1
        if (outfmt == 'ENVI') and (K < 19):
            #          try to make an ENVI classification header file
            hdr = header.Header()
            headerfile = outfile + '.hdr'
            f = open(headerfile)
            line = f.readline()
            envihdr = ''
            while line:
                envihdr += line
                line = f.readline()
            f.close()
            hdr.read(envihdr)
            hdr['file type'] = 'ENVI Classification'
            hdr['classes'] = str(K)
            classlookup = '{0'
            for i in range(1, 3 * K):
                classlookup += ', ' + str(str(ctable[i]))
            classlookup += '}'
            hdr['class lookup'] = classlookup
            hdr['class names'] = classnames
            f = open(headerfile, 'w')
            f.write(str(hdr))
            f.close()
        print 'thematic map written to: %s' % outfile
        if trainalg in [2, 3]:
            print 'please close the cross entropy plot to continue'
            plt.show()
        if tstfile:
            with open(tstfile, 'w') as f:
                print >> f, 'FFN test results for %s' % infile
                print >> f, time.asctime()
                print >> f, 'Classification image: %s' % outfile
                print >> f, 'Class probabilities image: %s' % probfile
                print >> f, lstst.shape[0], lstst.shape[1]
                classes, _ = classifier.classify(Gstst)
                labels = np.argmax(lstst, axis=1) + 1
                for i in range(len(classes)):
                    print >> f, classes[i], labels[i]
                f.close()
                print 'test results written to: %s' % tstfile
        print 'done'
    else:
        print 'an error occurred'
        return
Example #30
def dispms(filename=None,dims=None,rgb=None,enhance=None):
    gdal.AllRegister()
    if filename == None:        
        filename = auxil.select_infile(title='Choose an image to display') 
    if filename:                   
        inDataset = gdal.Open(filename,GA_ReadOnly)     
        cols =  inDataset.RasterXSize
        rows =  inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return 
    if dims == None:
        dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return
    if rgb == None:
        rgb = auxil.select_rgb(bands)
    if rgb:
        r,g,b = rgb
    else:
        return
    if enhance == None:
        enhance = auxil.select_enhance('3')
    if enhance == '1':
        enhance = 'linear255'
    elif enhance == '2':
        enhance = 'linear'
    elif enhance == '3':
        enhance = 'linear2pc'
    elif enhance == '4':
        enhance = 'equalization'
    else:
        return    
    redband   = inDataset.GetRasterBand(r).ReadAsArray(x0,y0,cols,rows)
    greenband = inDataset.GetRasterBand(g).ReadAsArray(x0,y0,cols,rows)  
    blueband  = inDataset.GetRasterBand(b).ReadAsArray(x0,y0,cols,rows)
    if str(redband.dtype) == 'uint8':
        dt = 1
    elif str(redband.dtype) == 'uint16':
        dt = 2
    elif str(redband.dtype) == 'int16':
        dt = 2        
    elif str(redband.dtype) == 'float32':
        dt = 4
    elif str(redband.dtype) == 'float64':
        dt = 6
    else:
        print 'Unrecognized format'
        return   
    redband = redband.tostring()
    greenband = greenband.tostring()
    blueband = blueband.tostring()
    if dt != 1: 
        redband   = auxil.byte_stretch(redband,dtype=dt)
        greenband = auxil.byte_stretch(greenband,dtype=dt)
        blueband  = auxil.byte_stretch(blueband,dtype=dt)        
    r,g,b = auxil.stretch(redband,greenband,blueband,enhance)                                                                                                                   
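    # interleave the stretched byte strings pixel by pixel (band interleaved
    # by pixel) for PIL's raw RGB decoder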
    bip = ''
    for i in range(cols*rows):
        bip += r[i]+g[i]+b[i]
    im = Image.fromstring('RGB', (cols,rows), bip, 'raw', ('RGB',3*cols,1))
    print 'close image to finish' 
    im.show()
    print 'done'
Ejemplo n.º 31
0
def main():
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  first SAR image
    infile1 = auxil.select_infile(title='Choose first SAR image')
    if infile1:
        inDataset1 = gdal.Open(infile1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
    print 'first filename:  %s' % infile1
    print 'number of looks: %i' % m
    #  second SAR image
    infile2 = auxil.select_infile(title='Choose second SAR image')
    if not infile2:
        return
    n = auxil.select_integer(5, msg='Number of looks')
    if not n:
        return
    print 'second filename:  %s' % infile2
    print 'number of looks: %i' % n
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  significance level
    sig = auxil.select_float(0.01, 'Choose significance level')
    print 'Significance level: %f' % sig
    start = time.time()
    print 'co-registering...'
    registerSAR.registerSAR(infile1, infile2, 'warp.tif', 'GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2, GA_ReadOnly)
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize
    bands2 = inDataset2.RasterCount
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return
    if bands == 9:
        print 'Quad polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C13  (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho1 = m * (rho1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b1)
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b1 = m * (b1 + 1j * im)
        #      C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C13  (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho2 = n * (rho2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b2)
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b2 = n * (b2 + 1j * im)
        #      C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        rho3 = rho1 + rho2
        xsi3 = xsi1 + xsi2
        b3 = b1 + b2
        zeta3 = zeta1 + zeta2
        det1 = k1 * xsi1 * zeta1 + 2 * np.real(
            a1 * b1 * np.conj(rho1)) - xsi1 * (abs(rho1)**2) - k1 * (
                abs(b1)**2) - zeta1 * (abs(a1)**2)
        det2 = k2 * xsi2 * zeta2 + 2 * np.real(
            a2 * b2 * np.conj(rho2)) - xsi2 * (abs(rho2)**2) - k2 * (
                abs(b2)**2) - zeta2 * (abs(a2)**2)
        det3 = k3 * xsi3 * zeta3 + 2 * np.real(
            a3 * b3 * np.conj(rho3)) - xsi3 * (abs(rho3)**2) - k3 * (
                abs(b3)**2) - zeta3 * (abs(a3)**2)
        p = 3
        f = p**2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        rho = 1. - (2. * p**2 - 1.) * (1. / n + 1. / m - 1. /
                                       (n + m)) / (6. * p)
        omega2 = -(p * p / 4.) * (1. - 1. / rho)**2 + p**2 * (p**2 - 1.) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 4:
        print 'Dual polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        xsi3 = xsi1 + xsi2
        det1 = k1 * xsi1 - abs(a1)**2
        det2 = k2 * xsi2 - abs(a2)**2
        det3 = k3 * xsi3 - abs(a3)**2
        p = 2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2 * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 1:
        print 'Single polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        det1 = k1
        det2 = k2
        det3 = k3
        p = 1
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2. * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    else:
        print 'Incorrect number of bands'
        return
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001
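    #  log likelihood-ratio statistic of the complex Wishart change test:
    #  lnQ = cst + m*ln(det1) + n*ln(det2) - (n+m)*ln(det3); Z = -2*rho*lnQ is
    #  approximately chi-square with f = p^2 degrees of freedom, with a small
    #  correction term weighted by omega2 (computed above)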
    lnQ = cst + m * np.log(det1) + n * np.log(det2) - (n + m) * np.log(det3)
    #  test statistic
    Z = -2 * rho * lnQ
    #  change probability
    P = (1. - omega2) * stats.chi2.cdf(Z, [f]) + omega2 * stats.chi2.cdf(
        Z, [f + 4])
    P = ndimage.filters.median_filter(P, size=(3, 3))
    #  change map
    a255 = np.ones((rows, cols), dtype=np.byte) * 255
    a0 = a255 * 0
    c11 = np.log(k1 + 0.0001)
    min1 = np.min(c11)
    max1 = np.max(c11)
    c11 = (c11 - min1) * 255.0 / (max1 - min1)
    c11 = np.where(c11 < 0, a0, c11)
    c11 = np.where(c11 > 255, a255, c11)
    c11 = np.where(P > (1.0 - sig), a0, c11)
    cmap = np.where(P > (1.0 - sig), a255, c11)
    #  write to file system
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'test statistic and probabilities written to: %s' % outfile
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile = outfile.replace(name, name + '_cmap')
    outDataset = driver.Create(outfile, cols, rows, 3, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'change map image written to: %s' % outfile
    print 'elapsed time: ' + str(time.time() - start)
Ejemplo n.º 32
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    #  number of looks
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
#  number of iterations
    niter = auxil.select_integer(1, msg='Number of iterations')
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  process diagonal bands only
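#  for quad pol imagery bands 1, 6 and 9 hold the diagonal elements C11, C22,
#  C33 of the covariance matrix; for dual pol, bands 1 and 4 hold C11 and C22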
    driver = gdal.GetDriverByName(fmt)
    if bands == 9:
        outDataset = driver.Create(outfile, cols, rows, 3, GDT_Float32)
        inimage = np.zeros((3, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(6)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(9)
        inimage[2] = band.ReadAsArray(x0, y0, cols, rows)
    elif bands == 4:
        outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
        inimage = np.zeros((2, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(4)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
    else:
        outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
        inimage = inDataset.GetRasterBand(1)
    outimage = np.copy(inimage)
    print '========================='
    print '    GAMMA MAP FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s' % infile
    print 'number of looks: %i' % m
    print 'number of iterations: %i' % niter
    start = time.time()
    itr = 0
    while itr < niter:
        print 'iteration %i' % (itr + 1)
        if bands == 9:
            for k in range(3):
                outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
        elif bands == 4:
            for k in range(2):
                outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
        else:
            outimage = gamma_filter(0, inimage, outimage, rows, cols, m)
        itr += 1
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    if bands == 9:
        for k in range(3):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    elif bands == 4:
        for k in range(2):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    else:
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(outimage, 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
Ejemplo n.º 33
0
def main():  
    gdal.AllRegister()
    path = auxil.select_directory('Choose input directory')
    if path:
        os.chdir(path)        
#  input image    
    infile = auxil.select_infile(title='Choose image file') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform) 
        else:
            print 'No geotransform available'
            return       
        imsr = osr.SpatialReference()  
        imsr.ImportFromWkt(projection)      
    else:
        return  
    pos =  auxil.select_pos(bands)  
    if not pos:
        return
    N = len(pos) 
    rasterBands = [] 
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b)) 
#  training data (shapefile)      
    trnfile = auxil.select_infile(filt='.shp',title='Choose train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile,0)
        trnLayer = trnDatasource.GetLayer() 
        trnsr = trnLayer.GetSpatialRef()             
    else:
        return
#  hidden neurons
    L = auxil.select_integer(8,'number of hidden neurons')    
    if not L:
        return
#  outfile
    outfile, fmt = auxil.select_outfilefmt()   
    if not outfile:
        return     
#  coordinate transformation from training to image projection   
    ct= osr.CoordinateTransformation(trnsr,imsr) 
#  number of classes    
    feature = trnLayer.GetNextFeature() 
    while feature:
        classid = feature.GetField('CLASS_ID')
        feature = trnLayer.GetNextFeature() 
    trnLayer.ResetReading()    
    K = int(classid)+1       
    print '========================='
    print '       ffncg'
    print '========================='
    print time.asctime()    
    print 'image:    '+infile
    print 'training: '+trnfile          
#  loop through the polygons    
    Gs = [] # train observations
    ls = [] # class labels
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = feature.GetField('CLASS_ID')
        l = [0 for i in range(K)]
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
#      transform to same projection as image        
        polygon.Transform(ct)  
#      convert to a Shapely object            
        poly = shapely.wkt.loads(polygon.ExportToWkt())
#      transform the boundary to pixel coords in numpy        
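#      subtract the map origin (gt[0],gt[3]) and multiply by the inverse of
#      the 2x2 geotransform matrix [[gt[1],gt[2]],[gt[4],gt[5]]]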
        bdry = np.array(poly.boundary) 
        bdry[:,0] = bdry[:,0]-gt[0]
        bdry[:,1] = bdry[:,1]-gt[3]
        GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]])
        bdry = bdry*np.linalg.inv(GT) 
#      polygon in pixel coords        
        polygon1 = asPolygon(bdry)
#      raster over the bounding rectangle        
        minx,miny,maxx,maxy = map(int,list(polygon1.bounds))  
        pts = [] 
        for i in range(minx,maxx+1):
            for j in range(miny,maxy+1): 
                pts.append((i,j))             
        multipt =  MultiPoint(pts)   
#      intersection as list              
        intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist()
#      cut out the bounded image cube               
        cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands)))
        k=0
        for band in rasterBands:
            cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1)
            k += 1
#      get the training vectors
        for (x,y) in intersection:         
            Gs.append(cube[y-miny,x-minx,:])
            ls.append(l)   
        polygon = None
        polygon1 = None            
        feature.Destroy()  
    trnDatasource.Destroy() 
    m = len(ls)       
    print str(m) + ' training pixel vectors were read in' 
    Gs = np.array(Gs) 
    ls = np.array(ls)
#  stretch the pixel vectors to [-1,1]
    maxx = np.max(Gs,0)
    minx = np.min(Gs,0)
    for j in range(N):
        Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx,:] 
    ls = ls[idx,:]     
#  setup output dataset 
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) 
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1) 
#  train on 9/10 training examples         
    Gstrn = Gs[0:9*m//10,:]
    lstrn = ls[0:9*m//10,:]
    affn = Ffncg(Gstrn,lstrn,L)
    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    cost = affn.train(epochs=epochs)
    print 'elapsed time %s' %str(time.time()-start) 
    if cost is not None:
#        cost = np.log10(cost)  
        ymax = np.max(cost)
        ymin = np.min(cost) 
        xmax = len(cost)      
        plt.plot(range(xmax),cost,'k')
        plt.axis([0,xmax,ymin-1,ymax])
        plt.title('Cross entropy')
        plt.xlabel('Epoch')              
#      classify the image           
        print 'classifying...'
        tile = np.zeros((cols,N))    
        for row in range(rows):
            for j in range(N):
                tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1)
                tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
            cls, _ = affn.classify(tile)  
            outBand.WriteArray(np.reshape(cls,(1,cols)),0,row)
        outBand.FlushCache()
        outDataset = None
        inDataset = None  
        print 'thematic map written to: ' + outfile
        print 'please close the cross entropy plot to continue'
        plt.show()
    else:
        print 'an error occurred' 
        return 
    
    print 'submitting cross-validation to multyvac'    
    start = time.time()
    jid = mv.submit(traintst,Gs,ls,L,_layer='ms_image_analysis')  
    print 'submission time: %s' %str(time.time()-start)
    start = time.time()    
    job = mv.get(jid)
    result = job.get_result(job) 
    
    
    print 'execution time: %s' %str(time.time()-start)      
    print 'misclassification rate: %f' %np.mean(result)
    print 'standard deviation:     %f' %np.std(result)         
    print '--------done---------------------'       
Ejemplo n.º 34
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)      
#  reference image    
    file1 = auxil.select_infile(title='Choose reference image') 
    if file1:                  
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  target image     
    file2 = auxil.select_infile(title='Choose target image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize    
        bands = inDataset2.RasterCount
    else:
        return   
    pos2 =  auxil.select_pos(bands)   
    if not pos2:
        return 
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x20,y20,cols2,rows2 = dims
    else:
        return  
#  match dimensions       
    bands = len(pos2)
    if (rows1 != rows2) or (cols1 != cols2) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)             
#  iMAD image     
    file3 = auxil.select_infile(title='Choose iMAD image') 
    if file3:                  
        inDataset3 = gdal.Open(file3,GA_ReadOnly)     
        cols = inDataset3.RasterXSize
        rows = inDataset3.RasterYSize    
        imadbands = inDataset3.RasterCount
    else:
        return   
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x30,y30,cols,rows = dims
    else:
        return     
    if (rows1 != rows) or (cols1 != cols):
        sys.stderr.write("Size mismatch")
        sys.exit(1)    
#  outfile
    outfile, fmt = auxil.select_outfilefmt()   
    if not outfile:
        return    
#  full scene
    fsfile = auxil.select_infile(title='Choose full target scene if desired')               
#  no-change threshold    
    ncpThresh = auxil.select_ncp(0.95)    
    if ncpThresh is None:
        return                 
    chisqr = inDataset3.GetRasterBand(imadbands).ReadAsArray(x30,y30,cols,rows).ravel()
    ncp = 1 - stats.chi2.cdf(chisqr,[imadbands-1])
    idx = np.where(ncp>ncpThresh)[0]
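#  pixels whose no-change probability exceeds the threshold are treated as
#  invariant and used to estimate the band-wise normalization by orthogonal
#  regression below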
#  split train/test in ratio 2:1 
    tmp = np.asarray(range(len(idx)))
    tst = idx[np.where(np.mod(tmp,3) == 0)]
    trn = idx[np.where(np.mod(tmp,3) > 0)]
    
    print '========================================='
    print '             RADCAL'
    print '========================================='
    print time.asctime()     
    print 'reference: '+file1
    print 'target   : '+file2
    print 'no-change probability threshold: '+str(ncpThresh)
    print 'no-change pixels (train): '+str(len(trn))
    print 'no-change pixels (test): '+str(len(tst))           
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32) 
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10*gt[1]
        gt[3] = gt[3] + y10*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)      
    aa = []
    bb = []  
    i = 1
    for k in pos1:
        x = inDataset1.GetRasterBand(k).ReadAsArray(x10,y10,cols,rows).astype(float).ravel()
        y = inDataset2.GetRasterBand(k).ReadAsArray(x20,y20,cols,rows).astype(float).ravel() 
        b,a,R = auxil.orthoregress(y[trn],x[trn])
        print '--------------------'
        print 'spectral band:      ', k
        print 'slope:              ', b
        print 'intercept:          ', a
        print 'correlation:        ', R
        print 'means(tgt,ref,nrm): ', np.mean(y[tst]),np.mean(x[tst]),np.mean(a+b*y[tst])
        print 't-test, p-value:    ', stats.ttest_rel(x[tst], a+b*y[tst])
        print 'vars(tgt,ref,nrm)   ', np.var(y[tst]),np.var(x[tst]),np.var(a+b*y[tst])
        print 'F-test, p-value:    ', auxil.fv_test(x[tst], a+b*y[tst])
        aa.append(a)
        bb.append(b)   
        outBand = outDataset.GetRasterBand(i)
        outBand.WriteArray(np.resize(a+b*y,(rows,cols)),0,0) 
        outBand.FlushCache()
        if i <= 10:
            plt.figure(i)    
            ymax = max(y[idx]) 
            xmax = max(x[idx])      
            plt.plot(y[idx],x[idx],'k.',[0,ymax],[a,a+b*ymax],'k-')
            plt.axis([0,ymax,0,xmax])
            plt.title('Band '+str(k))
            plt.xlabel('Target')
            plt.ylabel('Reference')        
        i += 1
    outDataset = None
    print 'result written to: '+outfile        
    if fsfile is not None:
        path = os.path.dirname(fsfile)
        basename = os.path.basename(fsfile)
        root, ext = os.path.splitext(basename)
        fsoutfile = path+'/'+root+'_norm'+ext        
        print 'normalizing '+fsfile+'...' 
        fsDataset = gdal.Open(fsfile,GA_ReadOnly)
        cols = fsDataset.RasterXSize
        rows = fsDataset.RasterYSize    
        driver = fsDataset.GetDriver()
        outDataset = driver.Create(fsoutfile,cols,rows,bands,GDT_Float32)
        projection = fsDataset.GetProjection()
        geotransform = fsDataset.GetGeoTransform()
        if geotransform is not None:
            outDataset.SetGeoTransform(geotransform)
        if projection is not None:
            outDataset.SetProjection(projection) 
        j = 0
        for k in pos2:
            inBand = fsDataset.GetRasterBand(k)
            outBand = outDataset.GetRasterBand(j+1)
            for i in range(rows):
                y = inBand.ReadAsArray(0,i,cols,1)
                outBand.WriteArray(aa[j]+bb[j]*y,0,i) 
            outBand.FlushCache() 
            j += 1      
        outDataset = None    
        print 'result written to: '+fsoutfile
    plt.show()
    print '-------done-----------------------------'
Ejemplo n.º 35
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  get (spatial subset of) the C11 or C33 file first
    file1 = auxil.select_infile(
        title='Choose one component (C11, C22 or C33)')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    inDataset = None
    #  spatial subset
    x0, y0, cols, rows = auxil.select_dims([0, 0, cols, rows])
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  output image
    outim = np.zeros((9, rows, cols), dtype=np.float32)
    #  get list of all files
    files = os.listdir(path)
    for afile in files:
        if re.search('hdr|sml', afile):
            continue
#      single polarimetry
        if re.search('pwr_geo', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None


#      dual and quad polarimetry
        elif re.search('hh_hh_geo|C11\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hh_hv_geo|C12_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[1, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hh_hv_geo|C12_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[2, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hh_vv_geo|C13_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[3, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hh_vv_geo|C13_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[4, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('hv_hv_geo|C22\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[5, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hv_vv_geo|C23_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[6, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hv_vv_geo|C23_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[7, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('vv_vv_geo|C33\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[8, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
    outim = np.nan_to_num(outim)
    idx = np.where(np.sum(np.abs(outim), axis=(1, 2)) > 0)[0]
    if len(idx) == 0:
        print 'no polarimetric bands found'
        return
    bands = len(idx)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(outim[idx[k], :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print '%i-band polarimetric image written to: %s' % (bands, outfile)
Ejemplo n.º 36
0
def main():
    tstfile = auxil.select_infile(filt='.tst', title='Test results file') 
    if not tstfile:
        return
    print '========================='
    print 'classification statistics'
    print '========================='
    with open(tstfile,'r') as f:
        line = ''
        for i in range(4):
            line += f.readline()
        print line    
        line = f.readline().split()
        n = int(line[0]) 
        K = int(line[1])
        CT = np.zeros((K+2,K+2))
#      fill the contingency table
        y = 0.0
        line = f.readline()
        while line:
            k = map(int,line.split())
            k1 = k[0]-1
            k2 = k[1]-1
            CT[k1,k2] += 1
            if k1 != k2:
                y += 1
            line = f.readline()
        f.close()
        CT[K,:] = np.sum(CT, axis=0)
        CT[:,K] = np.sum(CT, axis=1)
        for i in range(K):
            CT[K+1,i] = CT[i,i]/CT[K,i]
            CT[i,K+1] = CT[i,i]/CT[i,K]      
#      overall misclassification rate
        sigma = np.sqrt(y*(n-y)/n**3)
        low = (y+1.921-1.96*np.sqrt(0.96+y*(n-y)/n))/(3.842+n)
        high= (y+1.921+1.96*np.sqrt(0.96+y*(n-y)/n))/(3.842+n)
        print 'Misclassification rate: %f'%(y/n)
        print 'Standard deviation: %f'%sigma
        print 'Conf. interval (95 percent): [%f , %f]'%(low, high)
#      Kappa coefficient
        t1 = float(n-y)/n
        t2 = np.sum(CT[K,0:K]*np.transpose(CT[0:K,K]))/n**2
        Kappa = (t1 - t2)/(1 - t2)
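#      t1 is the observed agreement, t2 the expected chance agreement from the
#      marginals; t3 and t4 below enter the large-sample variance estimate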
        t3 = 0.0
        for i in range(K):
            t3 = t3 + CT[i,i]*(CT[K,i]+CT[i,K])
        t3 = t3/n**2
        t4 = 0.0
        for i in range(K):
            for j in range(K):
                t4 += CT[j,i]*(CT[K,j]+CT[i,K])**2
        t4 = t4/n**3
        sigma2 = t1*(1-t1)/(1-t2)**2
        sigma2 = sigma2 + 2*(1-t1)*(2*t1*t2-t3)/(1-t2)**3
        sigma2 = sigma2 + ((1-t1)**2)*(t4-4*t2**2)/(1-t2)**4
        sigma = np.sqrt(sigma2/n)
        print 'Kappa coefficient: %f'%Kappa
        print 'Standard deviation: %f'%sigma
        print 'Contingency Table'
        with printoptions(precision=3, linewidth = 200, suppress=True):
            print CT
Ejemplo n.º 37
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  SAR image    
    infile = auxil.select_infile(title='Choose SAR image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  number of looks
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return       
#  get filter weights from span image
    b = np.ones((rows,cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0,y0,cols,rows).ravel()
    if bands==9:      
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    elif bands==4:
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()    
    edge_idx = np.zeros((rows,cols),dtype=int)
    print '========================='
    print '       MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s'%infile
    print 'number of looks: %i'%m     
    print 'Determining filter weights from span image'    
    start = time.time()
    print 'row: ',
    sys.stdout.flush()     
    for j in range(3,rows-3):
        if j%50 == 0:
            print '%i '%j, 
            sys.stdout.flush()
        windex = get_windex(j,cols)
        for i in range(3,cols-3):            
            wind = np.reshape(span[windex],(7,7))         
#          3x3 compression
            w = congrid.congrid(wind,(3,3),method='spline',centre=True)
#          get appropriate edge mask
            es = [np.sum(edges[p]*w) for p in range(4)]
            idx = np.argmax(es)  
            if idx == 0:
                if np.abs(w[1,1]-w[1,0]) < np.abs(w[1,1]-w[1,2]):
                    edge_idx[j,i] = 0
                else:
                    edge_idx[j,i] = 4
            elif idx == 1:
                if np.abs(w[1,1]-w[2,0]) < np.abs(w[1,1]-w[0,2]):
                    edge_idx[j,i] = 1
                else:
                    edge_idx[j,i] = 5                
            elif idx == 2:
                if np.abs(w[1,1]-w[0,1]) < np.abs(w[1,1]-w[2,1]):
                    edge_idx[j,i] = 6
                else:
                    edge_idx[j,i] = 2  
            elif idx == 3:
                if np.abs(w[1,1]-w[0,0]) < np.abs(w[1,1]-w[2,2]):
                    edge_idx[j,i] = 7
                else:
                    edge_idx[j,i] = 3 
            edge = templates[edge_idx[j,i]]  
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
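            # MMSE weight b = (1 - gbar^2/(m*varg))/(1 + 1/m), clipped at zero;
            # gbar, varg are the local mean and variance in the edge-masked window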
            if varg > 0:
                b[j,i] = np.max( ((1.0 - gbar**2/(varg*m))/(1.0+1.0/m), 0.0) )        
            windex += 1
    print ' done'        
#  filter the image
    outim = np.zeros((rows,cols),dtype=np.float32)
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    print 'Filtering covariance matrix elements'  
    for k in range(1,bands+1):
        print 'band: %i'%(k)
        band = inDataset.GetRasterBand(k)
        band = band.ReadAsArray(x0,y0,cols,rows)
        gbar = band*0.0
#      get window means
        for j in range(3,rows-3):        
            windex = get_windex(j,cols)
            for i in range(3,cols-3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j,i]]
                wind = wind[edge]
                gbar[j,i] = np.mean(wind)
                windex += 1
#      apply adaptive filter and write to disk
        outim = np.reshape(gbar + b*(band-gbar),(rows,cols))   
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim,0,0) 
        outBand.FlushCache() 
    outDataset = None
    print 'result written to: '+outfile 
    print 'elapsed time: '+str(time.time()-start)                 
Ejemplo n.º 38
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    m = auxil.select_integer(1000, 'Select training sample size')
    K = auxil.select_integer(6, 'Select number of clusters')
    outfile, outfmt = auxil.select_outfilefmt()
    if not outfile:
        return
    kernel = auxil.select_integer(1, 'Select kernel: 0=linear, 1=Gaussian')
    print '========================='
    print '       kkmeans'
    print '========================='
    print 'infile:  ' + infile
    print 'samples: ' + str(m)
    if kernel == 0:
        print 'kernel:  ' + 'linear'
    else:
        print 'kernel:  ' + 'Gaussian'
    start = time.time()
    #  input data matrix
    XX = np.zeros((cols * rows, bands))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0, y0, cols, rows).astype(float)
        XX[:, k] = np.ravel(band)
        k += 1
#  training data matrix
    idx = np.fix(np.random.random(m) * (cols * rows)).astype(np.integer)
    X = XX[idx, :]
    print 'kernel matrix...'
    # uncentered kernel matrix
    KK, gma = auxil.kernelMatrix(X, kernel=kernel)
    if gma is not None:
        print 'gamma: ' + str(round(gma, 6))


#  initial (random) class labels
    labels = np.random.randint(K, size=m)
    #  iteration
    change = True
    itr = 0
    onesm = np.mat(np.ones(m, dtype=float))
    while change and (itr < 100):
        change = False
        U = np.zeros((K, m))
        for i in range(m):
            U[labels[i], i] = 1
        M = np.diag(1.0 / (np.sum(U, axis=1) + 1.0))
        MU = np.mat(np.dot(M, U))
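        # squared distances to the cluster means in feature space, up to a term
        # independent of the cluster: Z[i,k] = diag(MU*KK*MU.T)[k] - 2*(KK*MU.T)[i,k]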
        Z = (onesm.T) * np.diag(MU * KK * (MU.T)) - 2 * KK * (MU.T)
        Z = np.array(Z)
        labels1 = (np.argmin(Z, axis=1) % K).ravel()
        if np.sum(labels1 != labels):
            change = True
        labels = labels1
        itr += 1
    print 'iterations: %i' % itr
    #  classify image
    print 'classifying...'
    i = 0
    A = np.diag(MU * KK * (MU.T))
    A = np.tile(A, (cols, 1))
    class_image = np.zeros((rows, cols), dtype=np.byte)
    while i < rows:
        XXi = XX[i * cols:(i + 1) * cols, :]
        KKK, _ = auxil.kernelMatrix(X, XXi, gma=gma, kernel=kernel)
        Z = A - 2 * (KKK.T) * (MU.T)
        Z = np.array(Z)
        labels = np.argmin(Z, axis=1).ravel()
        class_image[i, :] = (labels % K) + 1
        i += 1
    sys.stdout.write("\n")
    #  write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        #  try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K)
        classlookup = '{0'
        for i in range(1, 3 * K):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = [str(i + 1) for i in range(K)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
Ejemplo n.º 39
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  reference image
    file1 = auxil.select_infile(title='Choose reference image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
#  target image
    file2 = auxil.select_infile(title='Choose target image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize
        bands = inDataset2.RasterCount
    else:
        return
    pos2 = auxil.select_pos(bands)
    if not pos2:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x20, y20, cols2, rows2 = dims
    else:
        return
#  match dimensions
    bands = len(pos2)
    if (rows1 != rows2) or (cols1 != cols2) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)
#  iMAD image
    file3 = auxil.select_infile(title='Choose iMAD image')
    if file3:
        inDataset3 = gdal.Open(file3, GA_ReadOnly)
        cols = inDataset3.RasterXSize
        rows = inDataset3.RasterYSize
        imadbands = inDataset3.RasterCount
    else:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x30, y30, cols, rows = dims
    else:
        return
    if (rows1 != rows) or (cols1 != cols):
        sys.stderr.write("Size mismatch")
        sys.exit(1)
#  outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  full scene
    fsfile = auxil.select_infile(title='Choose full target scene if desired')
    #  no-change threshold
    ncpThresh = auxil.select_ncp(0.95)
    if ncpThresh is None:
        return
    chisqr = inDataset3.GetRasterBand(imadbands).ReadAsArray(
        x30, y30, cols, rows).ravel()
    ncp = 1 - stats.chi2.cdf(chisqr, [imadbands - 1])
    idx = np.where(ncp > ncpThresh)[0]
    #  split train/test in ratio 2:1
    tmp = np.asarray(range(len(idx)))
    tst = idx[np.where(np.mod(tmp, 3) == 0)]
    trn = idx[np.where(np.mod(tmp, 3) > 0)]

    print '========================================='
    print '             RADCAL'
    print '========================================='
    print time.asctime()
    print 'reference: ' + file1
    print 'target   : ' + file2
    print 'no-change probability threshold: ' + str(ncpThresh)
    print 'no-change pixels (train): ' + str(len(trn))
    print 'no-change pixels (test): ' + str(len(tst))
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10 * gt[1]
        gt[3] = gt[3] + y10 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    aa = []
    bb = []
    i = 1
    for k in pos1:
        x = inDataset1.GetRasterBand(k).ReadAsArray(
            x10, y10, cols, rows).astype(float).ravel()
        y = inDataset2.GetRasterBand(k).ReadAsArray(
            x20, y20, cols, rows).astype(float).ravel()
        b, a, R = auxil.orthoregress(y[trn], x[trn])
        print '--------------------'
        print 'spectral band:      ', k
        print 'slope:              ', b
        print 'intercept:          ', a
        print 'correlation:        ', R
        print 'means(tgt,ref,nrm): ', np.mean(y[tst]), np.mean(
            x[tst]), np.mean(a + b * y[tst])
        print 't-test, p-value:    ', stats.ttest_rel(x[tst], a + b * y[tst])
        print 'vars(tgt,ref,nrm)   ', np.var(y[tst]), np.var(
            x[tst]), np.var(a + b * y[tst])
        print 'F-test, p-value:    ', auxil.fv_test(x[tst], a + b * y[tst])
        aa.append(a)
        bb.append(b)
        outBand = outDataset.GetRasterBand(i)
        outBand.WriteArray(np.resize(a + b * y, (rows, cols)), 0, 0)
        outBand.FlushCache()
        if i <= 10:
            plt.figure(i)
            ymax = max(y[idx])
            xmax = max(x[idx])
            plt.plot(y[idx], x[idx], 'k.', [0, ymax], [a, a + b * ymax], 'k-')
            plt.axis([0, ymax, 0, xmax])
            plt.title('Band ' + str(k))
            plt.xlabel('Target')
            plt.ylabel('Reference')
        i += 1
    outDataset = None
    print 'result written to: ' + outfile
    if fsfile is not None:
        path = os.path.dirname(fsfile)
        basename = os.path.basename(fsfile)
        root, ext = os.path.splitext(basename)
        fsoutfile = path + '/' + root + '_norm' + ext
        print 'normalizing ' + fsfile + '...'
        fsDataset = gdal.Open(fsfile, GA_ReadOnly)
        cols = fsDataset.RasterXSize
        rows = fsDataset.RasterYSize
        driver = fsDataset.GetDriver()
        outDataset = driver.Create(fsoutfile, cols, rows, bands, GDT_Float32)
        projection = fsDataset.GetProjection()
        geotransform = fsDataset.GetGeoTransform()
        if geotransform is not None:
            outDataset.SetGeoTransform(geotransform)
        if projection is not None:
            outDataset.SetProjection(projection)
        j = 0
        for k in pos2:
            inBand = fsDataset.GetRasterBand(k)
            outBand = outDataset.GetRasterBand(j + 1)
            for i in range(rows):
                y = inBand.ReadAsArray(0, i, cols, 1)
                outBand.WriteArray(aa[j] + bb[j] * y, 0, i)
            outBand.FlushCache()
            j += 1
        outDataset = None
        print 'result written to: ' + fsoutfile
    plt.show()
    print '-------done-----------------------------'
Ejemplo n.º 40
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
#    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)   
    infile = auxil.select_infile(title='Select a class probability image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        K = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return   
    print '========================='
    print '       PLR_reclass'
    print '========================='
    print 'infile:  %s'%infile
    start = time.time() 
    prob_image = np.zeros((K,rows,cols))
    for k in range (K):
        band = inDataset.GetRasterBand(k+1)
        prob_image[k,:,:] = band.ReadAsArray(0,0,cols,rows).astype(float)                                   
    class_image = np.zeros((rows,cols),dtype=np.byte)  
    print 'reclassifying...'
    for i in range(rows):
        if i % 50 == 0:
            print '%i rows processed'%i
        for j in range(cols):
            cls = np.argmax(prob_image[:,i,j])
            class_image[i,j] = cls+1
#  write to disk
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None
    inDataset = None
    if (fmt == 'ENVI') and (K<19):
#          try to make an ENVI classification header file 
        classnames = '{unclassified '   
        for i in range(K):
            classnames += ', '+str(i+1)
        classnames += '}'       
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(str(auxil.ctable[i]))
        classlookup +='}'    
        hdr['class lookup'] = classlookup
        hdr['class names'] = classnames
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()       
    print 'result written to: '+outfile    
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
Ejemplo n.º 41
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path) 
    infile = auxil.select_infile(title='Select an image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    pos =  auxil.select_pos(bands) 
    if not pos:
        return   
    bands = len(pos)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return   
    class_image = np.zeros((rows,cols),dtype=np.byte)
    K = auxil.select_integer(6,'Number of clusters')
    max_scale = auxil.select_integer(2,'Maximum scaling factor')
    max_scale = min((max_scale,3))
    min_scale = auxil.select_integer(0,'Minimum scaling factor')
    min_scale = min((max_scale,min_scale))
    T0 = auxil.select_float(0.5,'Initial annealing temperature')
    beta = auxil.select_float(0.5,'Spatial mixing parameter')            
    outfile, outfmt = auxil.select_outfilefmt('Select output classification file')  
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt('Select output probability file (optional)')  
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s'%infile
    print 'clusters: %i'%K
    print 'T0:       %f'%T0
    print 'beta:     %f'%beta         

    start = time.time()                                     
#  read in image and compress 
    DWTbands = []               
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(band.ReadAsArray(x0,y0,cols,rows).astype(float),cols,rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    rows,cols = DWTbands[0].get_quadrant(0).shape    
    G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))
#  initialize membership matrix    
    n = G.shape[0]
    U = np.random.random((K,n))
    den = np.sum(U,axis=0)
    for j in range(K):
        U[j,:] = U[j,:]/den
#  cluster at minimum scale
    try:
        U,Ms,Cs,Ps,pdens = em(G,U,T0,beta,rows,cols)
    except:
        print 'em failed' 
        return     
#  sort clusters wrt partition density
    idx = np.argsort(pdens)  
    idx = idx[::-1]
    U = U[idx,:]
#  clustering at increasing scales
    for i in range(max_scale-min_scale):
#      expand U and renormalize         
        U = np.reshape(U,(K,rows,cols))  
        rows = rows*2
        cols = cols*2
        U = ndi.zoom(U,(1,2,2))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))  
#      cluster
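#      only pixels whose largest class membership is still below 0.9 are
#      re-estimated; memberships of the remaining pixels are kept from the
#      coarser scale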
        unfrozen = np.where(np.max(U,axis=0) < 0.90)
        try:
            U,Ms,Cs,Ps,pdens = em(G,U,0.0,beta,rows,cols,unfrozen=unfrozen)
        except:
            print 'em failed' 
            return                         
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i'%k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale>0:
        U = np.reshape(U,(K,rows,cols))
        f = 2**min_scale  
        rows = rows*f
        cols = cols*f
        U = ndi.zoom(U,(1,f,f))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den        
#  classify
    labels = np.byte(np.argmax(U,axis=0)+1)
    class_image[0:rows,0:cols] = np.reshape(labels,(rows,cols))
    rows1,cols1 = class_image.shape
#  write to disk
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols1,rows1,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None   
#  write class membership probability file if desired  
    if probfile:
        driver = gdal.GetDriverByName(probfmt)    
        outDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) 
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt)) 
        if projection is not None:
            outDataset.SetProjection(projection)  
        for k in range(K):
            probs = np.reshape(U[k,:],(rows,cols))
            probs = np.byte(probs*255)
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(probs,0,0)
            outBand.FlushCache()    
        outDataset = None    
        print 'class probabilities written to: %s'%probfile                                  
    inDataset = None
    if (outfmt == 'ENVI') and (K<19):
#  try to make an ENVI classification header file            
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(ctable[i])
        classlookup +='}'    
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i'%i for i in range(K+1)]
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()                 
    print 'classification written to: '+outfile       
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
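The class membership probabilities are scaled to byte values (multiplied by 255) before being written to the optional probability file. A minimal sketch, assuming a K-band byte probability image like the one written above (the file name here is hypothetical), of reading it back and recovering probabilities in [0, 1]:

from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly
import numpy as np

probfile = 'class_probs.tif'              # hypothetical file name
ds = gdal.Open(probfile, GA_ReadOnly)
K, rows, cols = ds.RasterCount, ds.RasterYSize, ds.RasterXSize
U = np.zeros((K, rows*cols))
for k in range(K):
#  undo the byte scaling applied before writing
    U[k, :] = ds.GetRasterBand(k+1).ReadAsArray(0, 0, cols, rows).ravel()/255.0
print 'column sums (should be close to 1): %s' % str(U.sum(axis=0)[:5])
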
Ejemplo n.º 42
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  MS image    
    file1 = auxil.select_infile(title='Choose MS image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    num_bands = len(pos1)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  PAN image     
    file2 = auxil.select_infile(title='Choose PAN image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)       
        bands = inDataset2.RasterCount
    else:
        return   
    if bands>1:
        print 'Must be a single-band (panchromatic) image'
        return 
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()        
#  outfile
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return 
#  resolution ratio      
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)') 
    if not ratio:
        return        
#  MS registration band    
    k1 = auxil.select_integer(1, 'MS band for registration') 
    if not k1:
        return  
#  fine adjust
    roll = auxil.select_integer(0, 'Fine adjust (-2 ... 2)') 
    if roll is None:
        return        
    print '========================='
    print '   DWT Pansharpening'
    print '========================='
    print time.asctime()     
    print 'MS  file: '+file1
    print 'PAN file: '+file2       
#  image arrays
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0,0,1,1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands,rows1,cols1)),dtype=dt) 
    k = 0                                   
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k,:,:] = band.ReadAsArray(x10,y10,cols1,rows1)
        k += 1
#  if integer, assume 11-bit quantization; otherwise must be byte
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS,(0,2**11)) 
    else:
        fact = 1.0
#  read in corresponding spatial subset of PAN image    
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting' 
        return
#  upper left corner pixel in PAN    
    gt1 = list(geotransform1)               
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10*gt1[1]
    uly1 = gt1[3] + y10*gt1[5]
    x20 = int(round(((ulx1 - gt2[0])/gt2[1])))
    y20 = int(round(((uly1 - gt2[3])/gt2[5])))
    cols2 = cols1*ratio
    rows2 = rows1*ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20,y20,cols2,rows2)        
#  if integer assume 11-bit quantization, otherwise must be byte    
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN,(0,2**11))                                   
#  compress PAN to resolution of MS image  
    panDWT = auxil.DWTArray(PAN,cols2,rows2)          
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0) 
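#  quadrant 0 holds the low-pass approximation of the PAN image, now at the reduced (MS) resolution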
    lines0,samples0 = bn0.shape    
    bn1 = MS[k1-1,:,:]  
#  register (and subset) MS image to compressed PAN image 
    (scale,angle,shift) = auxil.similarity(bn0,bn1)
    tmp = np.zeros((num_bands,lines0,samples0))
    for k in range(num_bands): 
        bn1 = MS[k,:,:]                    
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k,:,:] = bn2[0:lines0,0:samples0]        
    MS = tmp   
    if roll != 0:
#  fine adjust                            
        PAN = np.roll(PAN,roll,axis=0)
        PAN = np.roll(PAN,roll,axis=1)
        panDWT = auxil.DWTArray(PAN,cols2,rows2)          
        r = ratio
        while r > 1:
            panDWT.filter()
            r /= 2                   
#  compress pan once more, extract wavelet quadrants, and restore
    panDWT.filter()  
    fgpan = panDWT.get_quadrant(1)
    gfpan = panDWT.get_quadrant(2)
    ggpan = panDWT.get_quadrant(3)    
    panDWT.invert()       
#  output array            
    sharpened = np.zeros((num_bands,rows2,cols2),dtype=np.float32)     
    aa = np.zeros(3)
    bb = np.zeros(3)       
    print 'Wavelet correlations:'                                   
    for i in range(num_bands):
#      make copy of panDWT and inject ith ms band                
        msDWT = copy.deepcopy(panDWT)
        msDWT.put_quadrant(MS[i,:,:],0)
#      compress once more                 
        msDWT.filter()
#      determine wavelet normalization coefficients
        ms = msDWT.get_quadrant(1)    
        aa[0],bb[0],R = auxil.orthoregress(fgpan.ravel(), ms.ravel())
        Rs = 'Band '+str(i+1)+': %8.3f'%R
        ms = msDWT.get_quadrant(2)
        aa[1],bb[1],R = auxil.orthoregress(gfpan.ravel(), ms.ravel())
        Rs += '%8.3f'%R                     
        ms = msDWT.get_quadrant(3)
        aa[2],bb[2],R = auxil.orthoregress(ggpan.ravel(), ms.ravel()) 
        Rs += '%8.3f'%R    
        print Rs         
#      restore once and normalize wavelet coefficients
        msDWT.invert() 
        msDWT.normalize(aa,bb)   
#      restore completely and collect result
        r = 1
        while r < ratio:
            msDWT.invert()
            r *= 2                            
        sharpened[i,:,:] = msDWT.get_quadrant(0)      
    sharpened *= fact    
#  write to disk       
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,cols2,rows2,num_bands,GDT_Float32)
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)        
    gt1 = list(geotransform1)
#  shift the upper-left corner by the MS subset offset (in map units)
    gt1[0] += x10*gt1[1]
    gt1[3] += y10*gt1[5]
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))   
    for k in range(num_bands):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(sharpened[k,:,:],0,0) 
        outBand.FlushCache() 
    outDataset = None    
    print 'Result written to %s'%outfile    
    inDataset1 = None
    inDataset2 = None                      
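auxil.orthoregress is not listed here; from its use above it presumably fits a straight line by orthogonal (total least squares) regression and returns the slope, intercept and correlation coefficient. A self-contained numpy sketch under that assumption:

import numpy as np

def orthoregress_sketch(x, y):
#  orthogonal (total least squares) fit y = a*x + b via the principal
#  eigenvector of the 2x2 covariance matrix of (x, y)
    C = np.cov(np.array([x, y]))
    lams, V = np.linalg.eigh(C)
    v = V[:, np.argmax(lams)]
    a = v[1]/v[0]
    b = np.mean(y) - a*np.mean(x)
    R = np.corrcoef(x, y)[0, 1]
    return a, b, R

x = np.random.randn(1000)
y = 2.0*x + 1.0 + 0.1*np.random.randn(1000)
print orthoregress_sketch(x, y)           # approximately (2.0, 1.0, 1.0)
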
Ejemplo n.º 43
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  SAR image    
    infile = auxil.select_infile(title='Choose SAR image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  number of looks
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
#  number of iterations
    niter = auxil.select_integer(1,msg='Number of iterations')    
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return       
#  process diagonal bands only
    driver = gdal.GetDriverByName(fmt) 
    if bands == 9:   
        outDataset = driver.Create(outfile,cols,rows,3,GDT_Float32)
        inimage = np.zeros((3,rows,cols))
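#      bands 1, 6 and 9 hold the diagonal elements C11, C22 and C33 of the quad-pol covariance matrix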
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0,y0,cols,rows)     
        band = inDataset.GetRasterBand(6)
        inimage[1] = band.ReadAsArray(x0,y0,cols,rows)
        band = inDataset.GetRasterBand(9)
        inimage[2] = band.ReadAsArray(x0,y0,cols,rows)        
    elif bands == 4:
        outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32)        
        inimage = np.zeros((2,rows,cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0,y0,cols,rows)     
        band = inDataset.GetRasterBand(4)
        inimage[1] = band.ReadAsArray(x0,y0,cols,rows) 
    else:
        outDataset = driver.Create(outfile,cols,rows,1,GDT_Float32)
        inimage = np.zeros((1,rows,cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0,y0,cols,rows)
    outimage = np.copy(inimage)
    print '========================='
    print '    GAMMA MAP FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s'%infile
    print 'number of looks: %i'%m   
    print 'number of iterations: %i'%niter         
    start = time.time() 
    itr = 0
    while itr < niter:
        print 'iteration %i'%(itr+1) 
        if bands == 9:
            for k in range(3):
                outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m)
        elif bands == 4:
            for k in range(2):
                outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m)   
        else:
            outimage[0] = gamma_filter(0,inimage,outimage,rows,cols,m)
        itr += 1   
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    if bands == 9:
        for k in range(3):    
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(outimage[k],0,0) 
            outBand.FlushCache() 
    elif bands == 4:
        for k in range(2):    
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(outimage[k],0,0) 
            outBand.FlushCache() 
    else:
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(outimage[0],0,0) 
        outBand.FlushCache()                     
    outDataset = None
    print 'result written to: '+outfile 
    print 'elapsed time: '+str(time.time()-start)                 
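gamma_filter itself is not listed in this example. Adaptive speckle filters of this kind are driven by the local mean and variance in a moving window; a self-contained sketch of those statistics (the 7x7 window and the simulated 5-look image are arbitrary choices):

import numpy as np
from scipy.ndimage import uniform_filter

def local_stats(img, win=7):
#  local mean and variance in a win x win moving window
    mn = uniform_filter(img, win)
    meansq = uniform_filter(img*img, win)
    var = np.maximum(meansq - mn*mn, 0.0)
    return mn, var

img = np.random.gamma(5.0, 1.0/5.0, (200, 200))   # unit-mean 5-look speckle
mn, var = local_stats(img)
print 'mean coefficient of variation: %f' % np.mean(np.sqrt(var)/(mn + 1e-10))
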
Ejemplo n.º 44
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    #    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select a class probability image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        K = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    print '========================='
    print '       PLR_reclass'
    print '========================='
    print 'infile:  %s' % infile
    start = time.time()
    prob_image = np.zeros((K, rows, cols))
    for k in range(K):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
    class_image = np.zeros((rows, cols), dtype=np.byte)
    print 'reclassifying...'
    for i in range(rows):
        if i % 50 == 0:
            print '%i rows processed' % i
        for j in range(cols):
            # np.argmax returns a numpy integer; convert to int and label classes 1..K
            cls = int(np.argmax(prob_image[:, i, j]))
            class_image[i, j] = cls + 1


#  write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    inDataset = None
    if (fmt == 'ENVI') and (K < 19):
        #          try to make an ENVI classification header file
        classnames = '{unclassified '
        for i in range(K):
            classnames += ', ' + str(i + 1)
        classnames += '}'
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(auxil.ctable[i])
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = classnames
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
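The pixel-by-pixel loop above can be collapsed into a single vectorized call; a sketch with a random probability image standing in for the bands read from disk:

import numpy as np

K, rows, cols = 5, 400, 400
prob_image = np.random.random((K, rows, cols))    # stand-in for the input bands
#  label of the largest membership probability, classes numbered 1..K
class_image = (np.argmax(prob_image, axis=0) + 1).astype(np.uint8)
print class_image.shape, class_image.min(), class_image.max()
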
Ejemplo n.º 45
0
def main(): 
    gdal.AllRegister()
    infile = auxil.select_infile() 
    if infile:                  
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    
#  spectral and spatial subsets    
    pos =  auxil.select_pos(bands)
    bands = len(pos)    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])
    
#  data matrix
    G = zeros((rows*cols,len(pos))) 
    k = 0                                   
    for b in pos:
        band = inDataset.GetRasterBand(b)
        tmp = band.ReadAsArray(x0,y0,cols,rows)\
                              .astype(float).ravel()
        G[:,k] = tmp - mean(tmp)
        k += 1
        
#  covariance matrix
    C = mat(G).T*mat(G)/(cols*rows-1)
    
#  diagonalize    
    lams,U = linalg.eigh(C)
     
#  sort
    idx = argsort(lams)[::-1]
    lams = lams[idx]
    U = U[:,idx]         
               
#  project
    PCs = reshape(array(G*U),(rows,cols,bands))   
    
#  write to disk       
    outfile,fmt = auxil.select_outfilefmt() 
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols,rows,bands,GDT_Float32)
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform)
            gt[0] = gt[0] + x0*gt[1]
            gt[3] = gt[3] + y0*gt[5]
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)        
        for k in range(bands):        
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(PCs[:,:,k],0,0) 
            outBand.FlushCache() 
        outDataset = None    
    inDataset = None        
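A common follow-up to the eigendecomposition above is the fraction of total variance explained by each principal component; a self-contained sketch with a random data matrix standing in for the centred image bands:

import numpy as np

X = np.random.randn(1000, 4)                # stand-in for the centred data matrix G
C = np.cov(X, rowvar=False)
lams, U = np.linalg.eigh(C)
lams = lams[np.argsort(lams)[::-1]]         # decreasing order
print 'variance explained (percent): %s' % str(100.0*lams/np.sum(lams))
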
Ejemplo n.º 46
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)
    file1 = auxil.select_infile(title='Base image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Base image: %s' % file1
    else:
        return
    file2 = auxil.select_infile(title='Warp image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        cols2 = inDataset2.RasterXSize
        rows2 = inDataset2.RasterYSize
        bands2 = inDataset2.RasterCount
        print 'Warp image: %s' % file2
    else:
        return
    file3 = auxil.select_infile(title='GCP file',\
                                  filt='pts')
    if file3:
        pts1, pts2 = parse_gcp(file3)
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    image2 = zeros((bands2, rows2, cols2))
    for k in range(bands2):
        band = inDataset2.GetRasterBand(k + 1)
        image2[k, :, :] = band.ReadAsArray(0, 0, cols2, rows2)
    inDataset2 = None
    n = len(pts1)
    y = pts1.ravel()
    A = zeros((2 * n, 4))
    for i in range(n):
        A[2 * i, :] = [pts2[i, 0], -pts2[i, 1], 1, 0]
        A[2 * i + 1, :] = [pts2[i, 1], pts2[i, 0], 0, 1]
    a, b, x0, y0 = linalg.lstsq(A, y)[0]
    R = array([[a, -b], [b, a]])
    warped = zeros((bands2, rows1, cols1), dtype=uint8)
    for k in range(bands2):
        tmp = ndimage.affine_transform(image2[k, :, :], R)
        warped[k, :, :] = tmp[-y0:-y0 + rows1, -x0:-x0 + cols1]
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols1, rows1, bands2, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    projection = inDataset1.GetProjection()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(bands2):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(warped[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None
    print 'Warped image written to: %s' % outfile
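The least-squares system above fits a similarity transform x1 = a*x2 - b*y2 + x0, y1 = b*x2 + a*y2 + y0 to the GCP pairs. A self-contained sketch that fits the same system to synthetic points and reports the RMS residual:

import numpy as np

n = 6
pts2 = np.random.random((n, 2))*100.0                       # warp-image GCPs (x, y)
pts1 = pts2.dot(np.array([[0.9, -0.2], [0.2, 0.9]]).T) + np.array([10.0, -5.0])
A = np.zeros((2*n, 4))
for i in range(n):
    A[2*i, :]   = [pts2[i, 0], -pts2[i, 1], 1, 0]
    A[2*i+1, :] = [pts2[i, 1],  pts2[i, 0], 0, 1]
a, b, x0, y0 = np.linalg.lstsq(A, pts1.ravel())[0]
pred = pts2.dot(np.array([[a, -b], [b, a]]).T) + np.array([x0, y0])
print 'RMS residual at the GCPs: %f' % np.sqrt(np.mean(np.sum((pred - pts1)**2, axis=1)))
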
Ejemplo n.º 48
0
def main(): 
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file1=auxil.select_infile(title='Base image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Base image: %s'%file1    
    else:
        return     
    file2=auxil.select_infile(title='Warp image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols2 = inDataset2.RasterXSize
        rows2 = inDataset2.RasterYSize
        bands2 = inDataset2.RasterCount        
        print 'Warp image: %s'%file2    
    else:
        return 
    file3 = auxil.select_infile(title='GCP file',\
                                  filt='pts')  
    if file3:
        pts1,pts2 = parse_gcp(file3)
    else:
        return
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return   
    image2 = zeros((bands2,rows2,cols2))                                   
    for k in range(bands2):
        band = inDataset2.GetRasterBand(k+1)
        image2[k,:,:]=band.ReadAsArray(0,0,cols2,rows2)
    inDataset2 = None
    n = len(pts1)    
    y = pts1.ravel()
    A = zeros((2*n,4))
    for i in range(n):
        A[2*i,:] =   [pts2[i,0],-pts2[i,1],1,0]
        A[2*i+1,:] = [pts2[i,1], pts2[i,0],0,1]   
    a,b,x0,y0 = linalg.lstsq(A,y)[0]
    R = array([[a,-b],[b,a]])     
    warped = zeros((bands2,rows1,cols1),dtype=uint8) 
    for k in range(bands2):
        tmp = ndimage.affine_transform(image2[k,:,:],R)
        warped[k,:,:]=tmp[-y0:-y0+rows1,-x0:-x0+cols1]   
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,
                    cols1,rows1,bands2,GDT_Byte)    
    geotransform = inDataset1.GetGeoTransform()
    projection = inDataset1.GetProjection()   
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)        
    for k in range(bands2):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(warped[k,:,:],0,0) 
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None       
    print 'Warped image written to: %s'%outfile        
Ejemplo n.º 49
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  get (spatial subset of) one diagonal component (C11, C22 or C33) first    
    file1 = auxil.select_infile(title='Choose one component (C11, C22 or C33)') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    inDataset = None
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return    
#  output image
    outim = np.zeros((9,rows,cols), dtype=np.float32)    
#  get list of all files
    files = os.listdir(path) 
    for afile in files:
        if re.search('hdr|sml',afile):
            continue       
#      single polarimetry  
        if re.search('pwr_geo',afile): 
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None
#      dual and quad polarimetry                
        elif re.search('hh_hh_geo|C11\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None 
        elif re.search('re_hh_hv_geo|C12_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[1,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hh_hv_geo|C12_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[2,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None      
        elif re.search('re_hh_vv_geo|C13_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[3,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hh_vv_geo|C13_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[4,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None       
        elif re.search('hv_hv_geo|C22\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[5,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('re_hv_vv_geo|C23_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[6,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hv_vv_geo|C23_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[7,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None      
        elif re.search('vv_vv_geo|C33\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[8,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None  
    outim = np.nan_to_num(outim)           
    idx = np.where(np.sum(np.abs(outim),axis=(1,2))>0)[0]
    if len(idx) == 0:
        print 'no polarimetric bands found'    
        return
    bands = len(idx)
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)        
    for k in range(bands):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(outim[idx[k],:,:],0,0) 
        outBand.FlushCache() 
    outDataset = None            
    print '%i-band polarimetric image written to: %s'%(bands,outfile)        
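The band ordering assumed above packs the covariance matrix elements as C11, Re(C12), Im(C12), Re(C13), Im(C13), C22, Re(C23), Im(C23), C33. A small self-contained sketch that maps the nonzero bands back to element names (the names are just a labelling convention, not part of auxil):

import numpy as np

element_names = ['C11', 'C12re', 'C12im', 'C13re', 'C13im',
                 'C22', 'C23re', 'C23im', 'C33']
outim = np.zeros((9, 4, 4))                       # stand-in for the assembled image
outim[0] = 1.0; outim[5] = 2.0; outim[8] = 3.0    # e.g. only the diagonal elements present
idx = np.where(np.sum(np.abs(outim), axis=(1, 2)) > 0)[0]
print 'elements found: ' + ', '.join([element_names[k] for k in idx])
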
Ejemplo n.º 50
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    #    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select a class probability image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        classes = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    nitr = auxil.select_integer(3, 'Select number of iterations')
    print '========================='
    print '       PLR'
    print '========================='
    print 'infile:  %s' % infile
    print 'iterations:  %i' % nitr
    start = time.time()
    prob_image = np.zeros((classes, rows, cols))
    for k in range(classes):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
#  compatibility matrix
    Pmn = np.zeros((classes, classes))
    n_samples = (cols - 1) * (rows - 1)
    samplem = np.reshape(prob_image[:, 0:rows - 1, 0:cols - 1],
                         (classes, n_samples))
    samplen = np.reshape(prob_image[:, 1:rows, 0:cols - 1],
                         (classes, n_samples))
    sampleu = np.reshape(prob_image[:, 0:rows - 1, 1:cols],
                         (classes, n_samples))
    max_samplem = np.amax(samplem, axis=0)
    max_samplen = np.amax(samplen, axis=0)
    max_sampleu = np.amax(sampleu, axis=0)
    print 'estimating compatibility matrix...'
    for j in range(n_samples):
        if j % 50000 == 0:
            print '%i samples of %i' % (j, n_samples)
        # convert the numpy indices to plain ints before incrementing the counts
        m1 = int(np.where(samplem[:, j] == max_samplem[j])[0][0])
        n1 = int(np.where(samplen[:, j] == max_samplen[j])[0][0])
        Pmn[m1, n1] += 1
        u1 = int(np.where(sampleu[:, j] == max_sampleu[j])[0][0])
        Pmn[m1, u1] += 1
    for j in range(classes):
        n = np.sum(Pmn[j, :])
        if n > 0:
            Pmn[j, :] /= n
    print Pmn
    itr = 0
    print 'label relaxation...'
    while itr < nitr:
        print 'iteration %i' % (itr + 1)
        # fresh buffer each iteration so this iteration's updates do not feed back into Pn
        temp = prob_image * 0
        Pm = np.zeros(classes)
        Pn = np.zeros(classes)
        for i in range(1, rows - 1):
            if i % 50 == 0:
                print '%i rows processed' % i
            for j in range(1, cols - 1):
                Pm[:] = prob_image[:, i, j]
                Pn[:] = prob_image[:, i - 1, j] / 4
                Pn[:] += prob_image[:, i + 1, j] / 4
                Pn[:] += prob_image[:, i, j - 1] / 4
                Pn[:] += prob_image[:, i, j + 1] / 4
                Pn = np.transpose(Pn)
                if np.sum(Pm) == 0:
                    Pm_new = Pm
                else:
                    Pm_new = Pm * (np.dot(Pmn, Pn)) / (np.dot(
                        np.dot(Pm, Pmn), Pn))
                temp[:, i, j] = Pm_new
        prob_image = temp
        itr += 1


#  write to disk
    prob_image = np.byte(prob_image * 255)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, classes, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(classes):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(prob_image[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
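For a single interior pixel, the relaxation step above reweights the current membership vector Pm by the neighbourhood support np.dot(Pmn, Pn) and renormalizes. A two-class numeric sketch of that update (the numbers are made up):

import numpy as np

Pmn = np.array([[0.8, 0.2], [0.3, 0.7]])   # hypothetical 2-class compatibility matrix
Pm = np.array([0.6, 0.4])                  # centre-pixel membership probabilities
Pn = np.array([0.5, 0.5])                  # average membership of the four neighbours
Pm_new = Pm*np.dot(Pmn, Pn)/np.dot(np.dot(Pm, Pmn), Pn)
print Pm_new, np.sum(Pm_new)               # reweighted and renormalized, sums to 1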