Example #1
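A minimal sketch of the imports this example appears to rely on: the GDAL/NumPy/SciPy lines follow directly from the calls in the code, while the module paths of the helper package (auxil, header) and the origin of em and ctable are assumptions.

import os, time
import numpy as np
import scipy.ndimage as ndi
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly, GDT_Byte
import auxil.auxil as auxil      # select_* dialogs, DWTArray (assumed path)
import auxil.header as header    # ENVI header helper (assumed path)
# em and ctable are assumed to be defined in the same script or in auxil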
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path) 
    infile = auxil.select_infile(title='Select an image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    pos =  auxil.select_pos(bands) 
    if not pos:
        return   
    bands = len(pos)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return   
    class_image = np.zeros((rows,cols),dtype=np.byte)
    K = auxil.select_integer(6,'Number of clusters')
    max_scale = auxil.select_integer(2,'Maximum scaling factor')
    max_scale = min((max_scale,3))
    min_scale = auxil.select_integer(0,'Minimum scaling factor')
    min_scale = min((max_scale,min_scale))
    T0 = auxil.select_float(0.5,'Initial annealing temperature')
    beta = auxil.select_float(0.5,'Spatial mixing parameter')            
    outfile, outfmt = auxil.select_outfilefmt('Select output classification file')  
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt('Select output probability file (optional)')  
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s'%infile
    print 'clusters: %i'%K
    print 'T0:       %f'%T0
    print 'beta:     %f'%beta         

    start = time.time()                                     
#  read in image and compress 
    DWTbands = []               
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(band.ReadAsArray(x0,y0,cols,rows).astype(float),cols,rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
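#  each call to DWTband.filter() presumably applies one level of the discrete
#  wavelet transform, so after the loop quadrant 0 holds the image approximation
#  reduced by a factor of 2**max_scale in each dimension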
    rows,cols = DWTbands[0].get_quadrant(0).shape    
    G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))
#  initialize membership matrix    
    n = G.shape[0]
    U = np.random.random((K,n))
    den = np.sum(U,axis=0)
    for j in range(K):
        U[j,:] = U[j,:]/den
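#  U has one row per cluster and one column per pixel; dividing by the column
#  sums makes each pixel's class memberships sum to 1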
#  cluster at minimum scale
    try:
        U,Ms,Cs,Ps,pdens = em(G,U,T0,beta,rows,cols)
    except:
        print 'em failed' 
        return     
#  sort clusters wrt partition density
    idx = np.argsort(pdens)  
    idx = idx[::-1]
    U = U[idx,:]
#  clustering at increasing scales
    for i in range(max_scale-min_scale):
#      expand U and renormalize         
        U = np.reshape(U,(K,rows,cols))  
        rows = rows*2
        cols = cols*2
        U = ndi.zoom(U,(1,2,2))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))  
#      cluster
        unfrozen = np.where(np.max(U,axis=0) < 0.90)
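#      only pixels whose largest membership is still below 0.90 are presumably
#      re-estimated ('unfrozen') at this finer scale; the rest keep their memberships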
        try:
            U,Ms,Cs,Ps,pdens = em(G,U,0.0,beta,rows,cols,unfrozen=unfrozen)
        except:
            print 'em failed' 
            return                         
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i'%k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale>0:
        U = np.reshape(U,(K,rows,cols))
        f = 2**min_scale  
        rows = rows*f
        cols = cols*f
        U = ndi.zoom(U,(1,f,f))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den        
#  classify
    labels = np.byte(np.argmax(U,axis=0)+1)
    class_image[0:rows,0:cols] = np.reshape(labels,(rows,cols))
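#  argmax over the cluster axis gives labels 1..K; class 0 is left for
#  unclassified pixels (the optional ENVI header below declares K+1 classes)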
    rows1,cols1 = class_image.shape
#  write to disk
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols1,rows1,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
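#      shift the map origin by the spatial subset offset (x0,y0); only gt[1] and
#      gt[5] are used, i.e. a north-up geotransform without rotation terms is assumed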
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None   
#  write class membership probability file if desired  
    if probfile:
        driver = gdal.GetDriverByName(probfmt)    
        outDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) 
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt)) 
        if projection is not None:
            outDataset.SetProjection(projection)  
        for k in range(K):
            probs = np.reshape(U[k,:],(rows,cols))
            probs = np.byte(probs*255)
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(probs,0,0)
            outBand.FlushCache()    
        outDataset = None    
        print 'class probabilities written to: %s'%probfile                                  
    inDataset = None
    if (outfmt == 'ENVI') and (K<19):
#  try to make an ENVI classification header file            
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(ctable[i])
        classlookup +='}'    
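#  ctable is presumably a flat list of RGB components; the loop above assembles
#  the ENVI 'class lookup' string of 3*(K+1) values, one RGB triple per class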
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i'%i for i in range(K+1)]
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()                 
    print 'classification written to: '+outfile       
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
Example #2
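A minimal sketch of the imports this example appears to rely on: the GDAL/NumPy/SciPy lines follow directly from the calls in the code, while the module paths of the helpers (auxil, registerSAR) are assumptions.

import os, time
import numpy as np
from scipy import stats, ndimage
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly, GDT_Float32, GDT_Byte
import auxil.auxil as auxil    # select_* dialogs (assumed path)
import registerSAR             # SAR co-registration helper (assumed path)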
def main():
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  first SAR image    
    infile1 = auxil.select_infile(title='Choose first SAR image') 
    if infile1:                   
        inDataset1 = gdal.Open(infile1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
    print 'first filename:  %s'%infile1
    print 'number of looks: %i'%m  
#  second SAR image    
    infile2 = auxil.select_infile(title='Choose second SAR image') 
    if not infile2:                   
        return
    n = auxil.select_integer(5,msg='Number of looks')
    if not n:
        return
    print 'second filename:  %s'%infile2
    print 'number of looks: %i'%n  
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return    
#  significance level
    sig = auxil.select_float(0.01, 'Choose significance level')   
    print 'Significance level: %f'%sig  
    start = time.time()    
    print 'co-registering...'
    registerSAR.registerSAR(infile1,infile2,'warp.tif','GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2,GA_ReadOnly)     
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize    
    bands2 = inDataset2.RasterCount   
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return   
    if bands == 9:
        print 'Quad polarimetry'  
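#      the 9 bands hold the elements of the 3x3 Hermitian covariance matrix in the
#      order C11, Re(C12), Im(C12), Re(C13), Im(C13), C22, Re(C23), Im(C23), C33;
#      multiplying by the number of looks m converts the look-averaged matrix back
#      to the (complex Wishart distributed) sum over looks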
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows)
#      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(3)    
        im = b.ReadAsArray(0,0,cols,rows)
        a1 = m*(a1 + 1j*im)
#      C13  (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0,0,cols,rows)
        rho1 = m*(rho1 + 1j*im)      
#      C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m*b.ReadAsArray(0,0,cols,rows)    
#      C23 (b1)        
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0,0,cols,rows)
        b1 = m*(b1 + 1j*im)      
#      C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m*b.ReadAsArray(0,0,cols,rows)              
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows)
#      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a2 = n*(a2 + 1j*im)
#      C13  (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0,0,cols,rows)
        rho2 = n*(rho2 + 1j*im)        
#      C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n*b.ReadAsArray(0,0,cols,rows)    
#      C23 (b2)        
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0,0,cols,rows)
        b2 = n*(b2 + 1j*im)        
#      C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n*b.ReadAsArray(0,0,cols,rows)           
        k3    = k1 + k2  
        a3    = a1 + a2
        rho3  = rho1 + rho2
        xsi3  = xsi1 + xsi2
        b3    = b1 + b2
        zeta3 = zeta1 + zeta2           
        det1 = k1*xsi1*zeta1 + 2*np.real(a1*b1*np.conj(rho1)) - xsi1*(abs(rho1)**2) - k1*(abs(b1)**2) - zeta1*(abs(a1)**2)    
        det2 = k2*xsi2*zeta2 + 2*np.real(a2*b2*np.conj(rho2)) - xsi2*(abs(rho2)**2) - k2*(abs(b2)**2) - zeta2*(abs(a2)**2)       
        det3 = k3*xsi3*zeta3 + 2*np.real(a3*b3*np.conj(rho3)) - xsi3*(abs(rho3)**2) - k3*(abs(b3)**2) - zeta3*(abs(a3)**2)       
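#      det1, det2 and det3 are the determinants of the 3x3 Hermitian covariance
#      matrices [[k,a,rho],[conj(a),xsi,b],[conj(rho),conj(b),zeta]] for image 1,
#      image 2 and their sum, written out element-wise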
        p = 3
        f = p**2
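#      cst, rho and omega2 are the constant and finite-sample correction factors
#      of the Wishart likelihood-ratio test; they make the statistic -2*rho*lnQ
#      (computed below) approximately chi-square with f = p**2 degrees of freedom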
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        rho = 1. - (2.*p**2-1.)*(1./n + 1./m - 1./(n+m))/(6.*p)    
        omega2 = -(p*p/4.)*(1. - 1./rho)**2 + p**2*(p**2-1.)*(1./n**2 + 1./m**2 - 1./(n+m)**2)/(24.*rho**2)        
    elif bands == 4:
        print 'Dual polarimetry'  
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows)
#      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a1 = m*(a1 + 1j*im)        
#      C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m*b.ReadAsArray(0,0,cols,rows)          
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows)
#      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a2 = n*(a2 + 1j*im)        
#      C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n*b.ReadAsArray(0,0,cols,rows)        
        k3    = k1 + k2  
        a3    = a1 + a2
        xsi3  = xsi1 + xsi2       
        det1 = k1*xsi1 - abs(a1)**2
        det2 = k2*xsi2 - abs(a2)**2 
        det3 = k3*xsi3 - abs(a3)**2        
        p = 2 
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        f = p**2
        rho = 1-(2*f-1)*(1./n+1./m-1./(n+m))/(6.*p)
        omega2 = -f/4.*(1-1./rho)**2 + f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2)  
    elif bands == 1:
        print 'Single polarimetry'         
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows) 
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows) 
        k3 = k1 + k2
        det1 = k1 
        det2 = k2
        det3 = k3    
        p = 1 
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        f = p**2
        rho = 1-(2.*f-1)*(1./n+1./m-1./(n+m))/(6.*p)
        omega2 = -f/4.*(1-1./rho)**2+f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2)  
    else:   
        print 'Incorrect number of bands'
        return   
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001   
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001 
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001  
    lnQ = cst+m*np.log(det1)+n*np.log(det2)-(n+m)*np.log(det3)
#  test statistic    
    Z = -2*rho*lnQ
#  change probability
    P =  (1.-omega2)*stats.chi2.cdf(Z,[f])+omega2*stats.chi2.cdf(Z,[f+4])
    P =  ndimage.filters.median_filter(P, size = (3,3))
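#  P approximates Prob(test statistic <= Z), so values close to 1 indicate change;
#  the 3x3 median filter simply smooths the probability image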
#  change map
    a255 = np.ones((rows,cols),dtype=np.byte)*255
    a0 = a255*0
    c11 = np.log(k1+0.0001) 
    min1 = np.min(c11)
    max1 = np.max(c11)
    c11 = (c11-min1)*255.0/(max1-min1)  
    c11 = np.where(c11<0,a0,c11)  
    c11 = np.where(c11>255,a255,c11) 
    c11 = np.where(P>(1.0-sig),a0,c11)      
    cmap = np.where(P>(1.0-sig),a255,c11)
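#  pixels with P > 1-sig (change significant at level sig) are set to 255 in cmap
#  and to 0 in the stretched log-intensity backdrop c11, so changes show up red
#  when the three bands written below are displayed as an RGB composite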
#  write to file system        
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z,0,0) 
    outBand.FlushCache() 
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P,0,0) 
    outBand.FlushCache()     
    outDataset = None
    print 'test statistic and probabilities written to: %s'%outfile 
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile=outfile.replace(name,name+'_cmap')
    outDataset = driver.Create(outfile,cols,rows,3,GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection)     
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap,0,0) 
    outBand.FlushCache() 
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11,0,0) 
    outBand.FlushCache()  
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11,0,0) 
    outBand.FlushCache()  
    outDataset = None    
    print 'change map image written to: %s'%outfile   
    print 'elapsed time: '+str(time.time()-start)  
Example #3
def main():
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  first SAR image
    infile1 = auxil.select_infile(title='Choose first SAR image')
    if infile1:
        inDataset1 = gdal.Open(infile1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
    print 'first filename:  %s' % infile1
    print 'number of looks: %i' % m
    #  second SAR image
    infile2 = auxil.select_infile(title='Choose second SAR image')
    if not infile2:
        return
    n = auxil.select_integer(5, msg='Number of looks')
    if not n:
        return
    print 'second filename:  %s' % infile2
    print 'number of looks: %i' % n
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  significance level
    sig = auxil.select_float(0.01, 'Choose significance level')
    print 'Significance level: %f' % sig
    start = time.time()
    print 'co-registering...'
    registerSAR.registerSAR(infile1, infile2, 'warp.tif', 'GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2, GA_ReadOnly)
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize
    bands2 = inDataset2.RasterCount
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return
    if bands == 9:
        print 'Quad polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C13  (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho1 = m * (rho1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b1)
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b1 = m * (b1 + 1j * im)
        #      C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C13  (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho2 = n * (rho2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b2)
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b2 = n * (b2 + 1j * im)
        #      C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        rho3 = rho1 + rho2
        xsi3 = xsi1 + xsi2
        b3 = b1 + b2
        zeta3 = zeta1 + zeta2
        det1 = k1 * xsi1 * zeta1 + 2 * np.real(
            a1 * b1 * np.conj(rho1)) - xsi1 * (abs(rho1)**2) - k1 * (
                abs(b1)**2) - zeta1 * (abs(a1)**2)
        det2 = k2 * xsi2 * zeta2 + 2 * np.real(
            a2 * b2 * np.conj(rho2)) - xsi2 * (abs(rho2)**2) - k2 * (
                abs(b2)**2) - zeta2 * (abs(a2)**2)
        det3 = k3 * xsi3 * zeta3 + 2 * np.real(
            a3 * b3 * np.conj(rho3)) - xsi3 * (abs(rho3)**2) - k3 * (
                abs(b3)**2) - zeta3 * (abs(a3)**2)
        p = 3
        f = p**2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        rho = 1. - (2. * p**2 - 1.) * (1. / n + 1. / m - 1. /
                                       (n + m)) / (6. * p)
        omega2 = -(p * p / 4.) * (1. - 1. / rho)**2 + p**2 * (p**2 - 1.) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 4:
        print 'Dual polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        xsi3 = xsi1 + xsi2
        det1 = k1 * xsi1 - abs(a1)**2
        det2 = k2 * xsi2 - abs(a2)**2
        det3 = k3 * xsi3 - abs(a3)**2
        p = 2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2 * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 1:
        print 'Single polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        det1 = k1
        det2 = k2
        det3 = k3
        p = 1
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2. * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    else:
        print 'Incorrect number of bands'
        return
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001
    lnQ = cst + m * np.log(det1) + n * np.log(det2) - (n + m) * np.log(det3)
    #  test statistic
    Z = -2 * rho * lnQ
    #  change probability
    P = (1. - omega2) * stats.chi2.cdf(Z, [f]) + omega2 * stats.chi2.cdf(
        Z, [f + 4])
    P = ndimage.filters.median_filter(P, size=(3, 3))
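    #  P approximates Prob(test statistic <= Z), so values close to 1 indicate
    #  change; the 3x3 median filter smooths the probability image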
    #  change map
    a255 = np.ones((rows, cols), dtype=np.byte) * 255
    a0 = a255 * 0
    c11 = np.log(k1 + 0.0001)
    min1 = np.min(c11)
    max1 = np.max(c11)
    c11 = (c11 - min1) * 255.0 / (max1 - min1)
    c11 = np.where(c11 < 0, a0, c11)
    c11 = np.where(c11 > 255, a255, c11)
    c11 = np.where(P > (1.0 - sig), a0, c11)
    cmap = np.where(P > (1.0 - sig), a255, c11)
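    #  changed pixels (P > 1-sig) become 255 in cmap and 0 in the backdrop c11,
    #  so they show up red when the three output bands are displayed as RGB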
    #  write to file system
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'test statistic and probabilities written to: %s' % outfile
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile = outfile.replace(name, name + '_cmap')
    outDataset = driver.Create(outfile, cols, rows, 3, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'change map image written to: %s' % outfile
    print 'elapsed time: ' + str(time.time() - start)
Example #4
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    bands = len(pos)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    class_image = np.zeros((rows, cols), dtype=np.byte)
    K = auxil.select_integer(6, 'Number of clusters')
    max_scale = auxil.select_integer(2, 'Maximum scaling factor')
    max_scale = min((max_scale, 3))
    min_scale = auxil.select_integer(0, 'Minimum scaling factor')
    min_scale = min((max_scale, min_scale))
    T0 = auxil.select_float(0.5, 'Initial annealing temperature')
    beta = auxil.select_float(0.5, 'Spatial mixing parameter')
    outfile, outfmt = auxil.select_outfilefmt(
        'Select output classification file')
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt(
        'Select output probability file (optional)')
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s' % infile
    print 'clusters: %i' % K
    print 'T0:       %f' % T0
    print 'beta:     %f' % beta

    start = time.time()
    #  read in image and compress
    DWTbands = []
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(
            band.ReadAsArray(x0, y0, cols, rows).astype(float), cols, rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    rows, cols = DWTbands[0].get_quadrant(0).shape
    G = np.transpose(
        np.array([
            DWTbands[i].get_quadrant(0, float=True).ravel()
            for i in range(bands)
        ]))
    #  initialize membership matrix
    n = G.shape[0]
    U = np.random.random((K, n))
    den = np.sum(U, axis=0)
    for j in range(K):
        U[j, :] = U[j, :] / den
#  cluster at minimum scale
    try:
        U, Ms, Cs, Ps, pdens = em(G, U, T0, beta, rows, cols)
    except:
        print 'em failed'
        return
#  sort clusters wrt partition density
    idx = np.argsort(pdens)
    idx = idx[::-1]
    U = U[idx, :]
    #  clustering at increasing scales
    for i in range(max_scale - min_scale):
        #      expand U and renormalize
        U = np.reshape(U, (K, rows, cols))
        rows = rows * 2
        cols = cols * 2
        U = ndi.zoom(U, (1, 2, 2))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(
            np.array([
                DWTbands[i].get_quadrant(0, float=True).ravel()
                for i in range(bands)
            ]))
        #      cluster
        unfrozen = np.where(np.max(U, axis=0) < 0.90)
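        #      only pixels whose largest membership is still below 0.90 are
        #      presumably re-estimated ('unfrozen') at this finer scale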
        try:
            U, Ms, Cs, Ps, pdens = em(G,
                                      U,
                                      0.0,
                                      beta,
                                      rows,
                                      cols,
                                      unfrozen=unfrozen)
        except:
            print 'em failed'
            return
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i' % k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale > 0:
        U = np.reshape(U, (K, rows, cols))
        f = 2**min_scale
        rows = rows * f
        cols = cols * f
        U = ndi.zoom(U, (1, f, f))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
#  classify
    labels = np.byte(np.argmax(U, axis=0) + 1)
    class_image[0:rows, 0:cols] = np.reshape(labels, (rows, cols))
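    #  argmax over the cluster axis gives labels 1..K; class 0 is left for
    #  unclassified pixels (the optional ENVI header below declares K+1 classes)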
    rows1, cols1 = class_image.shape
    #  write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols1, rows1, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    #  write class membership probability file if desired
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        outDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)
        for k in range(K):
            probs = np.reshape(U[k, :], (rows, cols))
            probs = np.byte(probs * 255)
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(probs, 0, 0)
            outBand.FlushCache()
        outDataset = None
        print 'class probabilities written to: %s' % probfile
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        #  try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(ctable[i])
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i' % i for i in range(K + 1)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'classification written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'