Example #1
def main():

    #   input directory
    in_path = auxil.select_directory(title="Choosing the Image file directory")
    shp = auxil.select_infile()

    #   imagery dataset
    lista = os.listdir(in_path)
    #print in_path
    GQ = []
    data_list = []
    imageList = []
    outputName = ""
    i = 0
    for name in lista:
        GQ.append(str(name))


#

    for k in GQ:
        #print k[-4:]
        try:
            if k.endswith((".tif", ".TIF")):
                data_list.append(k)
        except StandardError, e:
            print "Something went wrong: %s" % e
Example #2
def main():

#   input directory    
    in_path = auxil.select_directory(title="Choosing the Image file directory")
    shp = auxil.select_infile()

#   imagery dataset
    lista = os.listdir(in_path)
    #print in_path
    GQ = []
    data_list=[]
    imageList = []
    outputName = ""
    i = 0
    for name in lista:
        GQ.append(str(name))
#

    for k in GQ:
        #print k[-4:]
        try:
            if k.endswith((".tif", ".TIF")):
                data_list.append(k)
        except StandardError, e:
            print "Something went wrong: %s" % e
Example #3
def main():

#   input directory    
    in_path = auxil.select_directory()

#   imagery dataset
    lista = os.listdir(in_path)
    GQ = []
    i = 0
    for name in lista:
        GQ.append(os.path.join(in_path, name))
    for k in GQ:
        #   process only MOD09GQ (MODIS 250 m surface reflectance) granules
        if os.path.basename(k).startswith("MOD09GQ"):
            calculate_ndvi(k)
Example #4
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)


#  input image, convert to ENVI format
    infile = auxil.select_infile(title='Image file')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        driver = gdal.GetDriverByName('ENVI')
        enviDataset = driver.CreateCopy('entmp', inDataset)
        inDataset = None
        enviDataset = None
    else:
        return
    outfile, outfmt = auxil.select_outfilefmt(title='Output file')
    if not outfile:
        return
    #  RX-algorithm
    img = envi.open('entmp.hdr')
    arr = img.load()
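    #  RX anomaly detector with global background statistics (RX and calc_stats presumably from the spectral package)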
    rx = RX(background=calc_stats(arr))
    res = rx(arr)
    #  output
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(np.asarray(res, np.float32), 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
Example #5
def main():
    print '========================='
    print '     Register SAR'
    print '========================='
    print time.asctime()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)
    file0 = auxil.select_infile(title='Base image')
    if not file0:
        return
    file1 = auxil.select_infile(title='Warp image')
    if not file1:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    if registerSAR(file0, file1, outfile, fmt):
        print 'done'
    else:
        print 'registerSAR failed'
Example #6
def main(): 
    print '========================='
    print '     Register SAR'
    print '========================='
    print time.asctime()  
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file0 = auxil.select_infile(title='Base image')
    if not file0:
        return
    file1 = auxil.select_infile(title='Warp image')
    if not file1:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    if registerSAR(file0, file1, outfile, fmt):
        print 'done'
    else:
        print 'registerSAR failed'
Example #7
def main():
    gdal.AllRegister()
    path = auxil.select_directory("Input directory")
    if path:
        os.chdir(path)
    #  input image, convert to ENVI format
    infile = auxil.select_infile(title="Image file")
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        driver = gdal.GetDriverByName("ENVI")
        enviDataset = driver.CreateCopy("entmp", inDataset)
        inDataset = None
        enviDataset = None
    else:
        return
    outfile, outfmt = auxil.select_outfilefmt(title="Output file")
    if not outfile:
        return
    #  RX-algorithm
    img = envi.open("entmp.hdr")
    arr = img.load()
    rx = RX(background=calc_stats(arr))
    res = rx(arr)
    #  output
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(np.asarray(res, np.float32), 0, 0)
    outBand.FlushCache()
    outDataset = None
    print "Result written to %s" % outfile
Example #8
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  get (spatial subset of) the C11 or C33 file first    
    file1 = auxil.select_infile(title='Choose one component (C11, C22 or C33)')
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    inDataset = None
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return    
#  output image
    outim = np.zeros((9,rows,cols), dtype=np.float32)    
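#  band order: C11, Re(C12), Im(C12), Re(C13), Im(C13), C22, Re(C23), Im(C23), C33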
#  get list of all files
    files = os.listdir(path) 
    for afile in files:
        if re.search('hdr|sml',afile):
            continue       
#      single polarimetry  
        if re.search('pwr_geo',afile): 
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None
#      dual and quad polarimetry                
        elif re.search('hh_hh_geo|C11\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None 
        elif re.search('re_hh_hv_geo|C12_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[1,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hh_hv_geo|C12_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[2,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None      
        elif re.search('re_hh_vv_geo|C13_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[3,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hh_vv_geo|C13_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[4,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None       
        elif re.search('hv_hv_geo|C22\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[5,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('re_hv_vv_geo|C23_real\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[6,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None     
        elif re.search('im_hv_vv_geo|C23_imag\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[7,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None      
        elif re.search('vv_vv_geo|C33\.tif',afile):
            inDataset = gdal.Open(afile,GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[8,:,:] = band.ReadAsArray(x0,y0,cols,rows)   
            inDataset = None  
    outim = np.nan_to_num(outim)           
    idx = np.where(np.sum(np.abs(outim), axis=(1, 2)) > 0)[0]
    if len(idx) == 0:
        print 'no polarimetric bands found'
        return
    bands = len(idx)
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)        
    for k in range(bands):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(outim[idx[k],:,:],0,0) 
        outBand.FlushCache() 
    outDataset = None            
    print '%i-band polarimetric image written to: %s'%(bands,outfile)        
Example #9
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  SAR image    
    infile = auxil.select_infile(title='Choose SAR image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return       
    print '========================='
    print '     ENL Estimation'
    print '========================='
    print time.asctime()
    print 'infile:  %s'%infile   
    start = time.time()
    if bands == 9:
        print 'Quad polarimetry'  
#      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
#      C12  (a)
        band = inDataset.GetRasterBand(2)
        a = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        band = inDataset.GetRasterBand(3)    
        im = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        a = a + 1j*im
#      C13  (rho)
        band = inDataset.GetRasterBand(4)
        rho = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        band = inDataset.GetRasterBand(5)
        im = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        rho = rho + 1j*im     
#      C22 (xsi)
        band = inDataset.GetRasterBand(6)
        xsi = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()    
#      C23 (b)        
        band = inDataset.GetRasterBand(7)
        b = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        band = inDataset.GetRasterBand(8)
        im = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        b = b + 1j*im     
#      C33 (zeta)
        band = inDataset.GetRasterBand(9)
        zeta = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()                
        det = k*xsi*zeta + 2*np.real(a*b*np.conj(rho)) - xsi*(abs(rho)**2) - k*(abs(b)**2) - zeta*(abs(a)**2)
        d = 2
    elif bands == 4:
        print 'Dual polarimetry'  
#      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
#      C12  (a)
        band = inDataset.GetRasterBand(2)
        a = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        band = inDataset.GetRasterBand(3)
        im = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel()
        a = a + 1j*im       
#      C22 (xsi)
        band = inDataset.GetRasterBand(4)
        xsi = np.nan_to_num(band.ReadAsArray(x0,y0,cols,rows)).ravel() 
        det = k*xsi - abs(a)**2   
        d = 1   
    elif bands == 1:
        print 'Single polarimetry'         
#      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = band.ReadAsArray(x0,y0,cols,rows).ravel() 
        det = k
        d = 0      
    enl_ml = np.zeros((rows,cols), dtype= np.float32)
    lu = lookup.table()
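    #  lookup table for the ML ENL estimator (apparently tabulated over ENL in steps of 0.1)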
    print 'filtering...'
    print 'row: ',
    sys.stdout.flush()    
    start = time.time()
    for i in range(3,rows-3):
        if i%50 == 0:
            print '%i '%i, 
            sys.stdout.flush()
        windex = get_windex(i,cols)  
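        #  windex: flattened indices of the moving window (7x7, hence the divisions by 49), shifted along the row below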
        for j in range(3,cols-3):  
            detC = det[windex]
            if np.min(detC) > 0.0:
                avlogdetC = np.sum(np.log(detC))/49
                if bands == 9:
                    k1 = np.sum(k[windex])/49
                    a1 = np.sum(a[windex])/49
                    rho1 = np.sum(rho[windex])/49
                    xsi1 = np.sum(xsi[windex])/49
                    b1 = np.sum(b[windex])/49
                    zeta1 = np.sum(zeta[windex])/49
                    detavC = k1*xsi1*zeta1 + 2*np.real(a1*b1*np.conj(rho1)) - xsi1*(np.abs(rho1)**2) - k1*(np.abs(b1)**2) - zeta1*(np.abs(a1)**2)
                elif bands == 4:
                    k1 = np.sum(k[windex])/49
                    xsi1 = np.sum(xsi[windex])/49
                    a1 = np.sum(a[windex])/49   
                    detavC =  k1*xsi1 - np.abs(a1)**2
                else:
                    detavC = np.sum(k[windex])/49
                logdetavC = np.log(detavC)    
                arr =  avlogdetC - logdetavC + lu[:,d]    
                ell = np.where(arr*np.roll(arr,1)<0)[0]
                if len(ell) > 0:
                    enl_ml[i,j] = float(ell[-1])/10.0
            windex += 1
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Float32)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)          
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(enl_ml,0,0) 
    outBand.FlushCache() 
    outDataset = None   
    ya,xa = np.histogram(enl_ml,bins=50)
    ya[0] = 0    
    plt.plot(xa[0:-1],ya)
    plt.show() 
    print ''        
    print 'ENL image written to: %s'%outfile                  
    print 'elapsed time: '+str(time.time()-start)                    
Example #10
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)      
#  reference image    
    file1 = auxil.select_infile(title='Choose reference image') 
    if file1:                  
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  target image     
    file2 = auxil.select_infile(title='Choose target image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize    
        bands = inDataset2.RasterCount
    else:
        return   
    pos2 =  auxil.select_pos(bands)   
    if not pos2:
        return 
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x20,y20,cols2,rows2 = dims
    else:
        return  
#  match dimensions       
    bands = len(pos2)
    if (rows1 != rows2) or (cols1 != cols2) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)             
#  iMAD image     
    file3 = auxil.select_infile(title='Choose iMAD image') 
    if file3:                  
        inDataset3 = gdal.Open(file3,GA_ReadOnly)     
        cols = inDataset3.RasterXSize
        rows = inDataset3.RasterYSize    
        imadbands = inDataset3.RasterCount
    else:
        return   
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x30,y30,cols,rows = dims
    else:
        return     
    if (rows1 != rows) or (cols1 != cols):
        sys.stderr.write("Size mismatch")
        sys.exit(1)    
#  outfile
    outfile, fmt = auxil.select_outfilefmt()   
    if not outfile:
        return    
#  full scene
    fsfile = auxil.select_infile(title='Choose full target scene if desired')               
#  no-change threshold    
    ncpThresh = auxil.select_ncp(0.95)    
    if ncpThresh is None:
        return                 
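#  no-change probabilities from the chi-square band (last band) of the iMAD image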
    chisqr = inDataset3.GetRasterBand(imadbands).ReadAsArray(x30,y30,cols,rows).ravel()
    ncp = 1 - stats.chi2.cdf(chisqr,[imadbands-1])
    idx = np.where(ncp>ncpThresh)[0]
#  split train/test in ratio 2:1 
    tmp = np.asarray(range(len(idx)))
    tst = idx[np.where(np.mod(tmp,3) == 0)]
    trn = idx[np.where(np.mod(tmp,3) > 0)]
    
    print '========================================='
    print '             RADCAL'
    print '========================================='
    print time.asctime()     
    print 'reference: '+file1
    print 'target   : '+file2
    print 'no-change probability threshold: '+str(ncpThresh)
    print 'no-change pixels (train): '+str(len(trn))
    print 'no-change pixels (test): '+str(len(tst))           
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32) 
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10*gt[1]
        gt[3] = gt[3] + y10*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)      
    aa = []
    bb = []  
    i = 1
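#  orthogonal regression on the no-change training pixels, band by band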
    for k in pos1:
        x = inDataset1.GetRasterBand(k).ReadAsArray(x10,y10,cols,rows).astype(float).ravel()
        y = inDataset2.GetRasterBand(k).ReadAsArray(x20,y20,cols,rows).astype(float).ravel() 
        b,a,R = auxil.orthoregress(y[trn],x[trn])
        print '--------------------'
        print 'spectral band:      ', k
        print 'slope:              ', b
        print 'intercept:          ', a
        print 'correlation:        ', R
        print 'means(tgt,ref,nrm): ', np.mean(y[tst]),np.mean(x[tst]),np.mean(a+b*y[tst])
        print 't-test, p-value:    ', stats.ttest_rel(x[tst], a+b*y[tst])
        print 'vars(tgt,ref,nrm)   ', np.var(y[tst]),np.var(x[tst]),np.var(a+b*y[tst])
        print 'F-test, p-value:    ', auxil.fv_test(x[tst], a+b*y[tst])
        aa.append(a)
        bb.append(b)   
        outBand = outDataset.GetRasterBand(i)
        outBand.WriteArray(np.resize(a+b*y,(rows,cols)),0,0) 
        outBand.FlushCache()
        if i <= 10:
            plt.figure(i)    
            ymax = max(y[idx]) 
            xmax = max(x[idx])      
            plt.plot(y[idx],x[idx],'k.',[0,ymax],[a,a+b*ymax],'k-')
            plt.axis([0,ymax,0,xmax])
            plt.title('Band '+str(k))
            plt.xlabel('Target')
            plt.ylabel('Reference')        
        i += 1
    outDataset = None
    print 'result written to: '+outfile        
    if fsfile is not None:
        path = os.path.dirname(fsfile)
        basename = os.path.basename(fsfile)
        root, ext = os.path.splitext(basename)
        fsoutfile = path+'/'+root+'_norm'+ext        
        print 'normalizing '+fsfile+'...' 
        fsDataset = gdal.Open(fsfile,GA_ReadOnly)
        cols = fsDataset.RasterXSize
        rows = fsDataset.RasterYSize    
        driver = fsDataset.GetDriver()
        outDataset = driver.Create(fsoutfile,cols,rows,bands,GDT_Float32)
        projection = fsDataset.GetProjection()
        geotransform = fsDataset.GetGeoTransform()
        if geotransform is not None:
            outDataset.SetGeoTransform(geotransform)
        if projection is not None:
            outDataset.SetProjection(projection) 
        j = 0
        for k in pos2:
            inBand = fsDataset.GetRasterBand(k)
            outBand = outDataset.GetRasterBand(j+1)
            for i in range(rows):
                y = inBand.ReadAsArray(0,i,cols,1)
                outBand.WriteArray(aa[j]+bb[j]*y,0,i) 
            outBand.FlushCache() 
            j += 1      
        outDataset = None    
        print 'result written to: '+fsoutfile
    plt.show()
    print '-------done-----------------------------'
Example #11
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)
#  input image
    infile = auxil.select_infile(title='Image file')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform)
        else:
            print 'No geotransform available'
            return
        imsr = osr.SpatialReference()
        imsr.ImportFromWkt(projection)
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    N = len(pos)
    rasterBands = []
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b))
#  training algorithm
    trainalg = auxil.select_integer(1,
                                    msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM')
    if not trainalg:
        return
#  training data (shapefile)
    trnfile = auxil.select_infile(filt='.shp', title='Train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile, 0)
        trnLayer = trnDatasource.GetLayer()
        trnsr = trnLayer.GetSpatialRef()
    else:
        return
    tstfile = auxil.select_outfile(filt='.tst', title='Test results file')
    if not tstfile:
        print 'No test output'
#  outfile
    outfile, outfmt = auxil.select_outfilefmt(title='Classification file')
    if not outfile:
        return
    if trainalg in (2, 3, 4):
        #      class probabilities file, hidden neurons
        probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file')
    else:
        probfile = None
    if trainalg in (2, 3):
        L = auxil.select_integer(8, 'Number of hidden neurons')
        if not L:
            return
#  coordinate transformation from training to image projection
    ct = osr.CoordinateTransformation(trnsr, imsr)
    #  number of classes
    K = 1
    feature = trnLayer.GetNextFeature()
    while feature:
        classid = feature.GetField('CLASS_ID')
        if int(classid) > K:
            K = int(classid)
        feature = trnLayer.GetNextFeature()
    trnLayer.ResetReading()
    K += 1
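    #  K is the number of classes (largest CLASS_ID + 1); the label vectors below have K components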
    print '========================='
    print 'supervised classification'
    print '========================='
    print time.asctime()
    print 'image:    ' + infile
    print 'training: ' + trnfile
    if trainalg == 1:
        print 'Maximum Likelihood'
    elif trainalg == 2:
        print 'Neural Net (Backprop)'
    elif trainalg == 3:
        print 'Neural Net (Congrad)'
    else:
        print 'Support Vector Machine'
#  loop through the polygons
    Gs = []  # train observations
    ls = []  # class labels
    classnames = '{unclassified'
    classids = set()
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = str(feature.GetField('CLASS_ID'))
        classname = feature.GetField('CLASS_NAME')
        if classid not in classids:
            classnames += ',   ' + classname
        classids.add(classid)
        l = [0.0] * K
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
        #      transform to same projection as image
        polygon.Transform(ct)
        #      convert to a Shapely object
        poly = shapely.wkt.loads(polygon.ExportToWkt())
        #      transform the boundary to pixel coords in numpy
        bdry = np.array(poly.boundary)
        bdry[:, 0] = bdry[:, 0] - gt[0]
        bdry[:, 1] = bdry[:, 1] - gt[3]
        GT = np.mat([[gt[1], gt[2]], [gt[4], gt[5]]])
        bdry = bdry * np.linalg.inv(GT)
        #      polygon in pixel coords
        polygon1 = asPolygon(bdry)
        #      raster over the bounding rectangle
        minx, miny, maxx, maxy = map(int, list(polygon1.bounds))
        pts = []
        for i in range(minx, maxx + 1):
            for j in range(miny, maxy + 1):
                pts.append((i, j))
        multipt = MultiPoint(pts)
        #      intersection as list
        intersection = np.array(multipt.intersection(polygon1),
                                dtype=np.int).tolist()
        #      cut out the bounded image cube
        cube = np.zeros((maxy - miny + 1, maxx - minx + 1, len(rasterBands)))
        k = 0
        for band in rasterBands:
            cube[:, :, k] = band.ReadAsArray(minx, miny, maxx - minx + 1,
                                             maxy - miny + 1)
            k += 1
#      get the training vectors
        for (x, y) in intersection:
            Gs.append(cube[y - miny, x - minx, :])
            ls.append(l)
        polygon = None
        polygon1 = None
        feature.Destroy()
    trnDatasource.Destroy()
    classnames += '}'
    m = len(ls)
    print str(m) + ' training pixel vectors were read in'
    Gs = np.array(Gs)
    ls = np.array(ls)
    #  stretch the pixel vectors to [-1,1] for ffn
    maxx = np.max(Gs, 0)
    minx = np.min(Gs, 0)
    for j in range(N):
        Gs[:, j] = 2 * (Gs[:, j] - minx[j]) / (maxx[j] - minx[j]) - 1.0
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx, :]
    ls = ls[idx, :]
    #  setup output datasets
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        probDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            probDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            probDataset.SetProjection(projection)
        probBands = []
        for k in range(K):
            probBands.append(probDataset.GetRasterBand(k + 1))
    if tstfile:
        #  train on 2/3 training examples
        Gstrn = Gs[0:2 * m // 3, :]
        lstrn = ls[0:2 * m // 3, :]
        Gstst = Gs[2 * m // 3:, :]
        lstst = ls[2 * m // 3:, :]
    else:
        Gstrn = Gs
        lstrn = ls
    if trainalg == 1:
        classifier = sc.Maxlike(Gstrn, lstrn)
    elif trainalg == 2:
        classifier = sc.Ffnbp(Gstrn, lstrn, L)
    elif trainalg == 3:
        classifier = sc.Ffncg(Gstrn, lstrn, L)
    elif trainalg == 4:
        classifier = sc.Svm(Gstrn, lstrn)

    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    result = classifier.train()
    print 'elapsed time %s' % str(time.time() - start)
    if result:
        if trainalg in [2, 3]:
            cost = np.log10(result)
            ymax = np.max(cost)
            ymin = np.min(cost)
            xmax = len(cost)
            plt.plot(range(xmax), cost, 'k')
            plt.axis([0, xmax, ymin - 1, ymax])
            plt.title('Log(Cross entropy)')
            plt.xlabel('Epoch')


#      classify the image
        print 'classifying...'
        start = time.time()
        tile = np.zeros((cols, N))
        for row in range(rows):
            for j in range(N):
                tile[:, j] = rasterBands[j].ReadAsArray(0, row, cols, 1)
                tile[:, j] = 2 * (tile[:, j] - minx[j]) / (maxx[j] -
                                                           minx[j]) - 1.0
            cls, Ms = classifier.classify(tile)
            outBand.WriteArray(np.reshape(cls, (1, cols)), 0, row)
            if probfile:
                Ms = np.byte(Ms * 255)
                for k in range(K):
                    probBands[k].WriteArray(np.reshape(Ms[k, :], (1, cols)), 0,
                                            row)
        outBand.FlushCache()
        print 'elapsed time %s' % str(time.time() - start)
        outDataset = None
        inDataset = None
        if probfile:
            for probBand in probBands:
                probBand.FlushCache()
            probDataset = None
            print 'class probabilities written to: %s' % probfile
        K = lstrn.shape[1] + 1
        if (outfmt == 'ENVI') and (K < 19):
            #          try to make an ENVI classification header file
            hdr = header.Header()
            headerfile = outfile + '.hdr'
            f = open(headerfile)
            line = f.readline()
            envihdr = ''
            while line:
                envihdr += line
                line = f.readline()
            f.close()
            hdr.read(envihdr)
            hdr['file type'] = 'ENVI Classification'
            hdr['classes'] = str(K)
            classlookup = '{0'
            for i in range(1, 3 * K):
                classlookup += ', ' + str(ctable[i])
            classlookup += '}'
            hdr['class lookup'] = classlookup
            hdr['class names'] = classnames
            f = open(headerfile, 'w')
            f.write(str(hdr))
            f.close()
        print 'thematic map written to: %s' % outfile
        if trainalg in [2, 3]:
            print 'please close the cross entropy plot to continue'
            plt.show()
        if tstfile:
            with open(tstfile, 'w') as f:
                print >> f, 'FFN test results for %s' % infile
                print >> f, time.asctime()
                print >> f, 'Classification image: %s' % outfile
                print >> f, 'Class probabilities image: %s' % probfile
                print >> f, lstst.shape[0], lstst.shape[1]
                classes, _ = classifier.classify(Gstst)
                labels = np.argmax(lstst, axis=1) + 1
                for i in range(len(classes)):
                    print >> f, classes[i], labels[i]
                print 'test results written to: %s' % tstfile
        print 'done'
    else:
        print 'an error occurred'
        return
Example #12
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  MS image
    file1 = auxil.select_infile(title='Choose MS image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    num_bands = len(pos1)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
#  PAN image
    file2 = auxil.select_infile(title='Choose PAN image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        bands = inDataset2.RasterCount
    else:
        return
    if bands > 1:
        print 'Must be a single band (panchromatic) image'
        return
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()
    #  outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  resolution ratio
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)')
    if not ratio:
        return
#  MS registration band
    k1 = auxil.select_integer(1, 'MS band for registration')
    if not k1:
        return
    print '========================='
    print '   ATWT Pansharpening'
    print '========================='
    print time.asctime()
    print 'MS  file: ' + file1
    print 'PAN file: ' + file2
    #  read in MS image
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0, 0, 1, 1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands, rows1, cols1)), dtype=dt)
    k = 0
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k, :, :] = band.ReadAsArray(x10, y10, cols1, rows1)
        k += 1
#  if integer assume 11-bit quantization, otherwise must be byte
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS, (0, 2**11))
    else:
        fact = 1.0
#  read in corresponding spatial subset of PAN image
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting'
        return
#  upper left corner pixel in PAN
    gt1 = list(geotransform1)
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10 * gt1[1]
    uly1 = gt1[3] + y10 * gt1[5]
    x20 = int(round(((ulx1 - gt2[0]) / gt2[1])))
    y20 = int(round(((uly1 - gt2[3]) / gt2[5])))
    cols2 = cols1 * ratio
    rows2 = rows1 * ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20, y20, cols2, rows2)
    #  if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN, (0, 2**11))
#  out array
    sharpened = np.zeros((num_bands, rows2, cols2), dtype=np.float32)
    #  compress PAN to resolution of MS image using DWT
    panDWT = auxil.DWTArray(PAN, cols2, rows2)
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)
    #  register (and subset) MS image to the compressed PAN image using the selected MS band
    lines0, samples0 = bn0.shape
    bn1 = MS[k1 - 1, :, :]
    (scale, angle, shift) = auxil.similarity(bn0, bn1)
    tmp = np.zeros((num_bands, lines0, samples0))
    for k in range(num_bands):
        bn1 = MS[k, :, :]
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k, :, :] = bn2[0:lines0, 0:samples0]
    MS = tmp
    smpl = np.random.randint(cols2 * rows2, size=100000)
    print 'Wavelet correlations:'
    #  loop over MS bands
    for k in range(num_bands):
        msATWT = auxil.ATWTArray(PAN)
        r = ratio
        while r > 1:
            msATWT.filter()
            r /= 2


#      sample PAN wavelet details
        X = msATWT.get_band(msATWT.num_iter)
        X = X.ravel()[smpl]
        #      resize the ms band to scale of the pan image
        ms_band = ndii.zoom(MS[k, :, :], ratio)
        #      sample details of MS band
        tmpATWT = auxil.ATWTArray(ms_band)
        r = ratio
        while r > 1:
            tmpATWT.filter()
            r /= 2
        Y = tmpATWT.get_band(msATWT.num_iter)
        Y = Y.ravel()[smpl]
        #      get band for injection
        bnd = tmpATWT.get_band(0)
        tmpATWT = None
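        #      orthogonal regression of the sampled MS details on the PAN details; aa, bb normalize the injected band below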
        aa, bb, R = auxil.orthoregress(X, Y)
        print 'Band ' + str(k + 1) + ': %8.3f' % R
        #      inject the filtered MS band
        msATWT.inject(bnd)
        #      normalize wavelet components and expand
        msATWT.normalize(aa, bb)
        r = ratio
        while r > 1:
            msATWT.invert()
            r /= 2
        sharpened[k, :, :] = msATWT.get_band(0)
    sharpened *= fact  # rescale dynamic range
    msATWT = None
    #  write to disk

    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols2, rows2, num_bands, GDT_Float32)
    gt1[0] += x10 * ratio
    gt1[3] -= y10 * ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)
    for k in range(num_bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(sharpened[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
    inDataset1 = None
    inDataset2 = None
Example #13
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  MS image    
    file1 = auxil.select_infile(title='Choose MS image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    num_bands = len(pos1)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  PAN image     
    file2 = auxil.select_infile(title='Choose PAN image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)       
        bands = inDataset2.RasterCount
    else:
        return   
    if bands>1:
        print 'Must be a single band (panchromatic) image'
        return 
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()        
#  outfile
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return 
#  resolution ratio      
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)') 
    if not ratio:
        return        
#  MS registration band    
    k1 = auxil.select_integer(1, 'MS band for registration') 
    if not k1:
        return  
#  fine adjust
    roll = auxil.select_integer(0, 'Fine adjust (-2 ... 2)') 
    if roll is None:
        return        
    print '========================='
    print '   DWT Pansharpening'
    print '========================='
    print time.asctime()     
    print 'MS  file: '+file1
    print 'PAN file: '+file2       
#  image arrays
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0,0,1,1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands,rows1,cols1)),dtype=dt) 
    k = 0                                   
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k,:,:] = band.ReadAsArray(x10,y10,cols1,rows1)
        k += 1
#  if integer assume 11-bit quantization, otherwise must be byte
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS,(0,2**11)) 
    else:
        fact = 1.0
#  read in corresponding spatial subset of PAN image    
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting' 
        return
#  upper left corner pixel in PAN    
    gt1 = list(geotransform1)               
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10*gt1[1]
    uly1 = gt1[3] + y10*gt1[5]
    x20 = int(round(((ulx1 - gt2[0])/gt2[1])))
    y20 = int(round(((uly1 - gt2[3])/gt2[5])))
    cols2 = cols1*ratio
    rows2 = rows1*ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20,y20,cols2,rows2)        
#  if integer assume 11-bit quantization, otherwise must be byte    
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN,(0,2**11))                                   
#  compress PAN to resolution of MS image  
    panDWT = auxil.DWTArray(PAN,cols2,rows2)          
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0) 
    lines0,samples0 = bn0.shape    
    bn1 = MS[k1-1,:,:]  
#  register (and subset) MS image to compressed PAN image 
    (scale,angle,shift) = auxil.similarity(bn0,bn1)
    tmp = np.zeros((num_bands,lines0,samples0))
    for k in range(num_bands): 
        bn1 = MS[k,:,:]                    
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k,:,:] = bn2[0:lines0,0:samples0]        
    MS = tmp   
    if roll != 0:
#  fine adjust                            
        PAN = np.roll(PAN,roll,axis=0)
        PAN = np.roll(PAN,roll,axis=1)
        panDWT = auxil.DWTArray(PAN,cols2,rows2)          
        r = ratio
        while r > 1:
            panDWT.filter()
            r /= 2                   
#  compress pan once more, extract wavelet quadrants, and restore
    panDWT.filter()  
    fgpan = panDWT.get_quadrant(1)
    gfpan = panDWT.get_quadrant(2)
    ggpan = panDWT.get_quadrant(3)    
    panDWT.invert()       
#  output array            
    sharpened = np.zeros((num_bands,rows2,cols2),dtype=np.float32)     
    aa = np.zeros(3)
    bb = np.zeros(3)       
    print 'Wavelet correlations:'                                   
    for i in range(num_bands):
#      make copy of panDWT and inject ith ms band                
        msDWT = copy.deepcopy(panDWT)
        msDWT.put_quadrant(MS[i,:,:],0)
#      compress once more                 
        msDWT.filter()
#      determine wavelet normalization coefficients
        ms = msDWT.get_quadrant(1)    
        aa[0],bb[0],R = auxil.orthoregress(fgpan.ravel(), ms.ravel())
        Rs = 'Band '+str(i+1)+': %8.3f'%R
        ms = msDWT.get_quadrant(2)
        aa[1],bb[1],R = auxil.orthoregress(gfpan.ravel(), ms.ravel())
        Rs += '%8.3f'%R                     
        ms = msDWT.get_quadrant(3)
        aa[2],bb[2],R = auxil.orthoregress(ggpan.ravel(), ms.ravel()) 
        Rs += '%8.3f'%R    
        print Rs         
#      restore once and normalize wavelet coefficients
        msDWT.invert() 
        msDWT.normalize(aa,bb)   
#      restore completely and collect result
        r = 1
        while r < ratio:
            msDWT.invert()
            r *= 2                            
        sharpened[i,:,:] = msDWT.get_quadrant(0)      
    sharpened *= fact    
#  write to disk       
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,cols2,rows2,num_bands,GDT_Float32)
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)        
    gt1 = list(geotransform1)
    gt1[0] += x10*ratio  
    gt1[3] -= y10*ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))   
    for k in range(num_bands):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(sharpened[k,:,:],0,0) 
        outBand.FlushCache() 
    outDataset = None    
    print 'Result written to %s'%outfile    
    inDataset1 = None
    inDataset2 = None                      
Example #14
def main(): 
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file0=auxil.select_infile(title='Base image') 
    if file0:                   
        inDataset0 = gdal.Open(file0,GA_ReadOnly)     
        cols0 = inDataset0.RasterXSize
        rows0 = inDataset0.RasterYSize
        print 'Base image: %s'%file0    
    else:
        return     
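    #  log-span image of the base scene, used to estimate the similarity transform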
    rasterBand = inDataset0.GetRasterBand(1)
    span0 = rasterBand.ReadAsArray(0,0,cols0,rows0)
    rasterBand = inDataset0.GetRasterBand(4)
    span0 += 2*rasterBand.ReadAsArray(0,0,cols0,rows0)
    rasterBand = inDataset0.GetRasterBand(6)
    span0 += rasterBand.ReadAsArray(0,0,cols0,rows0)  
    span0 = log(real(span0))      
    inDataset0 = None   
    file1=auxil.select_infile(title='Warp image') 
    if file1:                  
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Warp image: %s'%file1    
    else:
        return   
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return   
    image1 = zeros((6,rows1,cols1),dtype=cfloat)                                   
    for k in range(6):
        band = inDataset1.GetRasterBand(k+1)
        image1[k,:,:] = band.ReadAsArray(0,0,cols1,rows1).astype(cfloat)
    inDataset1 = None
    span1 = sum(image1[[0,3,5],:,:],axis=0) + image1[3,:,:]
    span1 = log(real(span1))                
    scale,angle,shift = auxil.similarity(span0, span1)    
    tmp_real = zeros((6,rows0,cols0))
    tmp_imag = zeros((6,rows0,cols0))
    for k in range(6): 
        bn1 = real(image1[k,:,:])                   
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_real[k,:,:] = bn2[0:rows0,0:cols0] 
        bn1 = imag(image1[k,:,:])                   
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_imag[k,:,:] = bn2[0:rows0,0:cols0] 
    image2 = tmp_real + 1j*tmp_imag                  
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,
                    cols0,rows0,6,GDT_CFloat32)
    for k in range(6):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(image2[k,:,:],0,0) 
        outBand.FlushCache()
    outDataset = None
    print 'Warped image written to: %s'%outfile        
Example #15
def main():
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  first SAR image    
    infile1 = auxil.select_infile(title='Choose first SAR image') 
    if infile1:                   
        inDataset1 = gdal.Open(infile1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
    print 'first filename:  %s'%infile1
    print 'number of looks: %i'%m  
#  second SAR image    
    infile2 = auxil.select_infile(title='Choose second SAR image') 
    if not infile2:                   
        return
    n = auxil.select_integer(5,msg='Number of looks')
    if not n:
        return
    print 'second filename:  %s'%infile2
    print 'number of looks: %i'%n  
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return    
#  significance level
    sig = auxil.select_float(0.01, 'Choose significance level')   
    print 'Significance level: %f'%sig
    start = time.time()    
    print 'co-registering...'
    registerSAR.registerSAR(infile1,infile2,'warp.tif','GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2,GA_ReadOnly)     
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize    
    bands2 = inDataset2.RasterCount   
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return   
    if bands == 9:
        print 'Quad polarimetry'  
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows)
#      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(3)    
        im = b.ReadAsArray(0,0,cols,rows)
        a1 = m*(a1 + 1j*im)
#      C13  (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0,0,cols,rows)
        rho1 = m*(rho1 + 1j*im)      
#      C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m*b.ReadAsArray(0,0,cols,rows)    
#      C23 (b1)        
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0,0,cols,rows)
        b1 = m*(b1 + 1j*im)      
#      C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m*b.ReadAsArray(0,0,cols,rows)              
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows)
#      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a2 = n*(a2 + 1j*im)
#      C13  (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0,0,cols,rows)
        rho2 = n*(rho2 + 1j*im)        
#      C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n*b.ReadAsArray(0,0,cols,rows)    
#      C23 (b2)        
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0,0,cols,rows)
        b2 = n*(b2 + 1j*im)        
#      C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n*b.ReadAsArray(0,0,cols,rows)           
        k3    = k1 + k2  
        a3    = a1 + a2
        rho3  = rho1 + rho2
        xsi3  = xsi1 + xsi2
        b3    = b1 + b2
        zeta3 = zeta1 + zeta2           
        det1 = k1*xsi1*zeta1 + 2*np.real(a1*b1*np.conj(rho1)) - xsi1*(abs(rho1)**2) - k1*(abs(b1)**2) - zeta1*(abs(a1)**2)    
        det2 = k2*xsi2*zeta2 + 2*np.real(a2*b2*np.conj(rho2)) - xsi2*(abs(rho2)**2) - k2*(abs(b2)**2) - zeta2*(abs(a2)**2)       
        det3 = k3*xsi3*zeta3 + 2*np.real(a3*b3*np.conj(rho3)) - xsi3*(abs(rho3)**2) - k3*(abs(b3)**2) - zeta3*(abs(a3)**2)       
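#      constants for the chi-square approximation to the distribution of the test statistic -2*rho*ln(Q)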
        p = 3
        f = p**2
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        rho = 1. - (2.*p**2-1.)*(1./n + 1./m - 1./(n+m))/(6.*p)    
        omega2 = -(p*p/4.)*(1. - 1./rho)**2 + p**2*(p**2-1.)*(1./n**2 + 1./m**2 - 1./(n+m)**2)/(24.*rho**2)        
    elif bands == 4:
        print 'Dual polarimetry'  
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows)
#      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a1 = m*(a1 + 1j*im)        
#      C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m*b.ReadAsArray(0,0,cols,rows)          
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows)
#      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0,0,cols,rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0,0,cols,rows)
        a2 = n*(a2 + 1j*im)        
#      C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n*b.ReadAsArray(0,0,cols,rows)        
        k3    = k1 + k2  
        a3    = a1 + a2
        xsi3  = xsi1 + xsi2       
        det1 = k1*xsi1 - abs(a1)**2
        det2 = k2*xsi2 - abs(a2)**2 
        det3 = k3*xsi3 - abs(a3)**2        
        p = 2 
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        f = p**2
        rho = 1-(2*f-1)*(1./n+1./m-1./(n+m))/(6.*p)
        omega2 = -f/4.*(1-1./rho)**2 + f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2)  
    elif bands == 1:
        print 'Single polarimetry'         
#      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m*b.ReadAsArray(0,0,cols,rows) 
#      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n*b.ReadAsArray(0,0,cols,rows) 
        k3 = k1 + k2
        det1 = k1 
        det2 = k2
        det3 = k3    
        p = 1 
        cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) 
        f = p**2
        rho = 1-(2.*f-1)*(1./n+1./m-1./(n+m))/(6.*p)
        omega2 = -f/4.*(1-1./rho)**2+f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2)  
    else:   
        print 'Incorrect number of bands'
        return   
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001   
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001 
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001  
    lnQ = cst+m*np.log(det1)+n*np.log(det2)-(n+m)*np.log(det3)
#  test statistic    
    Z = -2*rho*lnQ
#  change probabilty
    P =  (1.-omega2)*stats.chi2.cdf(Z,[f])+omega2*stats.chi2.cdf(Z,[f+4])
    P =  ndimage.filters.median_filter(P, size = (3,3))
#  change map
    a255 = np.ones((rows,cols),dtype=np.byte)*255
    a0 = a255*0
    c11 = np.log(k1+0.0001) 
    min1 =np.min(c11)
    max1 = np.max(c11)
    c11 = (c11-min1)*255.0/(max1-min1)  
    c11 = np.where(c11<0,a0,c11)  
    c11 = np.where(c11>255,a255,c11) 
    c11 = np.where(P>(1.0-sig),a0,c11)      
    cmap = np.where(P>(1.0-sig),a255,c11)
#  write to file system        
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z,0,0) 
    outBand.FlushCache() 
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P,0,0) 
    outBand.FlushCache()     
    outDataset = None
    print 'test statistic and probabilities written to: %s'%outfile 
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile=outfile.replace(name,name+'_cmap')
    outDataset = driver.Create(outfile,cols,rows,3,GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection)     
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap,0,0) 
    outBand.FlushCache() 
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11,0,0) 
    outBand.FlushCache()  
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11,0,0) 
    outBand.FlushCache()  
    outDataset = None    
    print 'change map image written to: %s'%outfile   
    print 'elapsed time: '+str(time.time()-start)  
Example #16
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path) 
    infile = auxil.select_infile(title='Select an image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    pos =  auxil.select_pos(bands) 
    if not pos:
        return   
    bands = len(pos)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return   
    class_image = np.zeros((rows,cols),dtype=np.byte)
    K = auxil.select_integer(6,'Number of clusters')
    max_scale = auxil.select_integer(2,'Maximum scaling factor')
    max_scale = min((max_scale,3))
    min_scale = auxil.select_integer(0,'Minimum scaling factor')
    min_scale = min((max_scale,min_scale))
    T0 = auxil.select_float(0.5,'Initial annealing temperature')
    beta = auxil.select_float(0.5,'Spatial mixing parameter')            
    outfile, outfmt = auxil.select_outfilefmt('Select output classification file')  
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt('Select output probability file (optional)')  
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s'%infile
    print 'clusters: %i'%K
    print 'T0:       %f'%T0
    print 'beta:     %f'%beta         

    start = time.time()                                     
#  read in image and compress 
    DWTbands = []               
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(band.ReadAsArray(x0,y0,cols,rows).astype(float),cols,rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    rows,cols = DWTbands[0].get_quadrant(0).shape    
    G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))
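#  G: (pixels x bands) data matrix formed from quadrant 0 of each compressed (DWT-filtered) band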
#  initialize membership matrix    
    n = G.shape[0]
    U = np.random.random((K,n))
    den = np.sum(U,axis=0)
    for j in range(K):
        U[j,:] = U[j,:]/den
#  cluster at minimum scale
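#  (em returns class memberships U, cluster means Ms, covariance matrices Cs,
#   Ps -- presumably the cluster priors -- and partition densities pdens)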
    try:
        U,Ms,Cs,Ps,pdens = em(G,U,T0,beta,rows,cols)
    except:
        print 'em failed' 
        return     
#  sort clusters wrt partition density
    idx = np.argsort(pdens)  
    idx = idx[::-1]
    U = U[idx,:]
#  clustering at increasing scales
    for i in range(max_scale-min_scale):
#      expand U and renormalize         
        U = np.reshape(U,(K,rows,cols))  
        rows = rows*2
        cols = cols*2
        U = ndi.zoom(U,(1,2,2))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))  
#      cluster
        unfrozen = np.where(np.max(U,axis=0) < 0.90)
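#      (only pixels whose largest class membership is still below 0.9 are re-clustered at the finer scale)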
        try:
            U,Ms,Cs,Ps,pdens = em(G,U,0.0,beta,rows,cols,unfrozen=unfrozen)
        except:
            print 'em failed' 
            return                         
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i'%k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale>0:
        U = np.reshape(U,(K,rows,cols))
        f = 2**min_scale  
        rows = rows*f
        cols = cols*f
        U = ndi.zoom(U,(1,f,f))
        U = np.reshape(U,(K,rows*cols)) 
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)        
        for j in range(K):
            U[j,:] = U[j,:]/den        
#  classify
    labels = np.byte(np.argmax(U,axis=0)+1)
    class_image[0:rows,0:cols] = np.reshape(labels,(rows,cols))
    rows1,cols1 = class_image.shape
#  write to disk
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols1,rows1,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None   
#  write class membership probability file if desired  
    if probfile:
        driver = gdal.GetDriverByName(probfmt)    
        outDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) 
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt)) 
        if projection is not None:
            outDataset.SetProjection(projection)  
        for k in range(K):
            probs = np.reshape(U[k,:],(rows,cols))
            probs = np.byte(probs*255)
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(probs,0,0)
            outBand.FlushCache()    
        outDataset = None    
        print 'class probabilities written to: %s'%probfile                                  
    inDataset = None
    if (outfmt == 'ENVI') and (K<19):
#  try to make an ENVI classification header file            
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(str(ctable[i]))
        classlookup +='}'    
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i'%i for i in range(K+1)]
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()                 
    print 'classification written to: '+outfile       
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
Example #17
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    #    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select a class probability image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        classes = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    nitr = auxil.select_integer(3, 'Select number of iterations')
    print '========================='
    print '       PLR'
    print '========================='
    print 'infile:  %s' % infile
    print 'iterations:  %i' % nitr
    start = time.time()
    prob_image = np.zeros((classes, rows, cols))
    for k in range(classes):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
#  compatibility matrix
    Pmn = np.zeros((classes, classes))
    n_samples = (cols - 1) * (rows - 1)
    samplem = np.reshape(prob_image[:, 0:rows - 1, 0:cols - 1],
                         (classes, n_samples))
    samplen = np.reshape(prob_image[:, 1:rows, 0:cols - 1],
                         (classes, n_samples))
    sampleu = np.reshape(prob_image[:, 0:rows - 1, 1:cols],
                         (classes, n_samples))
    max_samplem = np.amax(samplem, axis=0)
    max_samplen = np.amax(samplen, axis=0)
    max_sampleu = np.amax(sampleu, axis=0)
    print 'estimating compatibility matrix...'
    for j in range(n_samples):
        if j % 50000 == 0:
            print '%i samples of %i' % (j, n_samples)
        m1 = np.where(samplem[:, j] == max_samplem[j])[0][0]
        n1 = np.where(samplen[:, j] == max_samplen[j])[0][0]
        if isinstance(m1, int) and isinstance(n1, int):
            Pmn[m1, n1] += 1
        u1 = np.where(sampleu[:, j] == max_sampleu[j])[0][0]
        if isinstance(m1, int) and isinstance(u1, int):
            Pmn[m1, u1] += 1
    for j in range(classes):
        n = np.sum(Pmn[j, :])
        if n > 0:
            Pmn[j, :] /= n
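#  Pmn[m, n] now estimates the probability that a neighbouring pixel has class n
#  given that the centre pixel has class m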
    print Pmn
    itr = 0
    temp = prob_image * 0
    print 'label relaxation...'
    while itr < nitr:
        print 'iteration %i' % (itr + 1)
        Pm = np.zeros(classes)
        Pn = np.zeros(classes)
        for i in range(1, rows - 1):
            if i % 50 == 0:
                print '%i rows processed' % i
            for j in range(1, cols - 1):
                Pm[:] = prob_image[:, i, j]
                Pn[:] = prob_image[:, i - 1, j] / 4
                Pn[:] += prob_image[:, i + 1, j] / 4
                Pn[:] += prob_image[:, i, j - 1] / 4
                Pn[:] += prob_image[:, i, j + 1] / 4
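                #  Pn: mean class-probability vector of the 4-neighbourhood of pixel (i, j)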
                Pn = np.transpose(Pn)
                if np.sum(Pm) == 0:
                    Pm_new = Pm
                else:
                    Pm_new = Pm * (np.dot(Pmn, Pn)) / (np.dot(
                        np.dot(Pm, Pmn), Pn))
                temp[:, i, j] = Pm_new
        prob_image = temp
        itr += 1


#  write to disk
    prob_image = np.byte(prob_image * 255)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, classes, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(classes):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(prob_image[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
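The core of the relaxation loop above is the multiplicative update
Pm_new = Pm*(Pmn.Pn)/(Pm.Pmn.Pn). A minimal standalone sketch of one such
update for a single pixel, using made-up numbers (the matrix Pmn and the
vectors Pm, Pn below are hypothetical, not taken from any image):

import numpy as np

Pmn = np.array([[0.9, 0.1],
                [0.2, 0.8]])   # hypothetical 2-class compatibility matrix
Pm = np.array([0.6, 0.4])      # current class probabilities of the pixel
Pn = np.array([0.7, 0.3])      # mean probabilities of its 4 neighbours
# neighbourhood support raises the probability of the class favoured by the neighbours
Pm_new = Pm * np.dot(Pmn, Pn) / np.dot(np.dot(Pm, Pmn), Pn)
print(Pm_new)                  # normalized: the components sum to 1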
Example #18
0
def main():  
    gdal.AllRegister()
    path = auxil.select_directory('Choose input directory')
    if path:
        os.chdir(path)        
#  input image    
    infile = auxil.select_infile(title='Choose image file') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform) 
        else:
            print 'No geotransform available'
            return       
        imsr = osr.SpatialReference()  
        imsr.ImportFromWkt(projection)      
    else:
        return  
    pos =  auxil.select_pos(bands)  
    if not pos:
        return
    N = len(pos) 
    rasterBands = [] 
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b)) 
#  training data (shapefile)      
    trnfile = auxil.select_infile(filt='.shp',title='Choose train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile,0)
        trnLayer = trnDatasource.GetLayer() 
        trnsr = trnLayer.GetSpatialRef()             
    else:
        return
#  hidden neurons
    L = auxil.select_integer(8,'number of hidden neurons')    
    if not L:
        return
#  outfile
    outfile, fmt = auxil.select_outfilefmt()   
    if not outfile:
        return     
#  coordinate transformation from training to image projection   
    ct= osr.CoordinateTransformation(trnsr,imsr) 
#  number of classes    
    feature = trnLayer.GetNextFeature() 
    while feature:
        classid = feature.GetField('CLASS_ID')
        feature = trnLayer.GetNextFeature() 
    trnLayer.ResetReading()    
    K = int(classid)+1       
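#  (classid at this point is the CLASS_ID of the last feature read; K assumes
#   the class IDs run from 0 to K-1 and that the largest ID occurs last)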
    print '========================='
    print '       ffncg'
    print '========================='
    print time.asctime()    
    print 'image:    '+infile
    print 'training: '+trnfile          
#  loop through the polygons    
    Gs = [] # train observations
    ls = [] # class labels
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = feature.GetField('CLASS_ID')
        l = [0 for i in range(K)]
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
#      transform to same projection as image        
        polygon.Transform(ct)  
#      convert to a Shapely object            
        poly = shapely.wkt.loads(polygon.ExportToWkt())
#      transform the boundary to pixel coords in numpy        
        bdry = np.array(poly.boundary) 
        bdry[:,0] = bdry[:,0]-gt[0]
        bdry[:,1] = bdry[:,1]-gt[3]
        GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]])
        bdry = bdry*np.linalg.inv(GT) 
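#      (shift by the map origin gt[0], gt[3] and multiply by the inverse geotransform matrix to obtain pixel coordinates)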
#      polygon in pixel coords        
        polygon1 = asPolygon(bdry)
#      raster over the bounding rectangle        
        minx,miny,maxx,maxy = map(int,list(polygon1.bounds))  
        pts = [] 
        for i in range(minx,maxx+1):
            for j in range(miny,maxy+1): 
                pts.append((i,j))             
        multipt =  MultiPoint(pts)   
#      intersection as list              
        intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist()
#      cut out the bounded image cube               
        cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands)))
        k=0
        for band in rasterBands:
            cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1)
            k += 1
#      get the training vectors
        for (x,y) in intersection:         
            Gs.append(cube[y-miny,x-minx,:])
            ls.append(l)   
        polygon = None
        polygon1 = None            
        feature.Destroy()  
    trnDatasource.Destroy() 
    m = len(ls)       
    print str(m) + ' training pixel vectors were read in' 
    Gs = np.array(Gs) 
    ls = np.array(ls)
#  stretch the pixel vectors to [-1,1]
    maxx = np.max(Gs,0)
    minx = np.min(Gs,0)
    for j in range(N):
        Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx,:] 
    ls = ls[idx,:]     
#  setup output dataset 
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) 
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1) 
#  train on 9/10 training examples         
    Gstrn = Gs[0:9*m//10,:]
    lstrn = ls[0:9*m//10,:]
    affn = Ffncg(Gstrn,lstrn,L)
    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    cost = affn.train(epochs=epochs)
    print 'elapsed time %s' %str(time.time()-start) 
    if cost is not None:
#        cost = np.log10(cost)  
        ymax = np.max(cost)
        ymin = np.min(cost) 
        xmax = len(cost)      
        plt.plot(range(xmax),cost,'k')
        plt.axis([0,xmax,ymin-1,ymax])
        plt.title('Cross entropy')
        plt.xlabel('Epoch')              
#      classify the image           
        print 'classifying...'
        tile = np.zeros((cols,N))    
        for row in range(rows):
            for j in range(N):
                tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1)
                tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
            cls, _ = affn.classify(tile)  
            outBand.WriteArray(np.reshape(cls,(1,cols)),0,row)
        outBand.FlushCache()
        outDataset = None
        inDataset = None  
        print 'thematic map written to: ' + outfile
        print 'please close the cross entropy plot to continue'
        plt.show()
    else:
        print 'an error occurred'
        return 
    
    print 'submitting cross-validation to multyvac'    
    start = time.time()
    jid = mv.submit(traintst,Gs,ls,L,_layer='ms_image_analysis')  
    print 'submission time: %s' %str(time.time()-start)
    start = time.time()    
    job = mv.get(jid)
    result = job.get_result(job) 
    
    
    print 'execution time: %s' %str(time.time()-start)      
    print 'misclassification rate: %f' %np.mean(result)
    print 'standard deviation:     %f' %np.std(result)         
    print '--------done---------------------'       
Example #19
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    m = auxil.select_integer(1000, 'Select training sample size')
    K = auxil.select_integer(6, 'Select number of clusters')
    outfile, outfmt = auxil.select_outfilefmt()
    if not outfile:
        return
    kernel = auxil.select_integer(1, 'Select kernel: 0=linear, 1=Gaussian')
    print '========================='
    print '       kkmeans'
    print '========================='
    print 'infile:  ' + infile
    print 'samples: ' + str(m)
    if kernel == 0:
        print 'kernel:  ' + 'linear'
    else:
        print 'kernel:  ' + 'Gaussian'
    start = time.time()
    #  input data matrix
    XX = np.zeros((cols * rows, bands))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0, y0, cols, rows).astype(float)
        XX[:, k] = np.ravel(band)
        k += 1
#  training data matrix
    idx = np.fix(np.random.random(m) * (cols * rows)).astype(np.integer)
    X = XX[idx, :]
    print 'kernel matrix...'
    # uncentered kernel matrix
    KK, gma = auxil.kernelMatrix(X, kernel=kernel)
    if gma is not None:
        print 'gamma: ' + str(round(gma, 6))


#  initial (random) class labels
    labels = np.random.randint(K, size=m)
    #  iteration
    change = True
    itr = 0
    onesm = np.mat(np.ones(m, dtype=float))
    while change and (itr < 100):
        change = False
        U = np.zeros((K, m))
        for i in range(m):
            U[labels[i], i] = 1
        M = np.diag(1.0 / (np.sum(U, axis=1) + 1.0))
        MU = np.mat(np.dot(M, U))
        Z = (onesm.T) * np.diag(MU * KK * (MU.T)) - 2 * KK * (MU.T)
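        #  Z[i, k] = ||m_k||^2 - 2*phi(x_i).m_k : squared feature-space distance
        #  of sample i to cluster mean k, up to a term independent of k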
        Z = np.array(Z)
        labels1 = (np.argmin(Z, axis=1) % K).ravel()
        if np.sum(labels1 != labels):
            change = True
        labels = labels1
        itr += 1
    print 'iterations: %i' % itr
    #  classify image
    print 'classifying...'
    i = 0
    A = np.diag(MU * KK * (MU.T))
    A = np.tile(A, (cols, 1))
    class_image = np.zeros((rows, cols), dtype=np.byte)
    while i < rows:
        XXi = XX[i * cols:(i + 1) * cols, :]
        KKK, _ = auxil.kernelMatrix(X, XXi, gma=gma, kernel=kernel)
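        #  cross kernel matrix between the m training samples and the pixels of image row i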
        Z = A - 2 * (KKK.T) * (MU.T)
        Z = np.array(Z)
        labels = np.argmin(Z, axis=1).ravel()
        class_image[i, :] = (labels % K) + 1
        i += 1
    sys.stdout.write("\n")
    #  write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        #  try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K)
        classlookup = '{0'
        for i in range(1, 3 * K):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = [str(i + 1) for i in range(K)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
Example #20
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory("Choose working directory")
    #    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title="Select a class probability image")
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        classes = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    nitr = auxil.select_integer(3, "Select number of iterations")
    print "========================="
    print "       PLR"
    print "========================="
    print "infile:  %s" % infile
    print "iterations:  %i" % nitr
    start = time.time()
    prob_image = np.zeros((classes, rows, cols))
    for k in range(classes):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
    #  compatibility matrix
    Pmn = np.zeros((classes, classes))
    n_samples = (cols - 1) * (rows - 1)
    samplem = np.reshape(prob_image[:, 0 : rows - 1, 0 : cols - 1], (classes, n_samples))
    samplen = np.reshape(prob_image[:, 1:rows, 0 : cols - 1], (classes, n_samples))
    sampleu = np.reshape(prob_image[:, 0 : rows - 1, 1:cols], (classes, n_samples))
    max_samplem = np.amax(samplem, axis=0)
    max_samplen = np.amax(samplen, axis=0)
    max_sampleu = np.amax(sampleu, axis=0)
    print "estimating compatibility matrix..."
    for j in range(n_samples):
        if j % 50000 == 0:
            print "%i samples of %i" % (j, n_samples)
        m1 = np.where(samplem[:, j] == max_samplem[j])[0][0]
        n1 = np.where(samplen[:, j] == max_samplen[j])[0][0]
        if isinstance(m1, int) and isinstance(n1, int):
            Pmn[m1, n1] += 1
        u1 = np.where(sampleu[:, j] == max_sampleu[j])[0][0]
        if isinstance(m1, int) and isinstance(u1, int):
            Pmn[m1, u1] += 1
    for j in range(classes):
        n = np.sum(Pmn[j, :])
        if n > 0:
            Pmn[j, :] /= n
    print Pmn
    itr = 0
    temp = prob_image * 0
    print "label relaxation..."
    while itr < nitr:
        print "iteration %i" % (itr + 1)
        Pm = np.zeros(classes)
        Pn = np.zeros(classes)
        for i in range(1, rows - 1):
            if i % 50 == 0:
                print "%i rows processed" % i
            for j in range(1, cols - 1):
                Pm[:] = prob_image[:, i, j]
                Pn[:] = prob_image[:, i - 1, j] / 4
                Pn[:] += prob_image[:, i + 1, j] / 4
                Pn[:] += prob_image[:, i, j - 1] / 4
                Pn[:] += prob_image[:, i, j + 1] / 4
                Pn = np.transpose(Pn)
                if np.sum(Pm) == 0:
                    Pm_new = Pm
                else:
                    Pm_new = Pm * (np.dot(Pmn, Pn)) / (np.dot(np.dot(Pm, Pmn), Pn))
                temp[:, i, j] = Pm_new
        prob_image = temp
        itr += 1
    #  write to disk
    prob_image = np.byte(prob_image * 255)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, classes, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(classes):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(prob_image[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print "result written to: " + outfile
    print "elapsed time: " + str(time.time() - start)
    print "--done------------------------"
Example #21
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    #    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select a class probability image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        K = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    print '========================='
    print '       PLR_reclass'
    print '========================='
    print 'infile:  %s' % infile
    start = time.time()
    prob_image = np.zeros((K, rows, cols))
    for k in range(K):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
    class_image = np.zeros((rows, cols), dtype=np.byte)
    print 'reclassifying...'
    for i in range(rows):
        if i % 50 == 0:
            print '%i rows processed' % i
        for j in range(cols):
            cls = np.where(prob_image[:, i, j] == np.amax(prob_image[:, i,
                                                                     j]))[0][0]
            if isinstance(cls, int):
                class_image[i, j] = cls + 1


#  write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    inDataset = None
    if (fmt == 'ENVI') and (K < 19):
        #          try to make an ENVI classification header file
        classnames = '{unclassified '
        for i in range(K):
            classnames += ', ' + str(i + 1)
        classnames += '}'
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(str(auxil.ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = classnames
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
Example #22
0
def main():
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  first SAR image
    infile1 = auxil.select_infile(title='Choose first SAR image')
    if infile1:
        inDataset1 = gdal.Open(infile1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
    print 'first filename:  %s' % infile1
    print 'number of looks: %i' % m
    #  second SAR image
    infile2 = auxil.select_infile(title='Choose second SAR image')
    if not infile2:
        return
    n = auxil.select_integer(5, msg='Number of looks')
    if not n:
        return
    print 'second filename:  %s' % infile2
    print 'number of looks: %i' % n
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  significance level
    sig = auxil.select_float(0.01, 'Choose significance level')
    print 'Significance level: %f' % sig
    start = time.time()
    print 'co-registering...'
    registerSAR.registerSAR(infile1, infile2, 'warp.tif', 'GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2, GA_ReadOnly)
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize
    bands2 = inDataset2.RasterCount
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return
    if bands == 9:
        print 'Quad polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C13  (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho1 = m * (rho1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b1)
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b1 = m * (b1 + 1j * im)
        #      C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C13  (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho2 = n * (rho2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C23 (b2)
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b2 = n * (b2 + 1j * im)
        #      C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        rho3 = rho1 + rho2
        xsi3 = xsi1 + xsi2
        b3 = b1 + b2
        zeta3 = zeta1 + zeta2
        det1 = k1 * xsi1 * zeta1 + 2 * np.real(
            a1 * b1 * np.conj(rho1)) - xsi1 * (abs(rho1)**2) - k1 * (
                abs(b1)**2) - zeta1 * (abs(a1)**2)
        det2 = k2 * xsi2 * zeta2 + 2 * np.real(
            a2 * b2 * np.conj(rho2)) - xsi2 * (abs(rho2)**2) - k2 * (
                abs(b2)**2) - zeta2 * (abs(a2)**2)
        det3 = k3 * xsi3 * zeta3 + 2 * np.real(
            a3 * b3 * np.conj(rho3)) - xsi3 * (abs(rho3)**2) - k3 * (
                abs(b3)**2) - zeta3 * (abs(a3)**2)
        p = 3
        f = p**2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        rho = 1. - (2. * p**2 - 1.) * (1. / n + 1. / m - 1. /
                                       (n + m)) / (6. * p)
        omega2 = -(p * p / 4.) * (1. - 1. / rho)**2 + p**2 * (p**2 - 1.) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
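        #      (rho and omega2 are the constants of the chi-square-mixture
        #       approximation to the distribution of the test statistic -2*rho*lnQ below)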
    elif bands == 4:
        print 'Dual polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        #      C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        #      C12  (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        #      C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        xsi3 = xsi1 + xsi2
        det1 = k1 * xsi1 - abs(a1)**2
        det2 = k2 * xsi2 - abs(a2)**2
        det3 = k3 * xsi3 - abs(a3)**2
        p = 2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2 * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 1:
        print 'Single polarimetry'
        #      C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        #      C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        det1 = k1
        det2 = k2
        det3 = k3
        p = 1
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2. * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    else:
        print 'Incorrect number of bands'
        return
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001
    lnQ = cst + m * np.log(det1) + n * np.log(det2) - (n + m) * np.log(det3)
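    #  lnQ: log of the likelihood-ratio criterion for equality of the two
    #  (look-scaled) covariance matrices, built from their determinants and
    #  the determinant of their sum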
    #  test statistic
    Z = -2 * rho * lnQ
    #  change probability
    P = (1. - omega2) * stats.chi2.cdf(Z, [f]) + omega2 * stats.chi2.cdf(
        Z, [f + 4])
    P = ndimage.filters.median_filter(P, size=(3, 3))
    #  change map
    a255 = np.ones((rows, cols), dtype=np.byte) * 255
    a0 = a255 * 0
    c11 = np.log(k1 + 0.0001)
    min1 = np.min(c11)
    max1 = np.max(c11)
    c11 = (c11 - min1) * 255.0 / (max1 - min1)
    c11 = np.where(c11 < 0, a0, c11)
    c11 = np.where(c11 > 255, a255, c11)
    c11 = np.where(P > (1.0 - sig), a0, c11)
    cmap = np.where(P > (1.0 - sig), a255, c11)
    #  write to file system
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'test statistic and probabilities written to: %s' % outfile
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile = outfile.replace(name, name + '_cmap')
    outDataset = driver.Create(outfile, cols, rows, 3, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'change map image written to: %s' % outfile
    print 'elapsed time: ' + str(time.time() - start)
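A minimal standalone sketch of the dual-polarimetric case of the test above,
evaluated for a single pixel pair with made-up numbers (det1, det2 and det3
are hypothetical values of the determinants |m*C1|, |n*C2| and |m*C1 + n*C2|):

import numpy as np
from scipy import stats

m = n = 5                             # equivalent numbers of looks
p = 2                                 # dual polarimetry
f = p**2
det1, det2, det3 = 2.0, 2.0, 16.0     # hypothetical determinants
cst = p*((n + m)*np.log(n + m) - n*np.log(n) - m*np.log(m))
rho = 1 - (2.*f - 1)*(1./n + 1./m - 1./(n + m))/(6.*p)
omega2 = -f/4.*(1 - 1./rho)**2 + f*(f - 1)*(1./n**2 + 1./m**2 - 1./(n + m)**2)/(24.*rho**2)
lnQ = cst + m*np.log(det1) + n*np.log(det2) - (n + m)*np.log(det3)
Z = -2*rho*lnQ
P = (1. - omega2)*stats.chi2.cdf(Z, f) + omega2*stats.chi2.cdf(Z, f + 4)
print('Z = %f, P = %f' % (Z, P))      # P close to 1 flags a change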
Example #23
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  SAR image    
    infile = auxil.select_infile(title='Choose SAR image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  number of looks
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
#  number of iterations
    niter = auxil.select_integer(1,msg='Number of iterations')    
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return       
#  process diagonal bands only
    driver = gdal.GetDriverByName(fmt) 
    if bands == 9:   
        outDataset = driver.Create(outfile,cols,rows,3,GDT_Float32)
        inimage = np.zeros((3,rows,cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0,y0,cols,rows)     
        band = inDataset.GetRasterBand(6)
        inimage[1] = band.ReadAsArray(x0,y0,cols,rows)
        band = inDataset.GetRasterBand(9)
        inimage[2] = band.ReadAsArray(x0,y0,cols,rows)        
    elif bands == 4:
        outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32)        
        inimage = np.zeros((2,rows,cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0,y0,cols,rows)     
        band = inDataset.GetRasterBand(4)
        inimage[1] = band.ReadAsArray(x0,y0,cols,rows) 
    else:
        outDataset = driver.Create(outfile,cols,rows,1,GDT_Float32)
        inimage = inDataset.GetRasterBand(1)  
    outimage = np.copy(inimage)
    print '========================='
    print '    GAMMA MAP FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s'%infile
    print 'number of looks: %i'%m   
    print 'number of iterations: %i'%niter         
    start = time.time() 
    itr = 0
    while itr < niter:
        print 'iteration %i'%(itr+1) 
        if bands == 9:
            for k in range(3):
                outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m)
        elif bands == 4:
            for k in range(2):
                outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m)   
        else:
            outimage = gamma_filter(0,inimage,outimage,rows,cols,m)                  
        itr += 1   
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    if bands == 9:
        for k in range(3):    
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(outimage[k],0,0) 
            outBand.FlushCache() 
    elif bands == 4:
        for k in range(2):    
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(outimage[k],0,0) 
            outBand.FlushCache() 
    else:
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(outimage,0,0) 
        outBand.FlushCache()                     
    outDataset = None
    print 'result written to: '+outfile 
    print 'elapsed time: '+str(time.time()-start)                 
Example #24
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
#    path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)   
    infile = auxil.select_infile(title='Select a class probability image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        K = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return   
    print '========================='
    print '       PLR_reclass'
    print '========================='
    print 'infile:  %s'%infile
    start = time.time() 
    prob_image = np.zeros((K,rows,cols))
    for k in range (K):
        band = inDataset.GetRasterBand(k+1)
        prob_image[k,:,:] = band.ReadAsArray(0,0,cols,rows).astype(float)                                   
    class_image = np.zeros((rows,cols),dtype=np.byte)  
    print 'reclassifying...'
    for i in range(rows):
        if i % 50 == 0:
            print '%i rows processed'%i
        for j in range(cols):
            cls = np.where(prob_image[:,i,j]==np.amax(prob_image[:,i,j]))[0][0]
            if isinstance(cls,int):
                class_image[i,j] = cls+1               
#  write to disk
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None
    inDataset = None
    if (fmt == 'ENVI') and (K<19):
#          try to make an ENVI classification header file 
        classnames = '{unclassified '   
        for i in range(K):
            classnames += ', '+str(i+1)
        classnames += '}'       
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(str(auxil.ctable[i]))
        classlookup +='}'    
        hdr['class lookup'] = classlookup
        hdr['class names'] = classnames
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()       
    print 'result written to: '+outfile    
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
Example #25
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  reference image
    file1 = auxil.select_infile(title='Choose reference image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
#  target image
    file2 = auxil.select_infile(title='Choose target image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize
        bands = inDataset2.RasterCount
    else:
        return
    pos2 = auxil.select_pos(bands)
    if not pos2:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x20, y20, cols2, rows2 = dims
    else:
        return
#  match dimensions
    bands = len(pos2)
    if (rows1 != rows2) or (cols1 != cols2) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)
#  iMAD image
    file3 = auxil.select_infile(title='Choose iMAD image')
    if file3:
        inDataset3 = gdal.Open(file3, GA_ReadOnly)
        cols = inDataset3.RasterXSize
        rows = inDataset3.RasterYSize
        imadbands = inDataset3.RasterCount
    else:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x30, y30, cols, rows = dims
    else:
        return
    if (rows1 != rows) or (cols1 != cols):
        sys.stderr.write("Size mismatch")
        sys.exit(1)
#  outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  full scene
    fsfile = auxil.select_infile(title='Choose full target scene if desired')
    #  no-change threshold
    ncpThresh = auxil.select_ncp(0.95)
    if ncpThresh is None:
        return
    chisqr = inDataset3.GetRasterBand(imadbands).ReadAsArray(
        x30, y30, cols, rows).ravel()
    ncp = 1 - stats.chi2.cdf(chisqr, [imadbands - 1])
    idx = np.where(ncp > ncpThresh)[0]
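    #  ncp: no-change probability from the iMAD chi-square band; pixels with ncp
    #  above the threshold are taken as radiometrically invariant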
    #  split train/test in ratio 2:1
    tmp = np.asarray(range(len(idx)))
    tst = idx[np.where(np.mod(tmp, 3) == 0)]
    trn = idx[np.where(np.mod(tmp, 3) > 0)]

    print '========================================='
    print '             RADCAL'
    print '========================================='
    print time.asctime()
    print 'reference: ' + file1
    print 'target   : ' + file2
    print 'no-change probability threshold: ' + str(ncpThresh)
    print 'no-change pixels (train): ' + str(len(trn))
    print 'no-change pixels (test): ' + str(len(tst))
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10 * gt[1]
        gt[3] = gt[3] + y10 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    aa = []
    bb = []
    i = 1
    for k in pos1:
        x = inDataset1.GetRasterBand(k).ReadAsArray(
            x10, y10, cols, rows).astype(float).ravel()
        y = inDataset2.GetRasterBand(k).ReadAsArray(
            x20, y20, cols, rows).astype(float).ravel()
        b, a, R = auxil.orthoregress(y[trn], x[trn])
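        #  orthogonal regression of reference on target over the no-change training
        #  pixels; the fitted slope b and intercept a define the normalization a + b*y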
        print '--------------------'
        print 'spectral band:      ', k
        print 'slope:              ', b
        print 'intercept:          ', a
        print 'correlation:        ', R
        print 'means(tgt,ref,nrm): ', np.mean(y[tst]), np.mean(
            x[tst]), np.mean(a + b * y[tst])
        print 't-test, p-value:    ', stats.ttest_rel(x[tst], a + b * y[tst])
        print 'vars(tgt,ref,nrm)   ', np.var(y[tst]), np.var(
            x[tst]), np.var(a + b * y[tst])
        print 'F-test, p-value:    ', auxil.fv_test(x[tst], a + b * y[tst])
        aa.append(a)
        bb.append(b)
        outBand = outDataset.GetRasterBand(i)
        outBand.WriteArray(np.resize(a + b * y, (rows, cols)), 0, 0)
        outBand.FlushCache()
        if i <= 10:
            plt.figure(i)
            ymax = max(y[idx])
            xmax = max(x[idx])
            plt.plot(y[idx], x[idx], 'k.', [0, ymax], [a, a + b * ymax], 'k-')
            plt.axis([0, ymax, 0, xmax])
            plt.title('Band ' + str(k))
            plt.xlabel('Target')
            plt.ylabel('Reference')
        i += 1
    outDataset = None
    print 'result written to: ' + outfile
    if fsfile is not None:
        path = os.path.dirname(fsfile)
        basename = os.path.basename(fsfile)
        root, ext = os.path.splitext(basename)
        fsoutfile = path + '/' + root + '_norm' + ext
        print 'normalizing ' + fsfile + '...'
        fsDataset = gdal.Open(fsfile, GA_ReadOnly)
        cols = fsDataset.RasterXSize
        rows = fsDataset.RasterYSize
        driver = fsDataset.GetDriver()
        outDataset = driver.Create(fsoutfile, cols, rows, bands, GDT_Float32)
        projection = fsDataset.GetProjection()
        geotransform = fsDataset.GetGeoTransform()
        if geotransform is not None:
            outDataset.SetGeoTransform(geotransform)
        if projection is not None:
            outDataset.SetProjection(projection)
        j = 0
        for k in pos2:
            inBand = fsDataset.GetRasterBand(k)
            outBand = outDataset.GetRasterBand(j + 1)
            for i in range(rows):
                y = inBand.ReadAsArray(0, i, cols, 1)
                outBand.WriteArray(aa[j] + bb[j] * y, 0, i)
            outBand.FlushCache()
            j += 1
        outDataset = None
        print 'result written to: ' + fsoutfile
    plt.show()
    print '-------done-----------------------------'
Example #26
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)
    file1 = auxil.select_infile(title='Base image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Base image: %s' % file1
    else:
        return
    file2 = auxil.select_infile(title='Warp image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        cols2 = inDataset2.RasterXSize
        rows2 = inDataset2.RasterYSize
        bands2 = inDataset2.RasterCount
        print 'Warp image: %s' % file2
    else:
        return
    file3 = auxil.select_infile(title='GCP file',\
                                  filt='pts')
    if file3:
        pts1, pts2 = parse_gcp(file3)
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    image2 = zeros((bands2, rows2, cols2))
    for k in range(bands2):
        band = inDataset2.GetRasterBand(k + 1)
        image2[k, :, :] = band.ReadAsArray(0, 0, cols2, rows2)
    inDataset2 = None
    n = len(pts1)
    y = pts1.ravel()
    A = zeros((2 * n, 4))
    for i in range(n):
        A[2 * i, :] = [pts2[i, 0], -pts2[i, 1], 1, 0]
        A[2 * i + 1, :] = [pts2[i, 1], pts2[i, 0], 0, 1]
    a, b, x0, y0 = linalg.lstsq(A, y)[0]
    R = array([[a, -b], [b, a]])
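    #  least-squares fit of a four-parameter similarity transform to the GCP pairs:
    #  a, b encode rotation and scale, (x0, y0) the translation, R the rotation-scale matrix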
    warped = zeros((bands2, rows1, cols1), dtype=uint8)
    for k in range(bands2):
        tmp = ndimage.affine_transform(image2[k, :, :], R)
        warped[k, :, :] = tmp[-y0:-y0 + rows1, -x0:-x0 + cols1]
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols1, rows1, bands2, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    projection = inDataset1.GetProjection()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(bands2):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(warped[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None
    print 'Warped image written to: %s' % outfile
Example #27
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    bands = len(pos)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    class_image = np.zeros((rows, cols), dtype=np.byte)
    K = auxil.select_integer(6, 'Number of clusters')
    max_scale = auxil.select_integer(2, 'Maximum scaling factor')
    max_scale = min((max_scale, 3))
    min_scale = auxil.select_integer(0, 'Minimum scaling factor')
    min_scale = min((max_scale, min_scale))
    T0 = auxil.select_float(0.5, 'Initial annealing temperature')
    beta = auxil.select_float(0.5, 'Spatial mixing parameter')
    outfile, outfmt = auxil.select_outfilefmt(
        'Select output classification file')
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt(
        'Select output probability file (optional)')
    print '========================='
    print '     EM clustering'
    print '========================='
    print 'infile:   %s' % infile
    print 'clusters: %i' % K
    print 'T0:       %f' % T0
    print 'beta:     %f' % beta

    start = time.time()
    #  read in image and compress
    DWTbands = []
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(
            band.ReadAsArray(x0, y0, cols, rows).astype(float), cols, rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    rows, cols = DWTbands[0].get_quadrant(0).shape
    G = np.transpose(
        np.array([
            DWTbands[i].get_quadrant(0, float=True).ravel()
            for i in range(bands)
        ]))
    #  initialize membership matrix
    n = G.shape[0]
    U = np.random.random((K, n))
    den = np.sum(U, axis=0)
    for j in range(K):
        U[j, :] = U[j, :] / den
#  cluster at minimum scale
    try:
        U, Ms, Cs, Ps, pdens = em(G, U, T0, beta, rows, cols)
    except:
        print 'em failed'
        return
#  sort clusters wrt partition density
    idx = np.argsort(pdens)
    idx = idx[::-1]
    U = U[idx, :]
    #  clustering at increasing scales
    for i in range(max_scale - min_scale):
        #      expand U and renormalize
        U = np.reshape(U, (K, rows, cols))
        rows = rows * 2
        cols = cols * 2
        U = ndi.zoom(U, (1, 2, 2))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
#      expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(
            np.array([
                DWTbands[i].get_quadrant(0, float=True).ravel()
                for i in range(bands)
            ]))
        #      cluster
        unfrozen = np.where(np.max(U, axis=0) < 0.90)
        try:
            U, Ms, Cs, Ps, pdens = em(G,
                                      U,
                                      0.0,
                                      beta,
                                      rows,
                                      cols,
                                      unfrozen=unfrozen)
        except:
            print 'em failed'
            return
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i' % k
        print Cs[k]
#  up-sample class memberships if necessary
    if min_scale > 0:
        U = np.reshape(U, (K, rows, cols))
        f = 2**min_scale
        rows = rows * f
        cols = cols * f
        U = ndi.zoom(U, (1, f, f))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den


#  classify
    labels = np.byte(np.argmax(U, axis=0) + 1)
    class_image[0:rows, 0:cols] = np.reshape(labels, (rows, cols))
    rows1, cols1 = class_image.shape
    #  write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols1, rows1, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    #  write class membership probability file if desired
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        outDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)
        for k in range(K):
            probs = np.reshape(U[k, :], (rows, cols))
            probs = np.byte(probs * 255)
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(probs, 0, 0)
            outBand.FlushCache()
        outDataset = None
        print 'class probabilities written to: %s' % probfile
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        #  try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i' % i for i in range(K + 1)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'classification written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
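The expand-and-renormalize step that appears twice above (once per scale inside the clustering loop, once for the final up-sampling) is worth isolating. A minimal, self-contained sketch of that step; the helper name is chosen here for illustration:

import numpy as np
import scipy.ndimage as ndi

def upsample_memberships(U, K, rows, cols, f):
    # U: (K, rows*cols) class-membership matrix, f: integer zoom factor
    U = np.reshape(U, (K, rows, cols))
    U = ndi.zoom(U, (1, f, f))            # spline interpolation per class image
    rows, cols = rows * f, cols * f
    U = np.reshape(U, (K, rows * cols))
    U[U < 0.0] = 0.0                      # clip interpolation undershoot
    U = U / np.sum(U, axis=0)             # renormalize so memberships sum to 1
    return U, rows, cols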
Example #28

def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  SAR image    
    infile = auxil.select_infile(title='Choose SAR image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset    
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])    
#  number of looks
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
#  output file
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return       
#  get filter weights from span image
    b = np.ones((rows,cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0,y0,cols,rows).ravel()
    if bands==9:      
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    elif bands==4:
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()    
    edge_idx = np.zeros((rows,cols),dtype=int)
    print '========================='
    print '       MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s'%infile
    print 'number of looks: %i'%m     
    print 'Determining filter weights from span image'    
    start = time.time()
    print 'row: ',
    sys.stdout.flush()     
    for j in range(3,rows-3):
        if j%50 == 0:
            print '%i '%j, 
            sys.stdout.flush()
        windex = get_windex(j,cols)
        for i in range(3,cols-3):            
            wind = np.reshape(span[windex],(7,7))         
#          3x3 compression
            w = congrid.congrid(wind,(3,3),method='spline',centre=True)
#          get appropriate edge mask
            es = [np.sum(edges[p]*w) for p in range(4)]
            idx = np.argmax(es)  
            if idx == 0:
                if np.abs(w[1,1]-w[1,0]) < np.abs(w[1,1]-w[1,2]):
                    edge_idx[j,i] = 0
                else:
                    edge_idx[j,i] = 4
            elif idx == 1:
                if np.abs(w[1,1]-w[2,0]) < np.abs(w[1,1]-w[0,2]):
                    edge_idx[j,i] = 1
                else:
                    edge_idx[j,i] = 5                
            elif idx == 2:
                if np.abs(w[1,1]-w[0,1]) < np.abs(w[1,1]-w[2,1]):
                    edge_idx[j,i] = 6
                else:
                    edge_idx[j,i] = 2  
            elif idx == 3:
                if np.abs(w[1,1]-w[0,0]) < np.abs(w[1,1]-w[2,2]):
                    edge_idx[j,i] = 7
                else:
                    edge_idx[j,i] = 3 
            edge = templates[edge_idx[j,i]]  
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
            if varg > 0:
                b[j,i] = np.max( ((1.0 - gbar**2/(varg*m))/(1.0+1.0/m), 0.0) )        
            windex += 1
    print ' done'        
#  filter the image
    outim = np.zeros((rows,cols),dtype=np.float32)
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()        
    if projection is not None:
        outDataset.SetProjection(projection) 
    print 'Filtering covariance matrix elements'
    for k in range(1,bands+1):
        print 'band: %i'%(k)
        band = inDataset.GetRasterBand(k)
        band = band.ReadAsArray(0,0,cols,rows)
        gbar = band*0.0
#      get window means
        for j in range(3,rows-3):        
            windex = get_windex(j,cols)
            for i in range(3,cols-3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j,i]]
                wind = wind[edge]
                gbar[j,i] = np.mean(wind)
                windex += 1
#      apply adaptive filter and write to disk
        outim = np.reshape(gbar + b*(band-gbar),(rows,cols))   
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim,0,0) 
        outBand.FlushCache() 
    outDataset = None
    print 'result written to: '+outfile 
    print 'elapsed time: '+str(time.time()-start)                 
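The filter itself reduces to two formulas: the weight b computed from the local statistics of the span image inside the edge-masked window, and the update gbar + b*(g - gbar) applied to each covariance band. A minimal sketch (function names are illustrative, not part of auxil):

def mmse_weight(gbar, varg, m):
    # weight from local mean gbar, local variance varg and number of looks m;
    # ~0 in homogeneous areas, approaching m/(m+1) on strong texture
    if varg <= 0.0:
        return 0.0
    return max((1.0 - gbar**2 / (varg * m)) / (1.0 + 1.0 / m), 0.0)

def mmse_update(g, gbar, b):
    # filtered value: local mean plus down-weighted residual
    return gbar + b * (g - gbar)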
Example #29
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  MS image    
    file1 = auxil.select_infile(title='Choose MS image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    num_bands = len(pos1)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  PAN image     
    file2 = auxil.select_infile(title='Choose PAN image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize    
        bands = inDataset2.RasterCount
    else:
        return   
    if bands>1:
        print 'Must be a single band (panchromatic) image'
        return 
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x20,y20,cols2,rows2 = dims
    else:
        return 
#  outfile
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return 
#  resolution ratio      
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)') 
    if not ratio:
        return        
#  MS registration band    
    k1 = auxil.select_integer(1, 'MS band for registration') 
    if not k1:
        return       
    print '========================='
    print '   ATWT Pansharpening'
    print '========================='
    print time.asctime()     
    print 'MS  file: '+file1
    print 'PAN file: '+file2       
#  image arrays
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0,0,1,1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands,rows1,cols1)),dtype = dt)
#  result will be float32    
    sharpened = np.zeros((num_bands,rows2,cols2),dtype=np.float32) 
    k = 0                                   
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k,:,:] = band.ReadAsArray(x10,y10,cols1,rows1)
        k += 1
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20,y20,cols2,rows2) 
#  if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN,(0,2**11))
    if MS.dtype == np.int16:
        MS = auxil.byteStretch(MS,(0,2**11))                
#  compress PAN to resolution of MS image using DWT  
    panDWT = auxil.DWTArray(PAN,cols2,rows2)          
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)   
#  register (and subset) MS image to compressed PAN image using MS band k1
    lines0,samples0 = bn0.shape    
    bn1 = MS[k1-1,:,:]  
    (scale,angle,shift) = auxil.similarity(bn0,bn1)
    tmp = np.zeros((num_bands,lines0,samples0))
    for k in range(num_bands): 
        bn1 = MS[k,:,:]                    
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k,:,:] = bn2[0:lines0,0:samples0]        
    MS = tmp          
    smpl = np.random.randint(cols2*rows2,size=100000)
    print 'Wavelet correlations:'    
#  loop over MS bands
    for k in range(num_bands):
        msATWT = auxil.ATWTArray(PAN)
        r = ratio
        while r > 1:
            msATWT.filter()
            r /= 2 
#      sample PAN wavelet details
        X = msATWT.get_band(msATWT.num_iter)
        X = X.ravel()[smpl]
#      resize the ms band to scale of the pan image
        ms_band = ndii.zoom(MS[k,:,:],ratio)
#      sample details of MS band
        tmpATWT = auxil.ATWTArray(ms_band)
        r = ratio
        while r > 1:
            tmpATWT.filter()
            r /= 2                 
        Y = tmpATWT.get_band(msATWT.num_iter)
        Y = Y.ravel()[smpl]  
#      get band for injection
        bnd = tmpATWT.get_band(0) 
        tmpATWT = None 
        aa,bb,R = auxil.orthoregress(X,Y)
        print 'Band '+str(k+1)+': %8.3f'%R
#      inject the filtered MS band
        msATWT.inject(bnd)    
#      normalize wavelet components and expand
        msATWT.normalize(aa,bb)                    
        r = ratio
        while r > 1:
            msATWT.invert()
            r /= 2 
        sharpened[k,:,:] = msATWT.get_band(0)                                  
#  write to disk       
    if outfile:
        driver = gdal.GetDriverByName(fmt)   
        outDataset = driver.Create(outfile,
                        cols2,rows2,num_bands,GDT_Float32)
        projection1 = inDataset1.GetProjection()
        geotransform1 = inDataset1.GetGeoTransform()
        geotransform2 = inDataset2.GetGeoTransform()
        if geotransform2 is not None:
            gt2 = list(geotransform2)
            if geotransform1 is not None:
                gt1 = list(geotransform1)
                gt1[0] += x10*gt2[1]  # using PAN pixel sizes
                gt1[3] += y10*gt2[5]
                gt1[1] = gt2[1]
                gt1[2] = gt2[2]
                gt1[4] = gt2[4]
                gt1[5] = gt2[5]
                outDataset.SetGeoTransform(tuple(gt1))
        if projection1 is not None:
            outDataset.SetProjection(projection1)        
        for k in range(num_bands):        
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(sharpened[k,:,:],0,0) 
            outBand.FlushCache() 
        outDataset = None    
    print 'Result written to %s'%outfile    
    inDataset1 = None
    inDataset2 = None                      
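The per-band normalization coefficients aa, bb and the printed correlations R come from auxil.orthoregress, which, judging from its name and the way it is used here, performs an orthogonal (total least squares) regression of the MS wavelet details on the PAN details. A minimal sketch of such a fit, under that assumption:

import numpy as np

def orthoregress_sketch(x, y):
    # fit y = a*x + b along the principal axis of the 2x2 sample covariance
    # matrix (orthogonal regression) and also return the Pearson correlation;
    # assumes the cross-covariance sxy is non-zero
    x = np.asarray(x, dtype=float).ravel()
    y = np.asarray(y, dtype=float).ravel()
    sxx, syy = np.var(x), np.var(y)
    sxy = np.mean((x - x.mean()) * (y - y.mean()))
    a = (syy - sxx + np.sqrt((syy - sxx)**2 + 4.0 * sxy**2)) / (2.0 * sxy)
    b = y.mean() - a * x.mean()
    R = sxy / np.sqrt(sxx * syy)
    return a, b, R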
Example #30
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    #  number of looks
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
#  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  get filter weights from span image
    b = np.ones((rows, cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0, y0, cols, rows).ravel()
    if bands == 9:
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
    elif bands == 4:
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
    edge_idx = np.zeros((rows, cols), dtype=int)
    print '========================='
    print '       MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s' % infile
    print 'number of looks: %i' % m
    print 'Determining filter weights from span image'
    start = time.time()
    print 'row: ',
    sys.stdout.flush()
    for j in range(3, rows - 3):
        if j % 50 == 0:
            print '%i ' % j,
            sys.stdout.flush()
        windex = get_windex(j, cols)
        for i in range(3, cols - 3):
            wind = np.reshape(span[windex], (7, 7))
            #          3x3 compression
            w = congrid.congrid(wind, (3, 3), method='spline', centre=True)
            #          get appropriate edge mask
            es = [np.sum(edges[p] * w) for p in range(4)]
            idx = np.argmax(es)
            if idx == 0:
                if np.abs(w[1, 1] - w[1, 0]) < np.abs(w[1, 1] - w[1, 2]):
                    edge_idx[j, i] = 0
                else:
                    edge_idx[j, i] = 4
            elif idx == 1:
                if np.abs(w[1, 1] - w[2, 0]) < np.abs(w[1, 1] - w[0, 2]):
                    edge_idx[j, i] = 1
                else:
                    edge_idx[j, i] = 5
            elif idx == 2:
                if np.abs(w[1, 1] - w[0, 1]) < np.abs(w[1, 1] - w[2, 1]):
                    edge_idx[j, i] = 6
                else:
                    edge_idx[j, i] = 2
            elif idx == 3:
                if np.abs(w[1, 1] - w[0, 0]) < np.abs(w[1, 1] - w[2, 2]):
                    edge_idx[j, i] = 7
                else:
                    edge_idx[j, i] = 3
            edge = templates[edge_idx[j, i]]
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
            if varg > 0:
                b[j, i] = np.max(
                    ((1.0 - gbar**2 / (varg * m)) / (1.0 + 1.0 / m), 0.0))
            windex += 1
    print ' done'
    #  filter the image
    outim = np.zeros((rows, cols), dtype=np.float32)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    print 'Filtering covariance matrix elements'
    for k in range(1, bands + 1):
        print 'band: %i' % (k)
        band = inDataset.GetRasterBand(k)
        band = band.ReadAsArray(0, 0, cols, rows)
        gbar = band * 0.0
        #      get window means
        for j in range(3, rows - 3):
            windex = get_windex(j, cols)
            for i in range(3, cols - 3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j, i]]
                wind = wind[edge]
                gbar[j, i] = np.mean(wind)
                windex += 1


#      apply adaptive filter and write to disk
        outim = np.reshape(gbar + b * (band - gbar), (rows, cols))
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim, 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
Example #31
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path) 
    infile = auxil.select_infile(title='Select an image') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)     
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
    else:
        return
    pos =  auxil.select_pos(bands) 
    if not pos:
        return   
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return   
    m = auxil.select_integer(1000,'Select training sample size')
    K = auxil.select_integer(6,'Select number of clusters')
    outfile, outfmt = auxil.select_outfilefmt()  
    if not outfile:
        return  
    kernel = auxil.select_integer(1,'Select kernel: 0=linear, 1=Gaussian')    
    print '========================='
    print '       kkmeans'
    print '========================='
    print 'infile:  '+infile
    print 'samples: '+str(m) 
    if kernel == 0:
        print 'kernel:  '+'linear' 
    else:
        print 'kernel:  '+'Gaussian'  
    start = time.time()                                     
#  input data matrix           
    XX = np.zeros((cols*rows,bands))      
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0,y0,cols,rows).astype(float)
        XX[:,k] = np.ravel(band)
        k += 1
#  training data matrix
    idx = np.fix(np.random.random(m)*(cols*rows)).astype(np.integer)
    X = XX[idx,:]  
    print 'kernel matrix...'
# uncentered kernel matrix    
    KK, gma = auxil.kernelMatrix(X,kernel=kernel)      
    if gma is not None:
        print 'gamma: '+str(round(gma,6))    
#  initial (random) class labels
    labels = np.random.randint(K,size = m)  
#  iteration
    change = True
    itr = 0
    onesm = np.mat(np.ones(m,dtype=float))
    while change and (itr < 100):
        change = False
        U = np.zeros((K,m))
        for i in range(m):
            U[labels[i],i] = 1
        M =  np.diag(1.0/(np.sum(U,axis=1)+1.0))
        MU = np.mat(np.dot(M,U))
        Z = (onesm.T)*np.diag(MU*KK*(MU.T)) - 2*KK*(MU.T)
        Z = np.array(Z) 
        labels1 = (np.argmin(Z,axis=1) % K).ravel()
        if np.sum(labels1 != labels):
            change = True
        labels = labels1   
        itr += 1
    print 'iterations: %i'%itr 
#  classify image
    print 'classifying...'
    i = 0
    A = np.diag(MU*KK*(MU.T))
    A = np.tile(A,(cols,1))
    class_image = np.zeros((rows,cols),dtype=np.byte)
    while i < rows:     
        XXi = XX[i*cols:(i+1)*cols,:]
        KKK,_ = auxil.kernelMatrix(X,XXi,gma=gma,kernel=kernel)
        Z = A - 2*(KKK.T)*(MU.T)
        Z= np.array(Z)
        labels = np.argmin(Z,axis=1).ravel()
        class_image[i,:] = (labels % K) +1
        i += 1   
    sys.stdout.write("\n")    
#  write to disk
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)               
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0) 
    outBand.FlushCache() 
    outDataset = None
    inDataset = None
    if (outfmt == 'ENVI') and (K<19):
#  try to make an ENVI classification header file            
        hdr = header.Header() 
        headerfile = outfile+'.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()         
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K)
        classlookup = '{0'
        for i in range(1,3*K):
            classlookup += ', '+str(str(ctable[i]))
        classlookup +='}'    
        hdr['class lookup'] = classlookup
        hdr['class names'] = [str(i+1) for i in range(K)]
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()                 
    print 'result written to: '+outfile    
    print 'elapsed time: '+str(time.time()-start)                        
    print '--done------------------------'  
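The while loop above is kernel k-means written in matrix form; the assignment step is easier to read in isolation. A sketch with the same algebra (helper name chosen here; K_mat is the precomputed kernel matrix):

import numpy as np

def kkmeans_assign(K_mat, labels, K):
    # squared feature-space distance from sample i to cluster k is
    # m_k'K m_k - 2 k(x_i)'m_k, dropping the k(x_i,x_i) term, which is
    # the same for every cluster
    m = K_mat.shape[0]
    U = np.zeros((K, m))
    U[labels, np.arange(m)] = 1.0
    MU = U / (np.sum(U, axis=1)[:, None] + 1.0)        # +1 guards against empty clusters
    quad = np.diag(np.dot(np.dot(MU, K_mat), MU.T))    # m_k' K m_k, one value per cluster
    cross = np.dot(K_mat, MU.T)                        # k(x_i)' m_k, shape (m, K)
    return np.argmin(quad[None, :] - 2.0 * cross, axis=1)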
Example #32
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  MS image
    file1 = auxil.select_infile(title='Choose MS image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    num_bands = len(pos1)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
#  PAN image
    file2 = auxil.select_infile(title='Choose PAN image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        bands = inDataset2.RasterCount
    else:
        return
    if bands > 1:
        print 'Must be a single band (panchromatic) image'
        return
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()
    #  outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  resolution ratio
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)')
    if not ratio:
        return
#  MS registration band
    k1 = auxil.select_integer(1, 'MS band for registration')
    if not k1:
        return
#  fine adjust
    roll = auxil.select_integer(0, 'Fine adjust (-2 ... 2)')
    if roll is None:
        return
    print '========================='
    print '   DWT Pansharpening'
    print '========================='
    print time.asctime()
    print 'MS  file: ' + file1
    print 'PAN file: ' + file2
    #  image arrays
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0, 0, 1, 1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands, rows1, cols1)), dtype=dt)
    k = 0
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k, :, :] = band.ReadAsArray(x10, y10, cols1, rows1)
        k += 1
#  if integer assume 11-bit quantization, otherwise must be byte
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS, (0, 2**11))
    else:
        fact = 1.0
#  read in corresponding spatial subset of PAN image
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting'
        return
#  upper left corner pixel in PAN
    gt1 = list(geotransform1)
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10 * gt1[1]
    uly1 = gt1[3] + y10 * gt1[5]
    x20 = int(round(((ulx1 - gt2[0]) / gt2[1])))
    y20 = int(round(((uly1 - gt2[3]) / gt2[5])))
    cols2 = cols1 * ratio
    rows2 = rows1 * ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20, y20, cols2, rows2)
    #  if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN, (0, 2**11))
#  compress PAN to resolution of MS image
    panDWT = auxil.DWTArray(PAN, cols2, rows2)
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)
    lines0, samples0 = bn0.shape
    bn1 = MS[k1 - 1, :, :]
    #  register (and subset) MS image to compressed PAN image
    (scale, angle, shift) = auxil.similarity(bn0, bn1)
    tmp = np.zeros((num_bands, lines0, samples0))
    for k in range(num_bands):
        bn1 = MS[k, :, :]
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k, :, :] = bn2[0:lines0, 0:samples0]
    MS = tmp
    if roll != 0:
        #  fine adjust
        PAN = np.roll(PAN, roll, axis=0)
        PAN = np.roll(PAN, roll, axis=1)
        panDWT = auxil.DWTArray(PAN, cols2, rows2)
        r = ratio
        while r > 1:
            panDWT.filter()
            r /= 2


#  compress pan once more, extract wavelet quadrants, and restore
    panDWT.filter()
    fgpan = panDWT.get_quadrant(1)
    gfpan = panDWT.get_quadrant(2)
    ggpan = panDWT.get_quadrant(3)
    panDWT.invert()
    #  output array
    sharpened = np.zeros((num_bands, rows2, cols2), dtype=np.float32)
    aa = np.zeros(3)
    bb = np.zeros(3)
    print 'Wavelet correlations:'
    for i in range(num_bands):
        #      make copy of panDWT and inject ith ms band
        msDWT = copy.deepcopy(panDWT)
        msDWT.put_quadrant(MS[i, :, :], 0)
        #      compress once more
        msDWT.filter()
        #      determine wavelet normalization coefficients
        ms = msDWT.get_quadrant(1)
        aa[0], bb[0], R = auxil.orthoregress(fgpan.ravel(), ms.ravel())
        Rs = 'Band ' + str(i + 1) + ': %8.3f' % R
        ms = msDWT.get_quadrant(2)
        aa[1], bb[1], R = auxil.orthoregress(gfpan.ravel(), ms.ravel())
        Rs += '%8.3f' % R
        ms = msDWT.get_quadrant(3)
        aa[2], bb[2], R = auxil.orthoregress(ggpan.ravel(), ms.ravel())
        Rs += '%8.3f' % R
        print Rs
        #      restore once and normalize wavelet coefficients
        msDWT.invert()
        msDWT.normalize(aa, bb)
        #      restore completely and collect result
        r = 1
        while r < ratio:
            msDWT.invert()
            r *= 2
        sharpened[i, :, :] = msDWT.get_quadrant(0)
    sharpened *= fact
    #  write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols2, rows2, num_bands, GDT_Float32)
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)
    gt1 = list(geotransform1)
    gt1[0] += x10 * ratio
    gt1[3] -= y10 * ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))
    for k in range(num_bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(sharpened[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
    inDataset1 = None
    inDataset2 = None
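Unlike the ATWT version, this example locates the PAN subset from the geotransforms rather than by image matching alone: the upper-left corner of the MS subset is mapped to map coordinates and back into PAN pixel indices. Pulled out as a sketch (helper name chosen here; both images assumed north-up):

def ms_offset_in_pan(gt1, gt2, x10, y10):
    # gt1, gt2: GDAL geotransforms of MS and PAN; x10, y10: MS subset offset in pixels
    ulx = gt1[0] + x10 * gt1[1]                 # map x of the subset's upper-left corner
    uly = gt1[3] + y10 * gt1[5]                 # map y (gt[5] is negative for north-up)
    x20 = int(round((ulx - gt2[0]) / gt2[1]))   # corresponding PAN column
    y20 = int(round((uly - gt2[3]) / gt2[5]))   # corresponding PAN row
    return x20, y20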
Example #33
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
#  spatial subset
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    #  number of looks
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
#  number of iterations
    niter = auxil.select_integer(1, msg='Number of iterations')
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return


#  process diagonal bands only
    driver = gdal.GetDriverByName(fmt)
    if bands == 9:
        outDataset = driver.Create(outfile, cols, rows, 3, GDT_Float32)
        inimage = np.zeros((3, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(6)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(9)
        inimage[2] = band.ReadAsArray(x0, y0, cols, rows)
    elif bands == 4:
        outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
        inimage = np.zeros((2, rows, cols))
        band = inDataset.GetRasterBand(1)
        inimage[0] = band.ReadAsArray(x0, y0, cols, rows)
        band = inDataset.GetRasterBand(4)
        inimage[1] = band.ReadAsArray(x0, y0, cols, rows)
    else:
        outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
        band = inDataset.GetRasterBand(1)
        inimage = band.ReadAsArray(x0, y0, cols, rows)
    outimage = np.copy(inimage)
    print '========================='
    print '    GAMMA MAP FILTER'
    print '========================='
    print time.asctime()
    print 'infile:  %s' % infile
    print 'number of looks: %i' % m
    print 'number of iterations: %i' % niter
    start = time.time()
    itr = 0
    while itr < niter:
        print 'iteration %i' % (itr + 1)
        if bands == 9:
            for k in range(3):
                outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
        elif bands == 4:
            for k in range(2):
                outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m)
        else:
            outimage = gamma_filter(0, inimage, outimage, rows, cols, m)
        itr += 1
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    if bands == 9:
        for k in range(3):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    elif bands == 4:
        for k in range(2):
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(outimage[k], 0, 0)
            outBand.FlushCache()
    else:
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(outimage, 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
Example #34
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)
    file0 = auxil.select_infile(title='Base image')
    if file0:
        inDataset0 = gdal.Open(file0, GA_ReadOnly)
        cols0 = inDataset0.RasterXSize
        rows0 = inDataset0.RasterYSize
        print 'Base image: %s' % file0
    else:
        return
    rasterBand = inDataset0.GetRasterBand(1)
    span0 = rasterBand.ReadAsArray(0, 0, cols0, rows0)
    rasterBand = inDataset0.GetRasterBand(4)
    span0 += 2 * rasterBand.ReadAsArray(0, 0, cols0, rows0)
    rasterBand = inDataset0.GetRasterBand(6)
    span0 += rasterBand.ReadAsArray(0, 0, cols0, rows0)
    span0 = log(real(span0))
    inDataset0 = None
    file1 = auxil.select_infile(title='Warp image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Warp image: %s' % file1
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    image1 = zeros((6, rows1, cols1), dtype=cfloat)
    for k in range(6):
        band = inDataset1.GetRasterBand(k + 1)
        image1[k,:,:] = band.ReadAsArray(0,0,cols1,rows1).astype(cfloat)
    inDataset1 = None
    span1 = sum(image1[[0,3,5],:,:],axis=0) + image1[3,:,:]
    span1 = log(real(span1))
    scale, angle, shift = auxil.similarity(span0, span1)
    tmp_real = zeros((6, rows0, cols0))
    tmp_imag = zeros((6, rows0, cols0))
    for k in range(6):
        bn1 = real(image1[k, :, :])
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_real[k, :, :] = bn2[0:rows0, 0:cols0]
        bn1 = imag(image1[k, :, :])
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp_imag[k, :, :] = bn2[0:rows0, 0:cols0]
    image2 = tmp_real + 1j * tmp_imag
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols0, rows0, 6, GDT_CFloat32)
    for k in range(6):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(image2[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'Warped image written to: %s' % outfile
Example #35
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  get (spatial subset of) the C11 or C33 file first
    file1 = auxil.select_infile(
        title='Choose one component (C11, C22 or C33)')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    inDataset = None
    #  spatial subset
    x0, y0, cols, rows = auxil.select_dims([0, 0, cols, rows])
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  output image
    outim = np.zeros((9, rows, cols), dtype=np.float32)
    #  get list of all files
    files = os.listdir(path)
    for afile in files:
        if re.search('hdr|sml', afile):
            continue
#      single polarimetry
        if re.search('pwr_geo', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None


#      dual and quad polarimetry
        elif re.search('hh_hh_geo|C11\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[0, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hh_hv_geo|C12_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[1, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hh_hv_geo|C12_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[2, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hh_vv_geo|C13_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[3, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hh_vv_geo|C13_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[4, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('hv_hv_geo|C22\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[5, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('re_hv_vv_geo|C23_real\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[6, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('im_hv_vv_geo|C23_imag\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[7, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
        elif re.search('vv_vv_geo|C33\.tif', afile):
            inDataset = gdal.Open(afile, GA_ReadOnly)
            band = inDataset.GetRasterBand(1)
            outim[8, :, :] = band.ReadAsArray(x0, y0, cols, rows)
            inDataset = None
    outim = np.nan_to_num(outim)
    idx = np.where(np.sum(np.abs(outim), axis=(1, 2)) > 0)[0]
    if len(idx) == 0:
        print 'no polarimetric bands found'
        return
    bands = len(idx)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(outim[idx[k], :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print '%i-band polarimetric image written to: %s' % (bands, outfile)
Example #36
def main():      
    gdal.AllRegister()
    path = auxil.select_directory('Input directory')
    if path:
        os.chdir(path)        
#  input image    
    infile = auxil.select_infile(title='Image file') 
    if infile:                   
        inDataset = gdal.Open(infile,GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize    
        bands = inDataset.RasterCount
        projection = inDataset.GetProjection()
        geotransform = inDataset.GetGeoTransform()
        if geotransform is not None:
            gt = list(geotransform) 
        else:
            print 'No geotransform available'
            return       
        imsr = osr.SpatialReference()  
        imsr.ImportFromWkt(projection)      
    else:
        return  
    pos =  auxil.select_pos(bands)   
    if not pos:
        return
    N = len(pos) 
    rasterBands = [] 
    for b in pos:
        rasterBands.append(inDataset.GetRasterBand(b)) 
#  training algorithm
    trainalg = auxil.select_integer(1,msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM') 
    if not trainalg:
        return           
#  training data (shapefile)      
    trnfile = auxil.select_infile(filt='.shp',title='Train shapefile')
    if trnfile:
        trnDriver = ogr.GetDriverByName('ESRI Shapefile')
        trnDatasource = trnDriver.Open(trnfile,0)
        trnLayer = trnDatasource.GetLayer() 
        trnsr = trnLayer.GetSpatialRef()             
    else:
        return     
    tstfile = auxil.select_outfile(filt='.tst', title='Test results file') 
    if not tstfile:
        print 'No test output'      
#  outfile
    outfile, outfmt = auxil.select_outfilefmt(title='Classification file')   
    if not outfile:
        return                   
    if trainalg in (2,3,4):
#      class probabilities file, hidden neurons
        probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file')
    else:
        probfile = None     
    if trainalg in (2,3):    
        L = auxil.select_integer(8,'Number of hidden neurons')    
        if not L:
            return                  
#  coordinate transformation from training to image projection   
    ct= osr.CoordinateTransformation(trnsr,imsr) 
#  number of classes    
    K = 1
    feature = trnLayer.GetNextFeature() 
    while feature:
        classid = feature.GetField('CLASS_ID')
        if int(classid)>K:
            K = int(classid)
        feature = trnLayer.GetNextFeature() 
    trnLayer.ResetReading()    
    K += 1       
    print '========================='
    print 'supervised classification'
    print '========================='
    print time.asctime()    
    print 'image:    '+infile
    print 'training: '+trnfile  
    if trainalg == 1:
        print 'Maximum Likelihood'
    elif trainalg == 2:
        print 'Neural Net (Backprop)'
    elif trainalg ==3:
        print 'Neural Net (Congrad)'
    else:
        print 'Support Vector Machine'               
#  loop through the polygons    
    Gs = [] # train observations
    ls = [] # class labels
    classnames = '{unclassified'
    classids = set()
    print 'reading training data...'
    for i in range(trnLayer.GetFeatureCount()):
        feature = trnLayer.GetFeature(i)
        classid = str(feature.GetField('CLASS_ID'))
        classname  = feature.GetField('CLASS_NAME')
        if classid not in classids:
            classnames += ',   '+ classname
        classids = classids | set([classid])
        l = [0 for i in range(K)]
        l[int(classid)] = 1.0
        polygon = feature.GetGeometryRef()
#      transform to same projection as image        
        polygon.Transform(ct)  
#      convert to a Shapely object            
        poly = shapely.wkt.loads(polygon.ExportToWkt())
#      transform the boundary to pixel coords in numpy        
        bdry = np.array(poly.boundary) 
        bdry[:,0] = bdry[:,0]-gt[0]
        bdry[:,1] = bdry[:,1]-gt[3]
        GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]])
        bdry = bdry*np.linalg.inv(GT) 
#      polygon in pixel coords        
        polygon1 = asPolygon(bdry)
#      raster over the bounding rectangle        
        minx,miny,maxx,maxy = map(int,list(polygon1.bounds))  
        pts = [] 
        for i in range(minx,maxx+1):
            for j in range(miny,maxy+1): 
                pts.append((i,j))             
        multipt =  MultiPoint(pts)   
#      intersection as list              
        intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist()
#      cut out the bounded image cube               
        cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands)))
        k=0
        for band in rasterBands:
            cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1)
            k += 1
#      get the training vectors
        for (x,y) in intersection:         
            Gs.append(cube[y-miny,x-minx,:])
            ls.append(l)   
        polygon = None
        polygon1 = None            
        feature.Destroy()  
    trnDatasource.Destroy() 
    classnames += '}'
    m = len(ls)       
    print str(m) + ' training pixel vectors were read in' 
    Gs = np.array(Gs) 
    ls = np.array(ls)
#  stretch the pixel vectors to [-1,1] for ffn
    maxx = np.max(Gs,0)
    minx = np.min(Gs,0)
    for j in range(N):
        Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 
#  random permutation of training data
    idx = np.random.permutation(m)
    Gs = Gs[idx,:] 
    ls = ls[idx,:]     
#  setup output datasets 
    driver = gdal.GetDriverByName(outfmt)    
    outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) 
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection) 
    outBand = outDataset.GetRasterBand(1) 
    if probfile:
        driver = gdal.GetDriverByName(probfmt)    
        probDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) 
        if geotransform is not None:
            probDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            probDataset.SetProjection(projection)  
        probBands = [] 
        for k in range(K):
            probBands.append(probDataset.GetRasterBand(k+1))         
    if tstfile:
#  train on 2/3 training examples         
        Gstrn = Gs[0:2*m//3,:]
        lstrn = ls[0:2*m//3,:] 
        Gstst = Gs[2*m//3:,:]  
        lstst = ls[2*m//3:,:]    
    else:
        Gstrn = Gs
        lstrn = ls         
    if   trainalg == 1:
        classifier = sc.Maxlike(Gstrn,lstrn)
    elif trainalg == 2:
        classifier = sc.Ffnbp(Gstrn,lstrn,L)
    elif trainalg == 3:
        classifier = sc.Ffncg(Gstrn,lstrn,L)
    elif trainalg == 4:
        classifier = sc.Svm(Gstrn,lstrn)         
            
    print 'training on %i pixel vectors...' % np.shape(Gstrn)[0]
    start = time.time()
    result = classifier.train()
    print 'elapsed time %s' %str(time.time()-start) 
    if result:
        if trainalg in [2,3]:
            cost = np.log10(result)  
            ymax = np.max(cost)
            ymin = np.min(cost) 
            xmax = len(cost)      
            plt.plot(range(xmax),cost,'k')
            plt.axis([0,xmax,ymin-1,ymax])
            plt.title('Log(Cross entropy)')
            plt.xlabel('Epoch')              
#      classify the image           
        print 'classifying...'
        start = time.time()
        tile = np.zeros((cols,N))    
        for row in range(rows):
            for j in range(N):
                tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1)
                tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0               
            cls, Ms = classifier.classify(tile)  
            outBand.WriteArray(np.reshape(cls,(1,cols)),0,row)
            if probfile:
                Ms = np.byte(Ms*255)
                for k in range(K):
                    probBands[k].WriteArray(np.reshape(Ms[k,:],(1,cols)),0,row)
        outBand.FlushCache()
        print 'elapsed time %s' %str(time.time()-start)
        outDataset = None
        inDataset = None      
        if probfile:
            for probBand in probBands:
                probBand.FlushCache() 
            probDataset = None
            print 'class probabilities written to: %s'%probfile   
        K =  lstrn.shape[1]+1                     
        if (outfmt == 'ENVI') and (K<19):
#          try to make an ENVI classification header file            
            hdr = header.Header() 
            headerfile = outfile+'.hdr'
            f = open(headerfile)
            line = f.readline()
            envihdr = ''
            while line:
                envihdr += line
                line = f.readline()
            f.close()         
            hdr.read(envihdr)
            hdr['file type'] ='ENVI Classification'
            hdr['classes'] = str(K)
            classlookup = '{0'
            for i in range(1,3*K):
                classlookup += ', '+str(str(ctable[i]))
            classlookup +='}'    
            hdr['class lookup'] = classlookup
            hdr['class names'] = classnames
            f = open(headerfile,'w')
            f.write(str(hdr))
            f.close()             
        print 'thematic map written to: %s'%outfile
        if trainalg in [2,3]:
            print 'please close the cross entropy plot to continue'
            plt.show()
        if tstfile:
            with open(tstfile,'w') as f:
                print >>f, 'FFN test results for %s'%infile
                print >>f, time.asctime()
                print >>f, 'Classification image: %s'%outfile
                print >>f, 'Class probabilities image: %s'%probfile
                print >>f, lstst.shape[0],lstst.shape[1]
                classes, _ = classifier.classify(Gstst)
                labels = np.argmax(lstst,axis=1)+1
                for i in range(len(classes)):
                    print >>f, classes[i], labels[i]              
                f.close()
                print 'test results written to: %s'%tstfile
        print 'done'
    else:
        print 'an error occurred'
        return 
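The training-data loop converts polygon boundaries from map to pixel coordinates by subtracting the geotransform origin and inverting the 2x2 pixel matrix. For the usual north-up case (zero rotation terms) this reduces to a division by the pixel sizes; a minimal sketch under that assumption (helper name is illustrative):

import numpy as np

def map_to_pixel(coords, gt):
    # coords: N x 2 array of (x, y) map coordinates of polygon vertices;
    # gt: GDAL geotransform with gt[2] == gt[4] == 0
    xy = np.asarray(coords, dtype=float)
    px = (xy[:, 0] - gt[0]) / gt[1]
    py = (xy[:, 1] - gt[3]) / gt[5]
    return np.column_stack((px, py))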
Example #37
def main():     
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)        
#  first image    
    file1 = auxil.select_infile(title='Choose first image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize    
        bands = inDataset1.RasterCount
    else:
        return
    pos1 =  auxil.select_pos(bands) 
    if not pos1:
        return   
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return 
#  second image     
    file2 = auxil.select_infile(title='Choose second image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize    
        bands = inDataset2.RasterCount
    else:
        return   
    pos2 =  auxil.select_pos(bands)   
    if not pos2:
        return 
    dims=auxil.select_dims([0,0,cols,rows])  
    if dims:
        x20,y20,cols,rows = dims
    else:
        return    
#  penalization    
    lam = auxil.select_penal(0.0)    
    if lam is None:
        return    
#  outfile
    outfile, fmt = auxil.select_outfilefmt()  
    if not outfile:
        return  
#  match dimensions       
    bands = len(pos2)
    if (rows1 != rows) or (cols1 != cols) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)         
    print '========================='
    print '       iMAD'
    print '========================='
    print time.asctime()     
    print 'time1: '+file1
    print 'time2: '+file2   
    print 'Delta    [canonical correlations]'   
#  iteration of MAD    
    cpm = auxil.Cpm(2*bands)    
    delta = 1.0
    oldrho = np.zeros(bands)     
    itr = 0
    tile = np.zeros((cols,2*bands))
    sigMADs = 0
    means1 = 0
    means2 = 0
    A = 0
    B = 0
    rasterBands1 = []
    rasterBands2 = [] 
    for b in pos1:
        rasterBands1.append(inDataset1.GetRasterBand(b)) 
    for b in pos2:
        rasterBands2.append(inDataset2.GetRasterBand(b))                    
    while (delta > 0.001) and (itr < 100):   
#      spectral tiling for statistics
        for row in range(rows):
            for k in range(bands):
                tile[:,k] = rasterBands1[k].ReadAsArray(x10,y10+row,cols,1)
                tile[:,bands+k] = rasterBands2[k].ReadAsArray(x20,y20+row,cols,1)
#          eliminate no-data pixels (assuming all zeroes)                  
            tst1 = np.sum(tile[:,0:bands],axis=1) 
            tst2 = np.sum(tile[:,bands::],axis=1) 
            idx1 = set(np.where(  (tst1>0)  )[0]) 
            idx2 = set(np.where(  (tst2>0)  )[0]) 
            idx = list(idx1.intersection(idx2))    
            if itr>0:
                mads = np.asarray((tile[:,0:bands]-means1)*A - (tile[:,bands::]-means2)*B)
                chisqr = np.sum((mads/sigMADs)**2,axis=1)
                wts = 1-stats.chi2.cdf(chisqr,[bands])
                cpm.update(tile[idx,:],wts[idx])
            else:
                cpm.update(tile[idx,:])               
#     weighted covariance matrices and means 
        S = cpm.covariance() 
        means = cpm.means()    
#     reset prov means object           
        cpm.__init__(2*bands)  
        s11 = S[0:bands,0:bands]
        s11 = (1-lam)*s11 + lam*np.eye(bands)
        s22 = S[bands:,bands:] 
        s22 = (1-lam)*s22 + lam*np.eye(bands)
        s12 = S[0:bands,bands:]
        s21 = S[bands:,0:bands]        
        c1 = s12*linalg.inv(s22)*s21 
        b1 = s11
        c2 = s21*linalg.inv(s11)*s12
        b2 = s22
#     solution of generalized eigenproblems 
        if bands>1:
            mu2a,A = auxil.geneiv(c1,b1)                
            mu2b,B = auxil.geneiv(c2,b2)               
#          sort a   
            idx = np.argsort(mu2a)
            A = A[:,idx]        
#          sort b   
            idx = np.argsort(mu2b)
            B = B[:,idx] 
            mu2 = mu2b[idx]
        else:
            mu2 = c1/b1
            A = 1/np.sqrt(b1)
            B = 1/np.sqrt(b2)   
#      canonical correlations             
        mu = np.sqrt(mu2)
        a2 = np.diag(A.T*A)
        b2 = np.diag(B.T*B)
        sigma = np.sqrt( (2-lam*(a2+b2))/(1-lam)-2*mu )
        rho=mu*(1-lam)/np.sqrt( (1-lam*a2)*(1-lam*b2) )
#      stopping criterion
        delta = max(abs(rho-oldrho))
        print delta,rho 
        oldrho = rho  
#      tile the sigmas and means             
        sigMADs = np.tile(sigma,(cols,1)) 
        means1 = np.tile(means[0:bands],(cols,1)) 
        means2 = np.tile(means[bands::],(cols,1))
#      ensure sum of positive correlations between X and U is positive
        D = np.diag(1/np.sqrt(np.diag(s11)))  
        s = np.ravel(np.sum(D*s11*A,axis=0)) 
        A = A*np.diag(s/np.abs(s))          
#      ensure positive correlation between each pair of canonical variates        
        cov = np.diag(A.T*s12*B)    
        B = B*np.diag(cov/np.abs(cov))          
        itr += 1                 
# write results to disk
    driver = gdal.GetDriverByName(fmt)    
    outDataset = driver.Create(outfile,cols,rows,bands+1,GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10*gt[1]
        gt[3] = gt[3] + y10*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)            
    outBands = [] 
    for k in range(bands+1):
        outBands.append(outDataset.GetRasterBand(k+1))   
    for row in range(rows):
        for k in range(bands):
            tile[:,k] = rasterBands1[k].ReadAsArray(x10,y10+row,cols,1)
            tile[:,bands+k] = rasterBands2[k].ReadAsArray(x20,y20+row,cols,1)       
        mads = np.asarray((tile[:,0:bands]-means1)*A - (tile[:,bands::]-means2)*B)
        chisqr = np.sum((mads/sigMADs)**2,axis=1) 
        for k in range(bands):
            outBands[k].WriteArray(np.reshape(mads[:,k],(1,cols)),0,row)
        outBands[bands].WriteArray(np.reshape(chisqr,(1,cols)),0,row)                        
    for outBand in outBands: 
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None
    inDataset2 = None  
    print 'result written to: '+outfile
    print '--------done---------------------'     
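The re-weighting that drives the iteration is the chi-square no-change probability computed from the standardized MAD variates. Isolated as a sketch (function name chosen here):

import numpy as np
from scipy import stats

def nochange_weights(mads, sigMADs, bands):
    # under no change the standardized MAD variates are approximately
    # independent standard normals, so the sum of their squares is
    # chi-square with 'bands' degrees of freedom; the weight is the
    # probability of an even larger value
    chisqr = np.sum((mads / sigMADs)**2, axis=1)
    return 1.0 - stats.chi2.cdf(chisqr, bands)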
Example #38
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
#  SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return


#  spatial subset
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    #  output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    print '========================='
    print '     ENL Estimation'
    print '========================='
    print time.asctime()
    print 'infile:  %s' % infile
    start = time.time()
    if bands == 9:
        print 'Quad polarimetry'
        #      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        #      C12  (a)
        band = inDataset.GetRasterBand(2)
        a = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        band = inDataset.GetRasterBand(3)
        im = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        a = a + 1j * im
        #      C13  (rho)
        band = inDataset.GetRasterBand(4)
        rho = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        band = inDataset.GetRasterBand(5)
        im = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        rho = rho + 1j * im
        #      C22 (xsi)
        band = inDataset.GetRasterBand(6)
        xsi = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        #      C23 (b)
        band = inDataset.GetRasterBand(7)
        b = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        band = inDataset.GetRasterBand(8)
        im = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        b = b + 1j * im
        #      C33 (zeta)
        band = inDataset.GetRasterBand(9)
        zeta = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        det = k * xsi * zeta + 2 * np.real(a * b * np.conj(rho)) - xsi * (
            abs(rho)**2) - k * (abs(b)**2) - zeta * (abs(a)**2)
        d = 2
    elif bands == 4:
        print 'Dual polarimetry'
        #      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        #      C12  (a)
        band = inDataset.GetRasterBand(2)
        a = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        band = inDataset.GetRasterBand(3)
        im = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        a = a + 1j * im
        #      C22 (xsi)
        band = inDataset.GetRasterBand(4)
        xsi = np.nan_to_num(band.ReadAsArray(x0, y0, cols, rows)).ravel()
        det = k * xsi - abs(a)**2
        d = 1
    elif bands == 1:
        print 'Single polarimetry'
        #      C11 (k)
        band = inDataset.GetRasterBand(1)
        k = band.ReadAsArray(x0, y0, cols, rows).ravel()
        det = k
        d = 0
    enl_ml = np.zeros((rows, cols), dtype=np.float32)
    lu = lookup.table()
    print 'filtering...'
    print 'row: ',
    sys.stdout.flush()
    start = time.time()
    for i in range(3, rows - 3):
        if i % 50 == 0:
            print '%i ' % i,
            sys.stdout.flush()
        windex = get_windex(i, cols)
        for j in range(3, cols - 3):
            detC = det[windex]
            if np.min(detC) > 0.0:
                avlogdetC = np.sum(np.log(detC)) / 49
                if bands == 9:
                    k1 = np.sum(k[windex]) / 49
                    a1 = np.sum(a[windex]) / 49
                    rho1 = np.sum(rho[windex]) / 49
                    xsi1 = np.sum(xsi[windex]) / 49
                    b1 = np.sum(b[windex]) / 49
                    zeta1 = np.sum(zeta[windex]) / 49
                    detavC = (k1 * xsi1 * zeta1
                              + 2 * np.real(a1 * b1 * np.conj(rho1))
                              - xsi1 * np.abs(rho1)**2
                              - k1 * np.abs(b1)**2
                              - zeta1 * np.abs(a1)**2)
                elif bands == 4:
                    k1 = np.sum(k[windex]) / 49
                    xsi1 = np.sum(xsi[windex]) / 49
                    a1 = np.sum(a[windex]) / 49
                    detavC = k1 * xsi1 - np.abs(a1)**2
                else:
                    detavC = np.sum(k[windex]) / 49
                logdetavC = np.log(detavC)
                arr = avlogdetC - logdetavC + lu[:, d]
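                #      the sign change of arr over the tabulated L values marks
                #      the estimate; the table step is 0.1, hence the /10.0 below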
                ell = np.where(arr * np.roll(arr, 1) < 0)[0]
                if ell.size > 0:
                    enl_ml[i, j] = float(ell[-1]) / 10.0
            windex += 1
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(enl_ml, 0, 0)
    outBand.FlushCache()
    outDataset = None
    ya, xa = np.histogram(enl_ml, bins=50)
    ya[0] = 0
    plt.plot(xa[0:-1], ya)
    plt.show()
    print ''
    print 'ENL image written to: %s' % outfile
    print 'elapsed time: ' + str(time.time() - start)
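
A note on this example: it calls a helper get_windex(i, cols) that is not included in the snippet. From the way the result is used (sums divided by 49, a 3-pixel border, and the windex += 1 shift after each column), it appears to return the flattened indices of a 7x7 window spanning rows i-3..i+3 and columns 0..6. A minimal sketch under that assumption (the body below is inferred, not taken from the original source):

import numpy as np

def get_windex(i, cols):
    # flattened indices of a 7x7 window covering rows i-3..i+3 and
    # columns 0..6 of an image stored row-major with 'cols' columns
    # (hypothetical reconstruction, inferred from the ENL loop above)
    rows7 = np.arange(i - 3, i + 4)
    cols7 = np.arange(7)
    return (rows7[:, None] * cols + cols7[None, :]).ravel()

Adding 1 to this index array, as the loop does with windex += 1, slides the window one column to the right.
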
Example #39
0
def main(): 
    gdal.AllRegister()
    path = auxil.select_directory('Working directory')
    if path:
        os.chdir(path)        
    file1=auxil.select_infile(title='Base image') 
    if file1:                   
        inDataset1 = gdal.Open(file1,GA_ReadOnly)     
        cols1 = inDataset1.RasterXSize
        rows1 = inDataset1.RasterYSize
        print 'Base image: %s'%file1    
    else:
        return     
    file2=auxil.select_infile(title='Warp image') 
    if file2:                  
        inDataset2 = gdal.Open(file2,GA_ReadOnly)     
        cols2 = inDataset2.RasterXSize
        rows2 = inDataset2.RasterYSize
        bands2 = inDataset2.RasterCount        
        print 'Warp image: %s'%file2    
    else:
        return 
    file3 = auxil.select_infile(title='GCP file',\
                                  filt='pts')  
    if file3:
        pts1,pts2 = parse_gcp(file3)
    else:
        return
    outfile,fmt = auxil.select_outfilefmt() 
    if not outfile:
        return   
    image2 = zeros((bands2,rows2,cols2))                                   
    for k in range(bands2):
        band = inDataset2.GetRasterBand(k+1)
        image2[k,:,:]=band.ReadAsArray(0,0,cols2,rows2)
    inDataset2 = None
    n = len(pts1)    
    y = pts1.ravel()
    A = zeros((2*n,4))
    for i in range(n):
        A[2*i,:] =   [pts2[i,0],-pts2[i,1],1,0]
        A[2*i+1,:] = [pts2[i,1], pts2[i,0],0,1]   
    a,b,x0,y0 = linalg.lstsq(A,y)[0]
    R = array([[a,-b],[b,a]])     
    warped = zeros((bands2,rows1,cols1),dtype=uint8) 
    for k in range(bands2):
        tmp = ndimage.affine_transform(image2[k,:,:],R)
        warped[k,:,:]=tmp[-y0:-y0+rows1,-x0:-x0+cols1]   
    driver = gdal.GetDriverByName(fmt)   
    outDataset = driver.Create(outfile,
                    cols1,rows1,bands2,GDT_Byte)    
    geotransform = inDataset1.GetGeoTransform()
    projection = inDataset1.GetProjection()   
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)        
    for k in range(bands2):        
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(warped[k,:,:],0,0) 
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None       
    print 'Warped image written to: %s'%outfile        
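
The least-squares step in this example fits a four-parameter similarity transform (rotation, scale and shift): each ground control point pair contributes the two rows [x2, -y2, 1, 0] and [y2, x2, 0, 1], so the solution [a, b, x0, y0] satisfies x1 ≈ a*x2 - b*y2 + x0 and y1 ≈ b*x2 + a*y2 + y0. A small self-contained check with synthetic points (the point values below are made up for illustration):

import numpy as np

# synthetic warp-image points and a known similarity transform
pts2 = np.array([[10.0, 20.0], [200.0, 40.0], [50.0, 300.0], [250.0, 260.0]])
a0, b0, x0, y0 = 0.98, 0.05, 12.0, -7.0
pts1 = np.column_stack((a0 * pts2[:, 0] - b0 * pts2[:, 1] + x0,
                        b0 * pts2[:, 0] + a0 * pts2[:, 1] + y0))

# same design matrix as in the example above
n = len(pts1)
y = pts1.ravel()
A = np.zeros((2 * n, 4))
for i in range(n):
    A[2 * i, :] = [pts2[i, 0], -pts2[i, 1], 1, 0]
    A[2 * i + 1, :] = [pts2[i, 1], pts2[i, 0], 0, 1]
print(np.linalg.lstsq(A, y)[0])   # recovers approximately [0.98, 0.05, 12.0, -7.0]
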
Example #40
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    #path = arcpy.GetParameterAsText(0)
    if path:
        os.chdir(path)
    file1 = auxil.select_infile(title='Choose first image')
    #file1 = arcpy.GetParameterAsText(1)
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
#  second image
    file2 = auxil.select_infile(title='Choose second image')
    #file2 = arcpy.GetParameterAsText(2)
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        cols = inDataset2.RasterXSize
        rows = inDataset2.RasterYSize
        bands = inDataset2.RasterCount
    else:
        return
    pos2 = auxil.select_pos(bands)
    if not pos2:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x20, y20, cols, rows = dims
    else:
        return
#  penalization
    lam = auxil.select_penal(0.0)
    if lam is None:
        return
#  outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
#  match dimensions
    bands = len(pos2)
    if (rows1 != rows) or (cols1 != cols) or (len(pos1) != bands):
        sys.stderr.write("Size mismatch")
        sys.exit(1)
    print('=========================')
    print('       iMAD')
    print('=========================')
    print(time.asctime())
    print('time1: ' + file1)
    print('time2: ' + file2)
    print('Delta    [canonical correlations]')
    #  iteration of MAD
    cpm = auxil.Cpm(2 * bands)
    delta = 1.0
    oldrho = np.zeros(bands)
    itr = 0
    tile = np.zeros((cols, 2 * bands))
    sigMADs = 0
    means1 = 0
    means2 = 0
    A = 0
    B = 0
    rasterBands1 = []
    rasterBands2 = []
    for b in pos1:
        rasterBands1.append(inDataset1.GetRasterBand(b))
    for b in pos2:
        rasterBands2.append(inDataset2.GetRasterBand(b))
    while (delta > 0.001) and (itr < 100):
        #      spectral tiling for statistics
        for row in range(rows):
            for k in range(bands):
                tile[:, k] = rasterBands1[k].ReadAsArray(x10, y10 + row, cols, 1)
                tile[:, bands + k] = rasterBands2[k].ReadAsArray(x20, y20 + row, cols, 1)
#          eliminate no-data pixels (assuming all zeroes)
            tst1 = np.sum(tile[:, 0:bands], axis=1)
            tst2 = np.sum(tile[:, bands::], axis=1)
            idx1 = set(np.where((tst1 > 0))[0])
            idx2 = set(np.where((tst2 > 0))[0])
            idx = list(idx1.intersection(idx2))
            if itr > 0:
                mads = np.asarray((tile[:, 0:bands] - means1) * A -
                                  (tile[:, bands::] - means2) * B)
                chisqr = np.sum((mads / sigMADs)**2, axis=1)
                wts = 1 - stats.chi2.cdf(chisqr, [bands])
                cpm.update(tile[idx, :], wts[idx])
            else:
                cpm.update(tile[idx, :])
#     weighted covariance matrices and means
        S = cpm.covariance()
        means = cpm.means()
        #     reset prov means object
        cpm.__init__(2 * bands)
        s11 = S[0:bands, 0:bands]
        s11 = (1 - lam) * s11 + lam * np.eye(bands)
        s22 = S[bands:, bands:]
        s22 = (1 - lam) * s22 + lam * np.eye(bands)
        s12 = S[0:bands, bands:]
        s21 = S[bands:, 0:bands]
        c1 = s12 * linalg.inv(s22) * s21
        b1 = s11
        c2 = s21 * linalg.inv(s11) * s12
        b2 = s22
        #     solution of generalized eigenproblems
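        #      mu2: squared canonical correlations; A, B: transformation matrices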
        if bands > 1:
            mu2a, A = auxil.geneiv(c1, b1)
            mu2b, B = auxil.geneiv(c2, b2)
            #          sort a
            idx = np.argsort(mu2a)
            A = A[:, idx]
            #          sort b
            idx = np.argsort(mu2b)
            B = B[:, idx]
            mu2 = mu2b[idx]
        else:
            mu2 = c1 / b1
            A = 1 / np.sqrt(b1)
            B = 1 / np.sqrt(b2)
#      canonical correlations
        mu = np.sqrt(mu2)
        a2 = np.diag(A.T * A)
        b2 = np.diag(B.T * B)
        sigma = np.sqrt((2 - lam * (a2 + b2)) / (1 - lam) - 2 * mu)
        rho = mu * (1 - lam) / np.sqrt((1 - lam * a2) * (1 - lam * b2))
        #      stopping criterion
        delta = max(abs(rho - oldrho))
        print(delta, rho)
        oldrho = rho
        #      tile the sigmas and means
        sigMADs = np.tile(sigma, (cols, 1))
        means1 = np.tile(means[0:bands], (cols, 1))
        means2 = np.tile(means[bands::], (cols, 1))
        #      ensure sum of positive correlations between X and U is positive
        D = np.diag(1 / np.sqrt(np.diag(s11)))
        s = np.ravel(np.sum(D * s11 * A, axis=0))
        A = A * np.diag(s / np.abs(s))
        #      ensure positive correlation between each pair of canonical variates
        cov = np.diag(A.T * s12 * B)
        B = B * np.diag(cov / np.abs(cov))
        itr += 1


# write results to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands + 1, GDT_Float32)
    projection = inDataset1.GetProjection()
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x10 * gt[1]
        gt[3] = gt[3] + y10 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBands = []
    for k in range(bands + 1):
        outBands.append(outDataset.GetRasterBand(k + 1))
    for row in range(rows):
        for k in range(bands):
            tile[:, k] = rasterBands1[k].ReadAsArray(x10, y10 + row, cols, 1)
            tile[:, bands + k] = rasterBands2[k].ReadAsArray(x20, y20 + row, cols, 1)
        mads = np.asarray((tile[:, 0:bands] - means1) * A -
                          (tile[:, bands::] - means2) * B)
        chisqr = np.sum((mads / sigMADs)**2, axis=1)
        for k in range(bands):
            outBands[k].WriteArray(np.reshape(mads[:, k], (1, cols)), 0, row)
        outBands[bands].WriteArray(np.reshape(chisqr, (1, cols)), 0, row)
    for outBand in outBands:
        outBand.FlushCache()
    outDataset = None
    inDataset1 = None
    inDataset2 = None
    print('result written to: ' + outfile)
    print('--------done---------------------')
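
The weighting inside the iteration is what drives the IR-MAD scheme: chisqr is the squared MAD magnitude standardized by sigMADs, and wts = 1 - chi2.cdf(chisqr, bands) is used as a no-change weight, so pixels that look unchanged dominate the next estimate of the means and covariances. A small standalone illustration of that step (the pixel values are invented):

import numpy as np
from scipy import stats

bands = 3
# standardized MAD variates for four hypothetical pixels
mads = np.array([[0.1, -0.2, 0.05],    # nearly unchanged
                 [0.5,  0.4, -0.3],
                 [2.0, -1.5,  1.0],
                 [4.0,  3.5, -3.0]])   # strongly changed
chisqr = np.sum(mads**2, axis=1)
wts = 1 - stats.chi2.cdf(chisqr, bands)
print(np.round(wts, 3))   # close to 1 for unchanged pixels, close to 0 for changed ones
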
Example #41
0
def main():
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    m = auxil.select_integer(2000, 'Select sample size (0 for k-means)')

    n = auxil.select_integer(10, 'Select number of eigenvalues')
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    kernel = auxil.select_integer(1, 'Select kernel: 0=linear, 1=Gaussian')
    print '========================='
    print '       kPCA'
    print '========================='
    print 'infile:  ' + infile
    print 'samples: ' + str(m)
    if kernel == 0:
        print 'kernel:  ' + 'linear'
    else:
        print 'kernel:  ' + 'Gaussian'
    start = time.time()
    if kernel == 0:
        n = min(bands, n)
# construct data design matrices
    XX = zeros((cols * rows, bands))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0, y0, cols, rows).astype(float)
        XX[:, k] = ravel(band)
        k += 1
    if m > 0:
        idx = fix(random.random(m) * (cols * rows)).astype(int)
        X = XX[idx, :]
    else:
        print 'running k-means on 100 cluster centers...'
        X, _ = kmeans(XX, 100, iter=1)
        m = 100
    print 'centered kernel matrix...'
    # centered kernel matrix
    K, gma = auxil.kernelMatrix(X, kernel=kernel)
    meanK = sum(K) / (m * m)
    rowmeans = mat(sum(K, axis=0) / m)
    if gma is not None:
        print 'gamma: ' + str(round(gma, 6))
    K = auxil.center(K)
    print 'diagonalizing...'
    # diagonalize
    try:
        w, v = linalg.eigh(K, eigvals=(m - n, m - 1))
        idx = range(n)
        idx.reverse()
        w = w[idx]
        v = v[:, idx]
        #      variance of PCs
        var = w / m
    except linalg.LinAlgError:
        print 'eigenvalue computation failed'
        sys.exit()
#  dual variables (normalized eigenvectors)
    alpha = mat(v) * mat(diag(1 / sqrt(w)))
    print 'projecting...'
    #  projecting
    image = zeros((rows, cols, n))
    for i in range(rows):
        XXi = XX[i * cols:(i + 1) * cols, :]
        KK, gma = auxil.kernelMatrix(X, XXi, kernel=kernel, gma=gma)
        #  centering on training data:
        #      subtract column means
        colmeans = mat(sum(KK, axis=0) / m)
        onesm = mat(ones(m))
        KK = KK - onesm.T * colmeans
        #      subtract row means
        onesc = mat(ones(cols))
        KK = KK - rowmeans.T * onesc
        #      add overall mean
        KK = KK + meanK
        #      project
        image[i, :, :] = KK.T * alpha


#  write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, n, GDT_Float32)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(n):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(image[:, :, k], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    plt.plot(range(1, n + 1), var, 'k-')
    plt.title('kernel PCA')
    plt.xlabel('principal component')
    plt.ylabel('Variance')
    plt.show()
    print '--done------------------------'
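
The colmeans/rowmeans/meanK block in this last example centers the test kernel in the feature space defined by the training sample: subtract the per-column means of the cross kernel, subtract the per-row means of the training kernel, and add back the grand mean of the training kernel. The same operation written with plain NumPy broadcasting instead of np.matrix (function and argument names here are illustrative, not from the source):

import numpy as np

def center_cross_kernel(K_train, K_cross):
    # K_train: (m, m) kernel matrix of the training sample
    # K_cross: (m, t) kernel between the m training samples and t new pixels
    m = K_train.shape[0]
    grand_mean = K_train.sum() / (m * m)
    row_means = K_train.sum(axis=0) / m        # shape (m,)
    col_means = K_cross.sum(axis=0) / m        # shape (t,)
    return K_cross - col_means[None, :] - row_means[:, None] + grand_mean
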