def main():
    # Unsupervised K-means classification of a (spatial/spectral subset of a)
    # multispectral image.  Pixel vectors of the selected bands are clustered
    # (kmeans/vq -- presumably scipy.cluster.vq; confirm against file imports)
    # and the cluster labels 0..K-1 are written out as one byte band.
    gdal.AllRegister()
    infile = auxil.select_infile()
    if infile:
        inDataset = gdal.Open(infile,GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        # user cancelled the file dialog
        return
    pos = auxil.select_pos(bands)           # 1-based band numbers to use
    bands = len(pos)
    # NOTE(review): dims are passed/unpacked here as (x0,y0,rows,cols), whereas
    # sibling scripts in this file use (x0,y0,cols,rows); also the return value
    # is not checked for None (dialog cancel would raise) -- confirm intent.
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])
    K = auxil.select_integer(6,msg='Number clusters')
    # data matrix: one row per pixel, one column per selected band
    G = zeros((rows*cols,len(pos)))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        G[:,k] = band.ReadAsArray(x0,y0,cols,rows)\
                              .astype(float).ravel()
        k += 1
    # cluster the pixel vectors, then assign every pixel to nearest center
    centers, _ = kmeans(G,K)
    labels, _ = vq(G,centers)
    outfile,fmt = auxil.select_outfilefmt()
    if outfile:
        driver = gdal.GetDriverByName(fmt)
        outDataset = driver.Create(outfile, cols,rows,1,GDT_Byte)
        outBand = outDataset.GetRasterBand(1)
        outBand.WriteArray(reshape(labels,(rows,cols)),0,0)
        outBand.FlushCache()
        outDataset = None                   # closes/flushes the GDAL dataset
    inDataset = None
def main():
    # Gaussian-mixture (EM) clustering of a multispectral image, performed
    # coarse-to-fine over a discrete wavelet transform (DWT) pyramid:
    # cluster at the coarsest scale, then expand memberships and re-cluster
    # at each finer scale with high-confidence pixels frozen.
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile,GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    bands = len(pos)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x0,y0,cols,rows = dims
    else:
        return
    class_image = np.zeros((rows,cols),dtype=np.byte)
    K = auxil.select_integer(6,'Number of clusters')
    # scaling factors are powers of two; max is capped at 3 (factor 8)
    max_scale = auxil.select_integer(2,'Maximum scaling factor')
    max_scale = min((max_scale,3))
    min_scale = auxil.select_integer(0,'Minimum scaling factor')
    min_scale = min((max_scale,min_scale))
    T0 = auxil.select_float(0.5,'Initial annealing temperature')
    beta = auxil.select_float(0.5,'Spatial mixing parameter')
    outfile, outfmt = auxil.select_outfilefmt('Select output classification file')
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt('Select output probability file (optional)')
    print '========================='
    print ' EM clustering'
    print '========================='
    print 'infile: %s'%infile
    print 'clusters: %i'%K
    print 'T0: %f'%T0
    print 'beta: %f'%beta
    start = time.time()
    # read in image and compress
    DWTbands = []
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(band.ReadAsArray(x0,y0,cols,rows).astype(float),cols,rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    # rows/cols now refer to the coarsest pyramid level
    rows,cols = DWTbands[0].get_quadrant(0).shape
    G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))
    # initialize membership matrix
    n = G.shape[0]
    U = np.random.random((K,n))
    den = np.sum(U,axis=0)
    for j in range(K):
        U[j,:] = U[j,:]/den
    # cluster at minimum scale
    try:
        U,Ms,Cs,Ps,pdens = em(G,U,T0,beta,rows,cols)
    except:
        print 'em failed'
        return
    # sort clusters wrt partition density
    idx = np.argsort(pdens)
    idx = idx[::-1]
    U = U[idx,:]
    # clustering at increasing scales
    for i in range(max_scale-min_scale):
        # expand U and renormalize
        U = np.reshape(U,(K,rows,cols))
        rows = rows*2
        cols = cols*2
        U = ndi.zoom(U,(1,2,2))
        U = np.reshape(U,(K,rows*cols))
        # zoom interpolation can overshoot below zero; clip then renormalize
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)
        for j in range(K):
            U[j,:] = U[j,:]/den
        # expand the image
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(np.array([DWTbands[i].get_quadrant(0,float=True).ravel() for i in range(bands)]))
        # cluster -- only pixels with membership < 0.90 remain "unfrozen"
        unfrozen = np.where(np.max(U,axis=0) < 0.90)
        try:
            U,Ms,Cs,Ps,pdens = em(G,U,0.0,beta,rows,cols,unfrozen=unfrozen)
        except:
            print 'em failed'
            return
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i'%k
        print Cs[k]
    # up-sample class memberships if necessary
    if min_scale>0:
        U = np.reshape(U,(K,rows,cols))
        f = 2**min_scale
        rows = rows*f
        cols = cols*f
        U = ndi.zoom(U,(1,f,f))
        U = np.reshape(U,(K,rows*cols))
        idx = np.where(U<0.0)
        U[idx] = 0.0
        den = np.sum(U,axis=0)
        for j in range(K):
            U[j,:] = U[j,:]/den
    # classify: labels are 1-based (0 reserved for unclassified margin)
    labels = np.byte(np.argmax(U,axis=0)+1)
    class_image[0:rows,0:cols] = np.reshape(labels,(rows,cols))
    rows1,cols1 = class_image.shape
    # write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile,cols1,rows1,1,GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # shift the origin to the upper-left corner of the spatial subset
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image,0,0)
    outBand.FlushCache()
    outDataset = None
    # write class membership probability file if desired
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        outDataset = driver.Create(probfile,cols,rows,K,GDT_Byte)
        # NOTE(review): 'gt' is only bound when geotransform is not None above;
        # if geotransform were None this branch would raise NameError -- confirm.
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)
        for k in range(K):
            # memberships scaled to 0..255 bytes, one band per class
            probs = np.reshape(U[k,:],(rows,cols))
            probs = np.byte(probs*255)
            outBand = outDataset.GetRasterBand(k+1)
            outBand.WriteArray(probs,0,0)
            outBand.FlushCache()
        outDataset = None
        print 'class probabilities written to: %s'%probfile
    inDataset = None
    if (outfmt == 'ENVI') and (K<19):
        # try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile+'.hdr'
        # re-read the header GDAL just wrote, then patch it as classification
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] ='ENVI Classification'
        hdr['classes'] = str(K+1)
        # class lookup is a flat {r,g,b, r,g,b, ...} list from the ctable global
        classlookup = '{0'
        for i in range(1,3*(K+1)):
            classlookup += ', '+str(str(ctable[i]))
        classlookup +='}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i'%i for i in range(K+1)]
        f = open(headerfile,'w')
        f.write(str(hdr))
        f.close()
    print 'classification written to: '+outfile
    print 'elapsed time: '+str(time.time()-start)
    print '--done------------------------'
def main():
    # MMSE (minimum mean-square error) adaptive speckle filter for polarimetric
    # SAR covariance-matrix imagery.  Edge-directed 7x7 window weights are
    # estimated once from the span (total power) image, then applied to every
    # covariance band.  Relies on module-level 'edges', 'templates' and
    # get_windex().
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile,GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    # spatial subset (return not checked for dialog cancel -- see sibling scripts)
    x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols])
    # number of looks
    m = auxil.select_integer(5,msg='Number of looks')
    if not m:
        return
    # output file
    outfile,fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # get filter weights from span image
    b = np.ones((rows,cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0,y0,cols,rows).ravel()
    # span = C11 + C22 + C33 (quad pol, 9 bands) or C11 + C22 (dual pol, 4 bands)
    if bands==9:
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    elif bands==4:
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0,y0,cols,rows).ravel()
    edge_idx = np.zeros((rows,cols),dtype=int)
    print '========================='
    print ' MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile: %s'%infile
    print 'number of looks: %i'%m
    print 'Determining filter weights from span image'
    start = time.time()
    print 'row: ',
    sys.stdout.flush()
    for j in range(3,rows-3):
        if j%50 == 0:
            print '%i '%j,
            sys.stdout.flush()
        windex = get_windex(j,cols)
        for i in range(3,cols-3):
            wind = np.reshape(span[windex],(7,7))
            # 3x3 compression
            w = congrid.congrid(wind,(3,3),method='spline',centre=True)
            # get appropriate edge mask: pick strongest of 4 edge directions,
            # then choose which side of the edge the center pixel belongs to
            es = [np.sum(edges[p]*w) for p in range(4)]
            idx = np.argmax(es)
            if idx == 0:
                if np.abs(w[1,1]-w[1,0]) < np.abs(w[1,1]-w[1,2]):
                    edge_idx[j,i] = 0
                else:
                    edge_idx[j,i] = 4
            elif idx == 1:
                if np.abs(w[1,1]-w[2,0]) < np.abs(w[1,1]-w[0,2]):
                    edge_idx[j,i] = 1
                else:
                    edge_idx[j,i] = 5
            elif idx == 2:
                if np.abs(w[1,1]-w[0,1]) < np.abs(w[1,1]-w[2,1]):
                    edge_idx[j,i] = 6
                else:
                    edge_idx[j,i] = 2
            elif idx == 3:
                if np.abs(w[1,1]-w[0,0]) < np.abs(w[1,1]-w[2,2]):
                    edge_idx[j,i] = 7
                else:
                    edge_idx[j,i] = 3
            edge = templates[edge_idx[j,i]]
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
            if varg > 0:
                # MMSE weight, clamped at 0 (b=1 means keep original pixel)
                b[j,i] = np.max( ((1.0 - gbar**2/(varg*m))/(1.0+1.0/m), 0.0) )
            windex += 1
    print ' done'
    # filter the image
    outim = np.zeros((rows,cols),dtype=np.float32)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile,cols,rows,bands,GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0*gt[1]
        gt[3] = gt[3] + y0*gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    print 'Filtering covariance matrix elememnts'
    for k in range(1,bands+1):
        print 'band: %i'%(k)
        band = inDataset.GetRasterBand(k)
        # NOTE(review): bands are read at offset (0,0) although the weights
        # above were derived from the (x0,y0) subset -- confirm this is only
        # ever run with the full-image subset, otherwise offsets disagree.
        band = band.ReadAsArray(0,0,cols,rows)
        gbar = band*0.0
        # get window means
        for j in range(3,rows-3):
            windex = get_windex(j,cols)
            for i in range(3,cols-3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j,i]]
                wind = wind[edge]
                gbar[j,i] = np.mean(wind)
                windex += 1
        # apply adaptive filter and write to disk
        outim = np.reshape(gbar + b*(band-gbar),(rows,cols))
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim,0,0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: '+outfile
    print 'elapsed time: '+str(time.time()-start)
def main():
    # DWT pansharpening: fuse a low-resolution multispectral (MS) image with a
    # high-resolution panchromatic (PAN) image.  The PAN image is wavelet-
    # compressed to MS resolution, the MS bands are co-registered and injected
    # into the PAN pyramid, and inverting the transform yields sharpened bands.
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # MS image
    file1 = auxil.select_infile(title='Choose MS image')
    if file1:
        inDataset1 = gdal.Open(file1,GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    num_bands = len(pos1)
    dims = auxil.select_dims([0,0,cols,rows])
    if dims:
        x10,y10,cols1,rows1 = dims
    else:
        return
    # PAN image
    file2 = auxil.select_infile(title='Choose PAN image')
    if file2:
        inDataset2 = gdal.Open(file2,GA_ReadOnly)
        bands = inDataset2.RasterCount
    else:
        return
    if bands>1:
        print 'Must be a single band (panchromatic) image'
        return
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()
    # outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # resolution ratio
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)')
    if not ratio:
        return
    # MS registration band
    k1 = auxil.select_integer(1, 'MS band for registration')
    if not k1:
        return
    # fine adjust ('is None' check because 0 is a valid roll value)
    roll = auxil.select_integer(0, 'Fine adjust (-2 ... 2)')
    if roll is None:
        return
    print '========================='
    print ' DWT Pansharpening'
    print '========================='
    print time.asctime()
    print 'MS file: '+file1
    print 'PAN file: '+file2
    # image arrays -- probe one pixel to inherit the MS data type
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0,0,1,1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands,rows1,cols1)),dtype=dt)
    k = 0
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k,:,:] = band.ReadAsArray(x10,y10,cols1,rows1)
        k += 1
    # if integer assume 11bit quantization otherwise must be byte
    # fact rescales the byte-stretched result back at the end
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS,(0,2**11))
    else:
        fact = 1.0
    # read in corresponding spatial subset of PAN image
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting'
        return
    # upper left corner pixel in PAN
    gt1 = list(geotransform1)
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10*gt1[1]
    uly1 = gt1[3] + y10*gt1[5]
    x20 = int(round(((ulx1 - gt2[0])/gt2[1])))
    y20 = int(round(((uly1 - gt2[3])/gt2[5])))
    cols2 = cols1*ratio
    rows2 = rows1*ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20,y20,cols2,rows2)
    # if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN,(0,2**11))
    # compress PAN to resolution of MS image
    panDWT = auxil.DWTArray(PAN,cols2,rows2)
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)
    lines0,samples0 = bn0.shape
    bn1 = MS[k1-1,:,:]
    # register (and subset) MS image to compressed PAN image
    (scale,angle,shift) = auxil.similarity(bn0,bn1)
    tmp = np.zeros((num_bands,lines0,samples0))
    for k in range(num_bands):
        bn1 = MS[k,:,:]
        bn2 = ndii.zoom(bn1, 1.0/scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k,:,:] = bn2[0:lines0,0:samples0]
    MS = tmp
    if roll != 0:
        # fine adjust: shift PAN by 'roll' pixels in both axes and redo pyramid
        PAN = np.roll(PAN,roll,axis=0)
        PAN = np.roll(PAN,roll,axis=1)
        panDWT = auxil.DWTArray(PAN,cols2,rows2)
        r = ratio
        while r > 1:
            panDWT.filter()
            r /= 2
    # compress pan once more, extract wavelet quadrants, and restore
    panDWT.filter()
    fgpan = panDWT.get_quadrant(1)
    gfpan = panDWT.get_quadrant(2)
    ggpan = panDWT.get_quadrant(3)
    panDWT.invert()
    # output array
    sharpened = np.zeros((num_bands,rows2,cols2),dtype=np.float32)
    aa = np.zeros(3)
    bb = np.zeros(3)
    print 'Wavelet correlations:'
    for i in range(num_bands):
        # make copy of panDWT and inject ith ms band
        msDWT = copy.deepcopy(panDWT)
        msDWT.put_quadrant(MS[i,:,:],0)
        # compress once more
        msDWT.filter()
        # determine wavelet normalization coefficents by orthogonal regression
        # of MS detail quadrants onto the corresponding PAN detail quadrants
        ms = msDWT.get_quadrant(1)
        aa[0],bb[0],R = auxil.orthoregress(fgpan.ravel(), ms.ravel())
        Rs = 'Band '+str(i+1)+': %8.3f'%R
        ms = msDWT.get_quadrant(2)
        aa[1],bb[1],R = auxil.orthoregress(gfpan.ravel(), ms.ravel())
        Rs += '%8.3f'%R
        ms = msDWT.get_quadrant(3)
        aa[2],bb[2],R = auxil.orthoregress(ggpan.ravel(), ms.ravel())
        Rs += '%8.3f'%R
        print Rs
        # restore once and normalize wavelet coefficients
        msDWT.invert()
        msDWT.normalize(aa,bb)
        # restore completely and collect result
        r = 1
        while r < ratio:
            msDWT.invert()
            r *= 2
        sharpened[i,:,:] = msDWT.get_quadrant(0)
    sharpened *= fact
    # write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile,cols2,rows2,num_bands,GDT_Float32)
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)
    # output georeferencing: MS origin shifted by subset, PAN pixel sizes
    # NOTE(review): origin shift uses x10*ratio / y10*ratio (pixel counts),
    # while the ATWT sibling uses x10*gt2[1] / y10*gt2[5] (map units) --
    # confirm which is correct; they agree only if |pixel size| == ratio.
    gt1 = list(geotransform1)
    gt1[0] += x10*ratio
    gt1[3] -= y10*ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))
    for k in range(num_bands):
        outBand = outDataset.GetRasterBand(k+1)
        outBand.WriteArray(sharpened[k,:,:],0,0)
        outBand.FlushCache()
    outDataset = None
    print 'Result written to %s'%outfile
    inDataset1 = None
    inDataset2 = None
def main():
    # Complex Wishart likelihood-ratio change detection between two
    # co-registered polarimetric SAR covariance images (quad/dual/single pol).
    # Outputs (1) the test statistic Z and change probability P as floats,
    # and (2) a 3-band byte change-map image.
    print '================================'
    print 'Complex Wishart Change Detection'
    print '================================'
    print time.asctime()
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # first SAR image
    infile1 = auxil.select_infile(title='Choose first SAR image')
    if infile1:
        inDataset1 = gdal.Open(infile1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
    print 'first filename: %s' % infile1
    print 'number of looks: %i' % m
    # second SAR image
    infile2 = auxil.select_infile(title='Choose second SAR image')
    if not infile2:
        return
    n = auxil.select_integer(5, msg='Number of looks')
    if not n:
        return
    print 'second filename: %s' % infile2
    print 'number of looks: %i' % n
    # output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # significance level
    sig = auxil.select_float(0.01, 'Choose significance level')
    print 'Signifcane level: %f' % sig
    start = time.time()
    # co-register second image to the first; result replaces infile2
    print 'co-registering...'
    registerSAR.registerSAR(infile1, infile2, 'warp.tif', 'GTiff')
    infile2 = 'warp.tif'
    inDataset2 = gdal.Open(infile2, GA_ReadOnly)
    cols2 = inDataset2.RasterXSize
    rows2 = inDataset2.RasterYSize
    bands2 = inDataset2.RasterCount
    if (bands != bands2) or (cols != cols2) or (rows != rows2):
        print 'Size mismatch'
        return
    # Bands hold the upper-triangle covariance elements; multiplying by the
    # number of looks (m, n) recovers the unnormalized sample matrices.
    if bands == 9:
        print 'Quad polarimetry'
        # C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C12 (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        # C13 (rho1)
        b = inDataset1.GetRasterBand(4)
        rho1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho1 = m * (rho1 + 1j * im)
        # C22 (xsi1)
        b = inDataset1.GetRasterBand(6)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C23 (b1)
        b = inDataset1.GetRasterBand(7)
        b1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b1 = m * (b1 + 1j * im)
        # C33 (zeta1)
        b = inDataset1.GetRasterBand(9)
        zeta1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        # C12 (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        # C13 (rho2)
        b = inDataset2.GetRasterBand(4)
        rho2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(5)
        im = b.ReadAsArray(0, 0, cols, rows)
        rho2 = n * (rho2 + 1j * im)
        # C22 (xsi2)
        b = inDataset2.GetRasterBand(6)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        # C23 (b2)
        b = inDataset2.GetRasterBand(7)
        b2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(8)
        im = b.ReadAsArray(0, 0, cols, rows)
        b2 = n * (b2 + 1j * im)
        # C33 (zeta2)
        b = inDataset2.GetRasterBand(9)
        zeta2 = n * b.ReadAsArray(0, 0, cols, rows)
        # sum matrix (both acquisitions pooled)
        k3 = k1 + k2
        a3 = a1 + a2
        rho3 = rho1 + rho2
        xsi3 = xsi1 + xsi2
        b3 = b1 + b2
        zeta3 = zeta1 + zeta2
        # 3x3 Hermitian determinants, pixel-wise
        det1 = k1 * xsi1 * zeta1 + 2 * np.real(
            a1 * b1 * np.conj(rho1)) - xsi1 * (abs(rho1)**2) - k1 * (
                abs(b1)**2) - zeta1 * (abs(a1)**2)
        det2 = k2 * xsi2 * zeta2 + 2 * np.real(
            a2 * b2 * np.conj(rho2)) - xsi2 * (abs(rho2)**2) - k2 * (
                abs(b2)**2) - zeta2 * (abs(a2)**2)
        det3 = k3 * xsi3 * zeta3 + 2 * np.real(
            a3 * b3 * np.conj(rho3)) - xsi3 * (abs(rho3)**2) - k3 * (
                abs(b3)**2) - zeta3 * (abs(a3)**2)
        p = 3
        f = p**2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        rho = 1. - (2. * p**2 - 1.) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -(p * p / 4.) * (1. - 1. / rho)**2 + p**2 * (p**2 - 1.) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 4:
        print 'Dual polarimetry'
        # C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C12 (a1)
        b = inDataset1.GetRasterBand(2)
        a1 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset1.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a1 = m * (a1 + 1j * im)
        # C22 (xsi1)
        b = inDataset1.GetRasterBand(4)
        xsi1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        # C12 (a2)
        b = inDataset2.GetRasterBand(2)
        a2 = b.ReadAsArray(0, 0, cols, rows)
        b = inDataset2.GetRasterBand(3)
        im = b.ReadAsArray(0, 0, cols, rows)
        a2 = n * (a2 + 1j * im)
        # C22 (xsi2)
        b = inDataset2.GetRasterBand(4)
        xsi2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        a3 = a1 + a2
        xsi3 = xsi1 + xsi2
        # 2x2 Hermitian determinants
        det1 = k1 * xsi1 - abs(a1)**2
        det2 = k2 * xsi2 - abs(a2)**2
        det3 = k3 * xsi3 - abs(a3)**2
        p = 2
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2 * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    elif bands == 1:
        print 'Single polarimetry'
        # C11 (k1)
        b = inDataset1.GetRasterBand(1)
        k1 = m * b.ReadAsArray(0, 0, cols, rows)
        # C11 (k2)
        b = inDataset2.GetRasterBand(1)
        k2 = n * b.ReadAsArray(0, 0, cols, rows)
        k3 = k1 + k2
        det1 = k1
        det2 = k2
        det3 = k3
        p = 1
        cst = p * ((n + m) * np.log(n + m) - n * np.log(n) - m * np.log(m))
        f = p**2
        rho = 1 - (2. * f - 1) * (1. / n + 1. / m - 1. / (n + m)) / (6. * p)
        omega2 = -f / 4. * (1 - 1. / rho)**2 + f * (f - 1) * (
            1. / n**2 + 1. / m**2 - 1. / (n + m)**2) / (24. * rho**2)
    else:
        print 'Incorrect number of bands'
        return
    # guard against non-positive determinants before taking logs
    idx = np.where(det1 <= 0.0)
    det1[idx] = 0.0001
    idx = np.where(det2 <= 0.0)
    det2[idx] = 0.0001
    idx = np.where(det3 <= 0.0)
    det3[idx] = 0.0001
    lnQ = cst + m * np.log(det1) + n * np.log(det2) - (n + m) * np.log(det3)
    # test statistic
    Z = -2 * rho * lnQ
    # change probabilty: chi-square mixture with f and f+4 degrees of freedom
    P = (1. - omega2) * stats.chi2.cdf(Z, [f]) + omega2 * stats.chi2.cdf(
        Z, [f + 4])
    P = ndimage.filters.median_filter(P, size=(3, 3))
    # change map: stretched log(C11) background with changes marked white
    a255 = np.ones((rows, cols), dtype=np.byte) * 255
    a0 = a255 * 0
    c11 = np.log(k1 + 0.0001)
    min1 = np.min(c11)
    max1 = np.max(c11)
    c11 = (c11 - min1) * 255.0 / (max1 - min1)
    c11 = np.where(c11 < 0, a0, c11)
    c11 = np.where(c11 > 255, a255, c11)
    c11 = np.where(P > (1.0 - sig), a0, c11)
    cmap = np.where(P > (1.0 - sig), a255, c11)
    # write to file system
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(Z, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(P, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'test statistic and probabilities written to: %s' % outfile
    # derive the change-map filename from the main output filename
    basename = os.path.basename(outfile)
    name, ext = os.path.splitext(basename)
    outfile = outfile.replace(name, name + '_cmap')
    outDataset = driver.Create(outfile, cols, rows, 3, GDT_Byte)
    geotransform = inDataset1.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    projection = inDataset1.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(cmap, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(2)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outBand = outDataset.GetRasterBand(3)
    outBand.WriteArray(c11, 0, 0)
    outBand.FlushCache()
    outDataset = None
    print 'change map image written to: %s' % outfile
    print 'elapsed time: ' + str(time.time() - start)
def main():
    # Probabilistic label relaxation (PLR): iteratively smooths a per-class
    # membership-probability image using a neighbor compatibility matrix
    # estimated from the image itself, then writes the relaxed probabilities
    # back out as byte bands.
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    # path = 'd:\\imagery\\CRC\\Chapters6-7'
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select a class probability image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        classes = inDataset.RasterCount
    else:
        return
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    nitr = auxil.select_integer(3, 'Select number of iterations')
    print '========================='
    print ' PLR'
    print '========================='
    print 'infile: %s' % infile
    print 'iterations: %i' % nitr
    start = time.time()
    prob_image = np.zeros((classes, rows, cols))
    for k in range(classes):
        band = inDataset.GetRasterBand(k + 1)
        prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float)
    # compatibility matrix: Pmn[m,n] ~ frequency that class m is adjacent
    # (below or right neighbor) to class n among max-probability labels
    Pmn = np.zeros((classes, classes))
    n_samples = (cols - 1) * (rows - 1)
    samplem = np.reshape(prob_image[:, 0:rows - 1, 0:cols - 1],
                         (classes, n_samples))
    samplen = np.reshape(prob_image[:, 1:rows, 0:cols - 1],
                         (classes, n_samples))
    sampleu = np.reshape(prob_image[:, 0:rows - 1, 1:cols],
                         (classes, n_samples))
    max_samplem = np.amax(samplem, axis=0)
    max_samplen = np.amax(samplen, axis=0)
    max_sampleu = np.amax(sampleu, axis=0)
    print 'estimating compatibility matrix...'
    for j in range(n_samples):
        if j % 50000 == 0:
            print '%i samples of %i' % (j, n_samples)
        m1 = np.where(samplem[:, j] == max_samplem[j])[0][0]
        n1 = np.where(samplen[:, j] == max_samplen[j])[0][0]
        # NOTE(review): m1/n1/u1 are numpy integer scalars; whether
        # isinstance(x, int) is True for them is platform-dependent (np.int64
        # subclasses int only on some builds) -- confirm these guards actually
        # fire as intended.
        if isinstance(m1, int) and isinstance(n1, int):
            Pmn[m1, n1] += 1
        u1 = np.where(sampleu[:, j] == max_sampleu[j])[0][0]
        if isinstance(m1, int) and isinstance(u1, int):
            Pmn[m1, u1] += 1
    # row-normalize Pmn to conditional probabilities
    for j in range(classes):
        n = np.sum(Pmn[j, :])
        if n > 0:
            Pmn[j, :] /= n
    print Pmn
    itr = 0
    temp = prob_image * 0
    print 'label relaxation...'
    while itr < nitr:
        print 'iteration %i' % (itr + 1)
        Pm = np.zeros(classes)
        Pn = np.zeros(classes)
        for i in range(1, rows - 1):
            if i % 50 == 0:
                print '%i rows processed' % i
            for j in range(1, cols - 1):
                Pm[:] = prob_image[:, i, j]
                # neighborhood probability: average of the 4-neighbors
                Pn[:] = prob_image[:, i - 1, j] / 4
                Pn[:] += prob_image[:, i + 1, j] / 4
                Pn[:] += prob_image[:, i, j - 1] / 4
                Pn[:] += prob_image[:, i, j + 1] / 4
                Pn = np.transpose(Pn)
                if np.sum(Pm) == 0:
                    Pm_new = Pm
                else:
                    # standard PLR update: reweight by compatibilities, renormalize
                    Pm_new = Pm * (np.dot(Pmn, Pn)) / (np.dot(
                        np.dot(Pm, Pmn), Pn))
                temp[:, i, j] = Pm_new
        prob_image = temp
        itr += 1
    # write to disk (probabilities scaled to 0..255 bytes)
    prob_image = np.byte(prob_image * 255)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, classes, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        outDataset.SetGeoTransform(geotransform)
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(classes):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(prob_image[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
def main():
    # MMSE adaptive speckle filter for polarimetric SAR covariance imagery
    # (PEP8-spaced variant of the same algorithm appearing earlier in this
    # file).  Edge-directed 7x7 window weights are estimated once from the
    # span (total power) image, then applied to every covariance band.
    # Relies on module-level 'edges', 'templates' and get_windex().
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # SAR image
    infile = auxil.select_infile(title='Choose SAR image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    # spatial subset (return not checked for dialog cancel)
    x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols])
    # number of looks
    m = auxil.select_integer(5, msg='Number of looks')
    if not m:
        return
    # output file
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # get filter weights from span image
    b = np.ones((rows, cols))
    band = inDataset.GetRasterBand(1)
    span = band.ReadAsArray(x0, y0, cols, rows).ravel()
    # span = C11 + C22 + C33 (quad pol) or C11 + C22 (dual pol)
    if bands == 9:
        band = inDataset.GetRasterBand(6)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
        band = inDataset.GetRasterBand(9)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
    elif bands == 4:
        band = inDataset.GetRasterBand(4)
        span += band.ReadAsArray(x0, y0, cols, rows).ravel()
    edge_idx = np.zeros((rows, cols), dtype=int)
    print '========================='
    print ' MMSE_FILTER'
    print '========================='
    print time.asctime()
    print 'infile: %s' % infile
    print 'number of looks: %i' % m
    print 'Determining filter weights from span image'
    start = time.time()
    print 'row: ',
    sys.stdout.flush()
    for j in range(3, rows - 3):
        if j % 50 == 0:
            print '%i ' % j,
            sys.stdout.flush()
        windex = get_windex(j, cols)
        for i in range(3, cols - 3):
            wind = np.reshape(span[windex], (7, 7))
            # 3x3 compression
            w = congrid.congrid(wind, (3, 3), method='spline', centre=True)
            # get appropriate edge mask: strongest of 4 edge directions, then
            # decide which side of the edge the center pixel belongs to
            es = [np.sum(edges[p] * w) for p in range(4)]
            idx = np.argmax(es)
            if idx == 0:
                if np.abs(w[1, 1] - w[1, 0]) < np.abs(w[1, 1] - w[1, 2]):
                    edge_idx[j, i] = 0
                else:
                    edge_idx[j, i] = 4
            elif idx == 1:
                if np.abs(w[1, 1] - w[2, 0]) < np.abs(w[1, 1] - w[0, 2]):
                    edge_idx[j, i] = 1
                else:
                    edge_idx[j, i] = 5
            elif idx == 2:
                if np.abs(w[1, 1] - w[0, 1]) < np.abs(w[1, 1] - w[2, 1]):
                    edge_idx[j, i] = 6
                else:
                    edge_idx[j, i] = 2
            elif idx == 3:
                if np.abs(w[1, 1] - w[0, 0]) < np.abs(w[1, 1] - w[2, 2]):
                    edge_idx[j, i] = 7
                else:
                    edge_idx[j, i] = 3
            edge = templates[edge_idx[j, i]]
            wind = wind.ravel()[edge]
            gbar = np.mean(wind)
            varg = np.var(wind)
            if varg > 0:
                # MMSE weight, clamped at 0 (b=1 keeps the original pixel)
                b[j, i] = np.max(
                    ((1.0 - gbar**2 / (varg * m)) / (1.0 + 1.0 / m), 0.0))
            windex += 1
    print ' done'
    # filter the image
    outim = np.zeros((rows, cols), dtype=np.float32)
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, bands, GDT_Float32)
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    projection = inDataset.GetProjection()
    if projection is not None:
        outDataset.SetProjection(projection)
    print 'Filtering covariance matrix elememnts'
    for k in range(1, bands + 1):
        print 'band: %i' % (k)
        band = inDataset.GetRasterBand(k)
        # NOTE(review): bands are read at offset (0,0) although the weights
        # above were derived from the (x0,y0) subset -- confirm this is only
        # run with the full-image subset, otherwise the offsets disagree.
        band = band.ReadAsArray(0, 0, cols, rows)
        gbar = band * 0.0
        # get window means
        for j in range(3, rows - 3):
            windex = get_windex(j, cols)
            for i in range(3, cols - 3):
                wind = band.ravel()[windex]
                edge = templates[edge_idx[j, i]]
                wind = wind[edge]
                gbar[j, i] = np.mean(wind)
                windex += 1
        # apply adaptive filter and write to disk
        outim = np.reshape(gbar + b * (band - gbar), (rows, cols))
        outBand = outDataset.GetRasterBand(k)
        outBand.WriteArray(outim, 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
def main():
    # Kernel K-means clustering of a multispectral image: train on a random
    # pixel sample with a (linear or Gaussian) kernel matrix, then classify
    # the full image row by row.  Writes a byte class image and, for ENVI
    # output, a classification header (uses module-level 'ctable').
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    m = auxil.select_integer(1000, 'Select training sample size')
    K = auxil.select_integer(6, 'Select number of clusters')
    outfile, outfmt = auxil.select_outfilefmt()
    if not outfile:
        return
    kernel = auxil.select_integer(1, 'Select kernel: 0=linear, 1=Gaussian')
    print '========================='
    print ' kkmeans'
    print '========================='
    print 'infile: ' + infile
    print 'samples: ' + str(m)
    if kernel == 0:
        print 'kernel: ' + 'linear'
    else:
        print 'kernel: ' + 'Gaussian'
    start = time.time()
    # input data matrix: one row per pixel, one column per selected band
    XX = np.zeros((cols * rows, bands))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0, y0, cols, rows).astype(float)
        XX[:, k] = np.ravel(band)
        k += 1
    # training data matrix: m pixels sampled uniformly at random
    idx = np.fix(np.random.random(m) * (cols * rows)).astype(np.integer)
    X = XX[idx, :]
    print 'kernel matrix...'
    # uncentered kernel matrix
    KK, gma = auxil.kernelMatrix(X, kernel=kernel)
    if gma is not None:
        print 'gamma: ' + str(round(gma, 6))
    # initial (random) class labels
    labels = np.random.randint(K, size=m)
    # iteration: reassign samples to nearest cluster in feature space until
    # labels stop changing (or 100 iterations)
    change = True
    itr = 0
    onesm = np.mat(np.ones(m, dtype=float))
    while change and (itr < 100):
        change = False
        # U: one-hot membership matrix; M: inverse cluster sizes (+1 guard)
        U = np.zeros((K, m))
        for i in range(m):
            U[labels[i], i] = 1
        M = np.diag(1.0 / (np.sum(U, axis=1) + 1.0))
        MU = np.mat(np.dot(M, U))
        # squared feature-space distances (up to a per-sample constant)
        Z = (onesm.T) * np.diag(MU * KK * (MU.T)) - 2 * KK * (MU.T)
        Z = np.array(Z)
        labels1 = (np.argmin(Z, axis=1) % K).ravel()
        if np.sum(labels1 != labels):
            change = True
        labels = labels1
        itr += 1
    print 'iterations: %i' % itr
    # classify image
    print 'classifying...'
    i = 0
    A = np.diag(MU * KK * (MU.T))
    A = np.tile(A, (cols, 1))
    class_image = np.zeros((rows, cols), dtype=np.byte)
    while i < rows:
        # one image row at a time against the training kernel
        XXi = XX[i * cols:(i + 1) * cols, :]
        KKK, _ = auxil.kernelMatrix(X, XXi, gma=gma, kernel=kernel)
        Z = A - 2 * (KKK.T) * (MU.T)
        Z = np.array(Z)
        labels = np.argmin(Z, axis=1).ravel()
        class_image[i, :] = (labels % K) + 1
        i += 1
    sys.stdout.write("\n")
    # write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # shift origin to the upper-left corner of the spatial subset
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        # try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        # re-read the header GDAL just wrote, then patch it as classification
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K)
        # class lookup is a flat {r,g,b, r,g,b, ...} list from 'ctable'
        classlookup = '{0'
        for i in range(1, 3 * K):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = [str(i + 1) for i in range(K)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
def main(): gdal.AllRegister() path = auxil.select_directory('Choose working directory') if path: os.chdir(path) # MS image file1 = auxil.select_infile(title='Choose MS image') if file1: inDataset1 = gdal.Open(file1,GA_ReadOnly) cols = inDataset1.RasterXSize rows = inDataset1.RasterYSize bands = inDataset1.RasterCount else: return pos1 = auxil.select_pos(bands) if not pos1: return num_bands = len(pos1) dims = auxil.select_dims([0,0,cols,rows]) if dims: x10,y10,cols1,rows1 = dims else: return # PAN image file2 = auxil.select_infile(title='Choose PAN image') if file2: inDataset2 = gdal.Open(file2,GA_ReadOnly) cols = inDataset2.RasterXSize rows = inDataset2.RasterYSize bands = inDataset2.RasterCount else: return if bands>1: print 'Must be a single band (panchromatic) image' return dims=auxil.select_dims([0,0,cols,rows]) if dims: x20,y20,cols2,rows2 = dims else: return # outfile outfile, fmt = auxil.select_outfilefmt() if not outfile: return # resolution ratio ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)') if not ratio: return # MS registration band k1 = auxil.select_integer(1, 'MS band for registration') if not k1: return print '=========================' print ' ATWT Pansharpening' print '=========================' print time.asctime() print 'MS file: '+file1 print 'PAN file: '+file2 # image arrays band = inDataset1.GetRasterBand(1) tmp = band.ReadAsArray(0,0,1,1) dt = tmp.dtype MS = np.asarray(np.zeros((num_bands,rows1,cols1)),dtype = dt) # result will be float32 sharpened = np.zeros((num_bands,rows2,cols2),dtype=np.float32) k = 0 for b in pos1: band = inDataset1.GetRasterBand(b) MS[k,:,:] = band.ReadAsArray(x10,y10,cols1,rows1) k += 1 band = inDataset2.GetRasterBand(1) PAN = band.ReadAsArray(x20,y20,cols2,rows2) # if integer assume 11bit quantization, otherwise must be byte if PAN.dtype == np.int16: PAN = auxil.byteStretch(PAN,(0,2**11)) if MS.dtype == np.int16: MS = auxil.byteStretch(MS,(0,2**11)) # compress PAN to resolution of MS image using DWT 
panDWT = auxil.DWTArray(PAN,cols2,rows2) r = ratio while r > 1: panDWT.filter() r /= 2 bn0 = panDWT.get_quadrant(0) # register (and subset) MS image to compressed PAN image using MSband lines0,samples0 = bn0.shape bn1 = MS[k1,:,:] # register (and subset) MS image to compressed PAN image (scale,angle,shift) = auxil.similarity(bn0,bn1) tmp = np.zeros((num_bands,lines0,samples0)) for k in range(num_bands): bn1 = MS[k,:,:] bn2 = ndii.zoom(bn1, 1.0/scale) bn2 = ndii.rotate(bn2, angle) bn2 = ndii.shift(bn2, shift) tmp[k,:,:] = bn2[0:lines0,0:samples0] MS = tmp smpl = np.random.randint(cols2*rows2,size=100000) print 'Wavelet correlations:' # loop over MS bands for k in range(num_bands): msATWT = auxil.ATWTArray(PAN) r = ratio while r > 1: msATWT.filter() r /= 2 # sample PAN wavelet details X = msATWT.get_band(msATWT.num_iter) X = X.ravel()[smpl] # resize the ms band to scale of the pan image ms_band = ndii.zoom(MS[k,:,:],ratio) # sample details of MS band tmpATWT = auxil.ATWTArray(ms_band) r = ratio while r > 1: tmpATWT.filter() r /= 2 Y = tmpATWT.get_band(msATWT.num_iter) Y = Y.ravel()[smpl] # get band for injection bnd = tmpATWT.get_band(0) tmpATWT = None aa,bb,R = auxil.orthoregress(X,Y) print 'Band '+str(k+1)+': %8.3f'%R # inject the filtered MS band msATWT.inject(bnd) # normalize wavelet components and expand msATWT.normalize(aa,bb) r = ratio while r > 1: msATWT.invert() r /= 2 sharpened[k,:,:] = msATWT.get_band(0) # write to disk if outfile: driver = gdal.GetDriverByName(fmt) outDataset = driver.Create(outfile, cols2,rows2,num_bands,GDT_Float32) projection1 = inDataset1.GetProjection() geotransform1 = inDataset1.GetGeoTransform() geotransform2 = inDataset2.GetGeoTransform() if geotransform2 is not None: gt2 = list(geotransform2) if geotransform1 is not None: gt1 = list(geotransform1) gt1[0] += x10*gt2[1] # using PAN pixel sizes gt1[3] += y10*gt2[5] gt1[1] = gt2[1] gt1[2] = gt2[2] gt1[4] = gt2[4] gt1[5] = gt2[5] outDataset.SetGeoTransform(tuple(gt1)) if projection1 
is not None: outDataset.SetProjection(projection1) for k in range(num_bands): outBand = outDataset.GetRasterBand(k+1) outBand.WriteArray(sharpened[k,:,:],0,0) outBand.FlushCache() outDataset = None print 'Result written to %s'%outfile inDataset1 = None inDataset2 = None
def main(): gdal.AllRegister() path = auxil.select_directory('Input directory') if path: os.chdir(path) # input image infile = auxil.select_infile(title='Image file') if infile: inDataset = gdal.Open(infile, GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) else: print 'No geotransform available' return imsr = osr.SpatialReference() imsr.ImportFromWkt(projection) else: return pos = auxil.select_pos(bands) if not pos: return N = len(pos) rasterBands = [] for b in pos: rasterBands.append(inDataset.GetRasterBand(b)) # training algorithm trainalg = auxil.select_integer(1, msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM') if not trainalg: return # training data (shapefile) trnfile = auxil.select_infile(filt='.shp', title='Train shapefile') if trnfile: trnDriver = ogr.GetDriverByName('ESRI Shapefile') trnDatasource = trnDriver.Open(trnfile, 0) trnLayer = trnDatasource.GetLayer() trnsr = trnLayer.GetSpatialRef() else: return tstfile = auxil.select_outfile(filt='.tst', title='Test results file') if not tstfile: print 'No test output' # outfile outfile, outfmt = auxil.select_outfilefmt(title='Classification file') if not outfile: return if trainalg in (2, 3, 4): # class probabilities file, hidden neurons probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file') else: probfile = None if trainalg in (2, 3): L = auxil.select_integer(8, 'Number of hidden neurons') if not L: return # coordinate transformation from training to image projection ct = osr.CoordinateTransformation(trnsr, imsr) # number of classes K = 1 feature = trnLayer.GetNextFeature() while feature: classid = feature.GetField('CLASS_ID') if int(classid) > K: K = int(classid) feature = trnLayer.GetNextFeature() trnLayer.ResetReading() K += 1 print '=========================' print 'supervised classification' print 
'=========================' print time.asctime() print 'image: ' + infile print 'training: ' + trnfile if trainalg == 1: print 'Maximum Likelihood' elif trainalg == 2: print 'Neural Net (Backprop)' elif trainalg == 3: print 'Neural Net (Congrad)' else: print 'Support Vector Machine' # loop through the polygons Gs = [] # train observations ls = [] # class labels classnames = '{unclassified' classids = set() print 'reading training data...' for i in range(trnLayer.GetFeatureCount()): feature = trnLayer.GetFeature(i) classid = str(feature.GetField('CLASS_ID')) classname = feature.GetField('CLASS_NAME') if classid not in classids: classnames += ', ' + classname classids = classids | set(classid) l = [0 for i in range(K)] l[int(classid)] = 1.0 polygon = feature.GetGeometryRef() # transform to same projection as image polygon.Transform(ct) # convert to a Shapely object poly = shapely.wkt.loads(polygon.ExportToWkt()) # transform the boundary to pixel coords in numpy bdry = np.array(poly.boundary) bdry[:, 0] = bdry[:, 0] - gt[0] bdry[:, 1] = bdry[:, 1] - gt[3] GT = np.mat([[gt[1], gt[2]], [gt[4], gt[5]]]) bdry = bdry * np.linalg.inv(GT) # polygon in pixel coords polygon1 = asPolygon(bdry) # raster over the bounding rectangle minx, miny, maxx, maxy = map(int, list(polygon1.bounds)) pts = [] for i in range(minx, maxx + 1): for j in range(miny, maxy + 1): pts.append((i, j)) multipt = MultiPoint(pts) # intersection as list intersection = np.array(multipt.intersection(polygon1), dtype=np.int).tolist() # cut out the bounded image cube cube = np.zeros((maxy - miny + 1, maxx - minx + 1, len(rasterBands))) k = 0 for band in rasterBands: cube[:, :, k] = band.ReadAsArray(minx, miny, maxx - minx + 1, maxy - miny + 1) k += 1 # get the training vectors for (x, y) in intersection: Gs.append(cube[y - miny, x - minx, :]) ls.append(l) polygon = None polygon1 = None feature.Destroy() trnDatasource.Destroy() classnames += '}' m = len(ls) print str(m) + ' training pixel vectors were read in' 
Gs = np.array(Gs) ls = np.array(ls) # stretch the pixel vectors to [-1,1] for ffn maxx = np.max(Gs, 0) minx = np.min(Gs, 0) for j in range(N): Gs[:, j] = 2 * (Gs[:, j] - minx[j]) / (maxx[j] - minx[j]) - 1.0 # random permutation of training data idx = np.random.permutation(m) Gs = Gs[idx, :] ls = ls[idx, :] # setup output datasets driver = gdal.GetDriverByName(outfmt) outDataset = driver.Create(outfile, cols, rows, 1, GDT_Byte) projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(tuple(gt)) if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) if probfile: driver = gdal.GetDriverByName(probfmt) probDataset = driver.Create(probfile, cols, rows, K, GDT_Byte) if geotransform is not None: probDataset.SetGeoTransform(tuple(gt)) if projection is not None: probDataset.SetProjection(projection) probBands = [] for k in range(K): probBands.append(probDataset.GetRasterBand(k + 1)) if tstfile: # train on 2/3 training examples Gstrn = Gs[0:2 * m // 3, :] lstrn = ls[0:2 * m // 3, :] Gstst = Gs[2 * m // 3:, :] lstst = ls[2 * m // 3:, :] else: Gstrn = Gs lstrn = ls if trainalg == 1: classifier = sc.Maxlike(Gstrn, lstrn) elif trainalg == 2: classifier = sc.Ffnbp(Gstrn, lstrn, L) elif trainalg == 3: classifier = sc.Ffncg(Gstrn, lstrn, L) elif trainalg == 4: classifier = sc.Svm(Gstrn, lstrn) print 'training on %i pixel vectors...' % np.shape(Gstrn)[0] start = time.time() result = classifier.train() print 'elapsed time %s' % str(time.time() - start) if result: if trainalg in [2, 3]: cost = np.log10(result) ymax = np.max(cost) ymin = np.min(cost) xmax = len(cost) plt.plot(range(xmax), cost, 'k') plt.axis([0, xmax, ymin - 1, ymax]) plt.title('Log(Cross entropy)') plt.xlabel('Epoch') # classify the image print 'classifying...' 
start = time.time() tile = np.zeros((cols, N)) for row in range(rows): for j in range(N): tile[:, j] = rasterBands[j].ReadAsArray(0, row, cols, 1) tile[:, j] = 2 * (tile[:, j] - minx[j]) / (maxx[j] - minx[j]) - 1.0 cls, Ms = classifier.classify(tile) outBand.WriteArray(np.reshape(cls, (1, cols)), 0, row) if probfile: Ms = np.byte(Ms * 255) for k in range(K): probBands[k].WriteArray(np.reshape(Ms[k, :], (1, cols)), 0, row) outBand.FlushCache() print 'elapsed time %s' % str(time.time() - start) outDataset = None inDataset = None if probfile: for probBand in probBands: probBand.FlushCache() probDataset = None print 'class probabilities written to: %s' % probfile K = lstrn.shape[1] + 1 if (outfmt == 'ENVI') and (K < 19): # try to make an ENVI classification header file hdr = header.Header() headerfile = outfile + '.hdr' f = open(headerfile) line = f.readline() envihdr = '' while line: envihdr += line line = f.readline() f.close() hdr.read(envihdr) hdr['file type'] = 'ENVI Classification' hdr['classes'] = str(K) classlookup = '{0' for i in range(1, 3 * K): classlookup += ', ' + str(str(ctable[i])) classlookup += '}' hdr['class lookup'] = classlookup hdr['class names'] = classnames f = open(headerfile, 'w') f.write(str(hdr)) f.close() print 'thematic map written to: %s' % outfile if trainalg in [2, 3]: print 'please close the cross entropy plot to continue' plt.show() if tstfile: with open(tstfile, 'w') as f: print >> f, 'FFN test results for %s' % infile print >> f, time.asctime() print >> f, 'Classification image: %s' % outfile print >> f, 'Class probabilities image: %s' % probfile print >> f, lstst.shape[0], lstst.shape[1] classes, _ = classifier.classify(Gstst) labels = np.argmax(lstst, axis=1) + 1 for i in range(len(classes)): print >> f, classes[i], labels[i] f.close() print 'test results written to: %s' % tstfile print 'done' else: print 'an error occured' return
def main():
    """Multiscale Gaussian-mixture (EM) clustering of a multispectral image.

    The image is compressed with a discrete wavelet transform, clustered
    with simulated-annealing EM at the coarsest scale, and the class
    memberships are propagated back up through the scales.  All inputs
    come from auxil dialogs; returns None on cancel or EM failure.
    """
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    bands = len(pos)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        # NOTE: rows/cols are repeatedly overwritten below as the image is
        # compressed and re-expanded; class_image keeps the original size
        x0, y0, cols, rows = dims
    else:
        return
    class_image = np.zeros((rows, cols), dtype=np.byte)
    K = auxil.select_integer(6, 'Number of clusters')
    max_scale = auxil.select_integer(2, 'Maximum scaling factor')
    max_scale = min((max_scale, 3))
    min_scale = auxil.select_integer(0, 'Minimum scaling factor')
    min_scale = min((max_scale, min_scale))
    T0 = auxil.select_float(0.5, 'Initial annealing temperature')
    beta = auxil.select_float(0.5, 'Spatial mixing parameter')
    outfile, outfmt = auxil.select_outfilefmt(
        'Select output classification file')
    if not outfile:
        return
    probfile, probfmt = auxil.select_outfilefmt(
        'Select output probability file (optional)')
    print '========================='
    print ' EM clustering'
    print '========================='
    print 'infile: %s' % infile
    print 'clusters: %i' % K
    print 'T0: %f' % T0
    print 'beta: %f' % beta
    start = time.time()
    # read in image and compress (max_scale DWT filter passes per band)
    DWTbands = []
    for b in pos:
        band = inDataset.GetRasterBand(b)
        DWTband = auxil.DWTArray(
            band.ReadAsArray(x0, y0, cols, rows).astype(float), cols, rows)
        for i in range(max_scale):
            DWTband.filter()
        DWTbands.append(DWTband)
    # rows/cols now refer to the compressed (coarsest-scale) image
    rows, cols = DWTbands[0].get_quadrant(0).shape
    G = np.transpose(
        np.array([
            DWTbands[i].get_quadrant(0, float=True).ravel()
            for i in range(bands)
        ]))
    # initialize membership matrix (random, columns normalized to sum 1)
    n = G.shape[0]
    U = np.random.random((K, n))
    den = np.sum(U, axis=0)
    for j in range(K):
        U[j, :] = U[j, :] / den
    # cluster at minimum scale
    try:
        U, Ms, Cs, Ps, pdens = em(G, U, T0, beta, rows, cols)
    except:
        print 'em failed'
        return
    # sort clusters wrt partition density (descending)
    idx = np.argsort(pdens)
    idx = idx[::-1]
    U = U[idx, :]
    # clustering at increasing scales
    for i in range(max_scale - min_scale):
        # expand U and renormalize (zoom can produce small negatives)
        U = np.reshape(U, (K, rows, cols))
        rows = rows * 2
        cols = cols * 2
        U = ndi.zoom(U, (1, 2, 2))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
        # expand the image one DWT level
        # (inner loop variable i shadows the outer scale loop's i; harmless
        #  here because the outer for re-binds i from its iterator)
        for i in range(bands):
            DWTbands[i].invert()
        G = np.transpose(
            np.array([
                DWTbands[i].get_quadrant(0, float=True).ravel()
                for i in range(bands)
            ]))
        # cluster, keeping confidently-labelled pixels (>= 0.90) frozen
        unfrozen = np.where(np.max(U, axis=0) < 0.90)
        try:
            U, Ms, Cs, Ps, pdens = em(G, U, 0.0, beta, rows, cols,
                                      unfrozen=unfrozen)
        except:
            print 'em failed'
            return
    print 'Cluster mean vectors'
    print Ms
    print 'Cluster covariance matrices'
    for k in range(K):
        print 'cluster: %i' % k
        print Cs[k]
    # up-sample class memberships if necessary (clustering stopped above
    # full resolution when min_scale > 0)
    if min_scale > 0:
        U = np.reshape(U, (K, rows, cols))
        f = 2**min_scale
        rows = rows * f
        cols = cols * f
        U = ndi.zoom(U, (1, f, f))
        U = np.reshape(U, (K, rows * cols))
        idx = np.where(U < 0.0)
        U[idx] = 0.0
        den = np.sum(U, axis=0)
        for j in range(K):
            U[j, :] = U[j, :] / den
    # classify: hard labels 1..K from the memberships
    labels = np.byte(np.argmax(U, axis=0) + 1)
    class_image[0:rows, 0:cols] = np.reshape(labels, (rows, cols))
    rows1, cols1 = class_image.shape
    # write to disk
    driver = gdal.GetDriverByName(outfmt)
    outDataset = driver.Create(outfile, cols1, rows1, 1, GDT_Byte)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        # shift origin to the selected spatial subset
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(class_image, 0, 0)
    outBand.FlushCache()
    outDataset = None
    # write class membership probability file if desired (byte-scaled 0..255)
    if probfile:
        driver = gdal.GetDriverByName(probfmt)
        outDataset = driver.Create(probfile, cols, rows, K, GDT_Byte)
        if geotransform is not None:
            outDataset.SetGeoTransform(tuple(gt))
        if projection is not None:
            outDataset.SetProjection(projection)
        for k in range(K):
            probs = np.reshape(U[k, :], (rows, cols))
            probs = np.byte(probs * 255)
            outBand = outDataset.GetRasterBand(k + 1)
            outBand.WriteArray(probs, 0, 0)
            outBand.FlushCache()
        outDataset = None
        print 'class probabilities written to: %s' % probfile
    inDataset = None
    if (outfmt == 'ENVI') and (K < 19):
        # try to make an ENVI classification header file
        hdr = header.Header()
        headerfile = outfile + '.hdr'
        f = open(headerfile)
        line = f.readline()
        envihdr = ''
        while line:
            envihdr += line
            line = f.readline()
        f.close()
        hdr.read(envihdr)
        hdr['file type'] = 'ENVI Classification'
        hdr['classes'] = str(K + 1)
        classlookup = '{0'
        for i in range(1, 3 * (K + 1)):
            classlookup += ', ' + str(str(ctable[i]))
        classlookup += '}'
        hdr['class lookup'] = classlookup
        hdr['class names'] = ['class %i' % i for i in range(K + 1)]
        f = open(headerfile, 'w')
        f.write(str(hdr))
        f.close()
    print 'classification written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    print '--done------------------------'
def main(): gdal.AllRegister() path = auxil.select_directory('Choose working directory') if path: os.chdir(path) infile = auxil.select_infile(title='Select an image') if infile: inDataset = gdal.Open(infile,GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount else: return pos = auxil.select_pos(bands) if not pos: return dims = auxil.select_dims([0,0,cols,rows]) if dims: x0,y0,cols,rows = dims else: return m = auxil.select_integer(1000,'Select training sample size') K = auxil.select_integer(6,'Select number of clusters') outfile, outfmt = auxil.select_outfilefmt() if not outfile: return kernel = auxil.select_integer(1,'Select kernel: 0=linear, 1=Gaussian') print '=========================' print ' kkmeans' print '=========================' print 'infile: '+infile print 'samples: '+str(m) if kernel == 0: print 'kernel: '+'linear' else: print 'kernel: '+'Gaussian' start = time.time() # input data matrix XX = np.zeros((cols*rows,bands)) k = 0 for b in pos: band = inDataset.GetRasterBand(b) band = band.ReadAsArray(x0,y0,cols,rows).astype(float) XX[:,k] = np.ravel(band) k += 1 # training data matrix idx = np.fix(np.random.random(m)*(cols*rows)).astype(np.integer) X = XX[idx,:] print 'kernel matrix...' # uncentered kernel matrix KK, gma = auxil.kernelMatrix(X,kernel=kernel) if gma is not None: print 'gamma: '+str(round(gma,6)) # initial (random) class labels labels = np.random.randint(K,size = m) # iteration change = True itr = 0 onesm = np.mat(np.ones(m,dtype=float)) while change and (itr < 100): change = False U = np.zeros((K,m)) for i in range(m): U[labels[i],i] = 1 M = np.diag(1.0/(np.sum(U,axis=1)+1.0)) MU = np.mat(np.dot(M,U)) Z = (onesm.T)*np.diag(MU*KK*(MU.T)) - 2*KK*(MU.T) Z = np.array(Z) labels1 = (np.argmin(Z,axis=1) % K).ravel() if np.sum(labels1 != labels): change = True labels = labels1 itr += 1 print 'iterations: %i'%itr # classify image print 'classifying...' 
i = 0 A = np.diag(MU*KK*(MU.T)) A = np.tile(A,(cols,1)) class_image = np.zeros((rows,cols),dtype=np.byte) while i < rows: XXi = XX[i*cols:(i+1)*cols,:] KKK,_ = auxil.kernelMatrix(X,XXi,gma=gma,kernel=kernel) Z = A - 2*(KKK.T)*(MU.T) Z= np.array(Z) labels = np.argmin(Z,axis=1).ravel() class_image[i,:] = (labels % K) +1 i += 1 sys.stdout.write("\n") # write to disk driver = gdal.GetDriverByName(outfmt) outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) gt[0] = gt[0] + x0*gt[1] gt[3] = gt[3] + y0*gt[5] outDataset.SetGeoTransform(tuple(gt)) if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) outBand.WriteArray(class_image,0,0) outBand.FlushCache() outDataset = None inDataset = None if (outfmt == 'ENVI') and (K<19): # try to make an ENVI classification header file hdr = header.Header() headerfile = outfile+'.hdr' f = open(headerfile) line = f.readline() envihdr = '' while line: envihdr += line line = f.readline() f.close() hdr.read(envihdr) hdr['file type'] ='ENVI Classification' hdr['classes'] = str(K) classlookup = '{0' for i in range(1,3*K): classlookup += ', '+str(str(ctable[i])) classlookup +='}' hdr['class lookup'] = classlookup hdr['class names'] = [str(i+1) for i in range(K)] f = open(headerfile,'w') f.write(str(hdr)) f.close() print 'result written to: '+outfile print 'elapsed time: '+str(time.time()-start) print '--done------------------------'
def main(): gdal.AllRegister() path = auxil.select_directory("Choose working directory") # path = 'd:\\imagery\\CRC\\Chapters6-7' if path: os.chdir(path) infile = auxil.select_infile(title="Select a class probability image") if infile: inDataset = gdal.Open(infile, GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize classes = inDataset.RasterCount else: return outfile, fmt = auxil.select_outfilefmt() if not outfile: return nitr = auxil.select_integer(3, "Select number of iterations") print "=========================" print " PLR" print "=========================" print "infile: %s" % infile print "iterations: %i" % nitr start = time.time() prob_image = np.zeros((classes, rows, cols)) for k in range(classes): band = inDataset.GetRasterBand(k + 1) prob_image[k, :, :] = band.ReadAsArray(0, 0, cols, rows).astype(float) # compatibility matrix Pmn = np.zeros((classes, classes)) n_samples = (cols - 1) * (rows - 1) samplem = np.reshape(prob_image[:, 0 : rows - 1, 0 : cols - 1], (classes, n_samples)) samplen = np.reshape(prob_image[:, 1:rows, 0 : cols - 1], (classes, n_samples)) sampleu = np.reshape(prob_image[:, 0 : rows - 1, 1:cols], (classes, n_samples)) max_samplem = np.amax(samplem, axis=0) max_samplen = np.amax(samplen, axis=0) max_sampleu = np.amax(sampleu, axis=0) print "estimating compatibility matrix..." for j in range(n_samples): if j % 50000 == 0: print "%i samples of %i" % (j, n_samples) m1 = np.where(samplem[:, j] == max_samplem[j])[0][0] n1 = np.where(samplen[:, j] == max_samplen[j])[0][0] if isinstance(m1, int) and isinstance(n1, int): Pmn[m1, n1] += 1 u1 = np.where(sampleu[:, j] == max_sampleu[j])[0][0] if isinstance(m1, int) and isinstance(u1, int): Pmn[m1, u1] += 1 for j in range(classes): n = np.sum(Pmn[j, :]) if n > 0: Pmn[j, :] /= n print Pmn itr = 0 temp = prob_image * 0 print "label relaxation..." 
while itr < nitr: print "iteration %i" % (itr + 1) Pm = np.zeros(classes) Pn = np.zeros(classes) for i in range(1, rows - 1): if i % 50 == 0: print "%i rows processed" % i for j in range(1, cols - 1): Pm[:] = prob_image[:, i, j] Pn[:] = prob_image[:, i - 1, j] / 4 Pn[:] += prob_image[:, i + 1, j] / 4 Pn[:] += prob_image[:, i, j - 1] / 4 Pn[:] += prob_image[:, i, j + 1] / 4 Pn = np.transpose(Pn) if np.sum(Pm) == 0: Pm_new = Pm else: Pm_new = Pm * (np.dot(Pmn, Pn)) / (np.dot(np.dot(Pm, Pmn), Pn)) temp[:, i, j] = Pm_new prob_image = temp itr += 1 # write to disk prob_image = np.byte(prob_image * 255) driver = gdal.GetDriverByName(fmt) outDataset = driver.Create(outfile, cols, rows, classes, GDT_Byte) projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(geotransform) if projection is not None: outDataset.SetProjection(projection) for k in range(classes): outBand = outDataset.GetRasterBand(k + 1) outBand.WriteArray(prob_image[k, :, :], 0, 0) outBand.FlushCache() outDataset = None inDataset = None print "result written to: " + outfile print "elapsed time: " + str(time.time() - start) print "--done------------------------"
def main():
    """Pansharpen an MS image with a PAN image using the discrete wavelet
    transform (DWT).

    The PAN subset is located through the two geotransforms, compressed to
    MS resolution, the MS bands are registered to it, and each MS band is
    injected into the PAN wavelet pyramid with regression-normalized
    coefficients.  Inputs come from auxil dialogs; returns None on cancel.
    """
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # MS image
    file1 = auxil.select_infile(title='Choose MS image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    num_bands = len(pos1)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
    # PAN image
    file2 = auxil.select_infile(title='Choose PAN image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        bands = inDataset2.RasterCount
    else:
        return
    if bands > 1:
        print 'Must be a single band (panchromatic) image'
        return
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()
    # outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # resolution ratio
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)')
    if not ratio:
        return
    # MS registration band (1-based)
    k1 = auxil.select_integer(1, 'MS band for registration')
    if not k1:
        return
    # fine adjust
    roll = auxil.select_integer(0, 'Fine adjust (-2 ... 2)')
    if roll is None:
        return
    print '========================='
    print ' DWT Pansharpening'
    print '========================='
    print time.asctime()
    print 'MS file: ' + file1
    print 'PAN file: ' + file2
    # image arrays: keep the MS input dtype
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0, 0, 1, 1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands, rows1, cols1)), dtype=dt)
    k = 0
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k, :, :] = band.ReadAsArray(x10, y10, cols1, rows1)
        k += 1
    # if integer assume 11bit quantization otherwise must be byte
    # (fact restores the dynamic range of the byte-stretched result below)
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS, (0, 2**11))
    else:
        fact = 1.0
    # read in corresponding spatial subset of PAN image
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting'
        return
    # upper left corner pixel in PAN, located via the two geotransforms
    gt1 = list(geotransform1)
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10 * gt1[1]
    uly1 = gt1[3] + y10 * gt1[5]
    x20 = int(round(((ulx1 - gt2[0]) / gt2[1])))
    y20 = int(round(((uly1 - gt2[3]) / gt2[5])))
    cols2 = cols1 * ratio
    rows2 = rows1 * ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20, y20, cols2, rows2)
    # if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN, (0, 2**11))
    # compress PAN to resolution of MS image
    panDWT = auxil.DWTArray(PAN, cols2, rows2)
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)
    lines0, samples0 = bn0.shape
    # registration band: k1 is 1-based user input
    bn1 = MS[k1 - 1, :, :]
    # register (and subset) MS image to compressed PAN image
    (scale, angle, shift) = auxil.similarity(bn0, bn1)
    tmp = np.zeros((num_bands, lines0, samples0))
    for k in range(num_bands):
        bn1 = MS[k, :, :]
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k, :, :] = bn2[0:lines0, 0:samples0]
    MS = tmp
    if roll != 0:
        # fine adjust: shift PAN by a few pixels and recompress
        PAN = np.roll(PAN, roll, axis=0)
        PAN = np.roll(PAN, roll, axis=1)
        panDWT = auxil.DWTArray(PAN, cols2, rows2)
        r = ratio
        while r > 1:
            panDWT.filter()
            r /= 2
    # compress pan once more, extract wavelet quadrants, and restore
    panDWT.filter()
    fgpan = panDWT.get_quadrant(1)
    gfpan = panDWT.get_quadrant(2)
    ggpan = panDWT.get_quadrant(3)
    panDWT.invert()
    # output array
    sharpened = np.zeros((num_bands, rows2, cols2), dtype=np.float32)
    aa = np.zeros(3)
    bb = np.zeros(3)
    print 'Wavelet correlations:'
    for i in range(num_bands):
        # make copy of panDWT and inject ith ms band
        msDWT = copy.deepcopy(panDWT)
        msDWT.put_quadrant(MS[i, :, :], 0)
        # compress once more
        msDWT.filter()
        # determine wavelet normalization coefficents by orthogonal
        # regression of each detail quadrant against the PAN details
        ms = msDWT.get_quadrant(1)
        aa[0], bb[0], R = auxil.orthoregress(fgpan.ravel(), ms.ravel())
        Rs = 'Band ' + str(i + 1) + ': %8.3f' % R
        ms = msDWT.get_quadrant(2)
        aa[1], bb[1], R = auxil.orthoregress(gfpan.ravel(), ms.ravel())
        Rs += '%8.3f' % R
        ms = msDWT.get_quadrant(3)
        aa[2], bb[2], R = auxil.orthoregress(ggpan.ravel(), ms.ravel())
        Rs += '%8.3f' % R
        print Rs
        # restore once and normalize wavelet coefficients
        msDWT.invert()
        msDWT.normalize(aa, bb)
        # restore completely and collect result
        r = 1
        while r < ratio:
            msDWT.invert()
            r *= 2
        sharpened[i, :, :] = msDWT.get_quadrant(0)
    # undo the byte-stretch scaling applied to int16 input
    sharpened *= fact
    # write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols2, rows2, num_bands, GDT_Float32)
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)
    gt1 = list(geotransform1)
    # NOTE(review): x10/y10 are MS pixel offsets, so adding x10*ratio (a
    # pixel count) to a map-coordinate origin looks unit-inconsistent --
    # the sibling ATWT script multiplies by the pixel SIZE instead; confirm
    # against the intended georeferencing before changing
    gt1[0] += x10 * ratio
    gt1[3] -= y10 * ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))
    for k in range(num_bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(sharpened[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
    inDataset1 = None
    inDataset2 = None
def main(): gdal.AllRegister() path = auxil.select_directory('Choose input directory') if path: os.chdir(path) # input image infile = auxil.select_infile(title='Choose image file') if infile: inDataset = gdal.Open(infile,GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) else: print 'No geotransform available' return imsr = osr.SpatialReference() imsr.ImportFromWkt(projection) else: return pos = auxil.select_pos(bands) if not pos: return N = len(pos) rasterBands = [] for b in pos: rasterBands.append(inDataset.GetRasterBand(b)) # training data (shapefile) trnfile = auxil.select_infile(filt='.shp',title='Choose train shapefile') if trnfile: trnDriver = ogr.GetDriverByName('ESRI Shapefile') trnDatasource = trnDriver.Open(trnfile,0) trnLayer = trnDatasource.GetLayer() trnsr = trnLayer.GetSpatialRef() else: return # hidden neurons L = auxil.select_integer(8,'number of hidden neurons') if not L: return # outfile outfile, fmt = auxil.select_outfilefmt() if not outfile: return # coordinate transformation from training to image projection ct= osr.CoordinateTransformation(trnsr,imsr) # number of classes feature = trnLayer.GetNextFeature() while feature: classid = feature.GetField('CLASS_ID') feature = trnLayer.GetNextFeature() trnLayer.ResetReading() K = int(classid)+1 print '=========================' print ' ffncg' print '=========================' print time.asctime() print 'image: '+infile print 'training: '+trnfile # loop through the polygons Gs = [] # train observations ls = [] # class labels print 'reading training data...' 
for i in range(trnLayer.GetFeatureCount()): feature = trnLayer.GetFeature(i) classid = feature.GetField('CLASS_ID') l = [0 for i in range(K)] l[int(classid)] = 1.0 polygon = feature.GetGeometryRef() # transform to same projection as image polygon.Transform(ct) # convert to a Shapely object poly = shapely.wkt.loads(polygon.ExportToWkt()) # transform the boundary to pixel coords in numpy bdry = np.array(poly.boundary) bdry[:,0] = bdry[:,0]-gt[0] bdry[:,1] = bdry[:,1]-gt[3] GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]]) bdry = bdry*np.linalg.inv(GT) # polygon in pixel coords polygon1 = asPolygon(bdry) # raster over the bounding rectangle minx,miny,maxx,maxy = map(int,list(polygon1.bounds)) pts = [] for i in range(minx,maxx+1): for j in range(miny,maxy+1): pts.append((i,j)) multipt = MultiPoint(pts) # intersection as list intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist() # cut out the bounded image cube cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands))) k=0 for band in rasterBands: cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1) k += 1 # get the training vectors for (x,y) in intersection: Gs.append(cube[y-miny,x-minx,:]) ls.append(l) polygon = None polygon1 = None feature.Destroy() trnDatasource.Destroy() m = len(ls) print str(m) + ' training pixel vectors were read in' Gs = np.array(Gs) ls = np.array(ls) # stretch the pixel vectors to [-1,1] maxx = np.max(Gs,0) minx = np.min(Gs,0) for j in range(N): Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 # random permutation of training data idx = np.random.permutation(m) Gs = Gs[idx,:] ls = ls[idx,:] # setup output dataset driver = gdal.GetDriverByName(fmt) outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(tuple(gt)) if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) # train 
on 9/10 training examples Gstrn = Gs[0:9*m//10,:] lstrn = ls[0:9*m//10,:] affn = Ffncg(Gstrn,lstrn,L) print 'training on %i pixel vectors...' % np.shape(Gstrn)[0] start = time.time() cost = affn.train(epochs=epochs) print 'elapsed time %s' %str(time.time()-start) if cost is not None: # cost = np.log10(cost) ymax = np.max(cost) ymin = np.min(cost) xmax = len(cost) plt.plot(range(xmax),cost,'k') plt.axis([0,xmax,ymin-1,ymax]) plt.title('Cross entropy') plt.xlabel('Epoch') # classify the image print 'classifying...' tile = np.zeros((cols,N)) for row in range(rows): for j in range(N): tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1) tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 cls, _ = affn.classify(tile) outBand.WriteArray(np.reshape(cls,(1,cols)),0,row) outBand.FlushCache() outDataset = None inDataset = None print 'thematic map written to: ' + outfile print 'please close the cross entropy plot to continue' plt.show() else: print 'an error occured' return print 'submitting cross-validation to multyvac' start = time.time() jid = mv.submit(traintst,Gs,ls,L,_layer='ms_image_analysis') print 'submission time: %s' %str(time.time()-start) start = time.time() job = mv.get(jid) result = job.get_result(job) print 'execution time: %s' %str(time.time()-start) print 'misclassification rate: %f' %np.mean(result) print 'standard deviation: %f' %np.std(result) print '--------done---------------------'
def main(): gdal.AllRegister() path = auxil.select_directory('Choose working directory') if path: os.chdir(path) # SAR image infile = auxil.select_infile(title='Choose SAR image') if infile: inDataset = gdal.Open(infile,GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount else: return # spatial subset x0,y0,rows,cols=auxil.select_dims([0,0,rows,cols]) # number of looks m = auxil.select_integer(5,msg='Number of looks') if not m: return # number of iterations niter = auxil.select_integer(1,msg='Number of iterations') # output file outfile,fmt = auxil.select_outfilefmt() if not outfile: return # process diagonal bands only driver = gdal.GetDriverByName(fmt) if bands == 9: outDataset = driver.Create(outfile,cols,rows,3,GDT_Float32) inimage = np.zeros((3,rows,cols)) band = inDataset.GetRasterBand(1) inimage[0] = band.ReadAsArray(x0,y0,cols,rows) band = inDataset.GetRasterBand(6) inimage[1] = band.ReadAsArray(x0,y0,cols,rows) band = inDataset.GetRasterBand(9) inimage[2] = band.ReadAsArray(x0,y0,cols,rows) elif bands == 4: outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32) inimage = np.zeros((2,rows,cols)) band = inDataset.GetRasterBand(1) inimage[0] = band.ReadAsArray(x0,y0,cols,rows) band = inDataset.GetRasterBand(4) inimage[1] = band.ReadAsArray(x0,y0,cols,rows) else: outDataset = driver.Create(outfile,cols,rows,1,GDT_Float32) inimage = inDataset.GetRasterBand(1) outimage = np.copy(inimage) print '=========================' print ' GAMMA MAP FILTER' print '=========================' print time.asctime() print 'infile: %s'%infile print 'number of looks: %i'%m print 'number of iterations: %i'%niter start = time.time() itr = 0 while itr < niter: print 'iteration %i'%(itr+1) if bands == 9: for k in range(3): outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m) elif bands == 4: for k in range(2): outimage[k] = gamma_filter(k,inimage,outimage,rows,cols,m) else: outimage = 
gamma_filter(0,inimage,outimage,rows,cols,m) itr += 1 geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) gt[0] = gt[0] + x0*gt[1] gt[3] = gt[3] + y0*gt[5] outDataset.SetGeoTransform(tuple(gt)) projection = inDataset.GetProjection() if projection is not None: outDataset.SetProjection(projection) if bands == 9: for k in range(3): outBand = outDataset.GetRasterBand(k+1) outBand.WriteArray(outimage[k],0,0) outBand.FlushCache() elif bands == 4: for k in range(2): outBand = outDataset.GetRasterBand(k+1) outBand.WriteArray(outimage[k],0,0) outBand.FlushCache() else: outBand = outDataset.GetRasterBand(1) outBand.WriteArray(outimage,0,0) outBand.FlushCache() outDataset = None print 'result written to: '+outfile print 'elapsed time: '+str(time.time()-start)
def main(): print '================================' print 'Complex Wishart Change Detection' print '================================' print time.asctime() gdal.AllRegister() path = auxil.select_directory('Choose working directory') if path: os.chdir(path) # first SAR image infile1 = auxil.select_infile(title='Choose first SAR image') if infile1: inDataset1 = gdal.Open(infile1,GA_ReadOnly) cols = inDataset1.RasterXSize rows = inDataset1.RasterYSize bands = inDataset1.RasterCount else: return m = auxil.select_integer(5,msg='Number of looks') if not m: return print 'first filename: %s'%infile1 print 'number of looks: %i'%m # second SAR image infile2 = auxil.select_infile(title='Choose second SAR image') if not infile2: return n = auxil.select_integer(5,msg='Number of looks') if not n: return print 'second filename: %s'%infile2 print 'number of looks: %i'%n # output file outfile,fmt = auxil.select_outfilefmt() if not outfile: return # significance level sig = auxil.select_float(0.01, 'Choose significance level') print 'Signifcane level: %f'%sig start = time.time() print 'co-registering...' 
registerSAR.registerSAR(infile1,infile2,'warp.tif','GTiff') infile2 = 'warp.tif' inDataset2 = gdal.Open(infile2,GA_ReadOnly) cols2 = inDataset2.RasterXSize rows2 = inDataset2.RasterYSize bands2 = inDataset2.RasterCount if (bands != bands2) or (cols != cols2) or (rows != rows2): print 'Size mismatch' return if bands == 9: print 'Quad polarimetry' # C11 (k1) b = inDataset1.GetRasterBand(1) k1 = m*b.ReadAsArray(0,0,cols,rows) # C12 (a1) b = inDataset1.GetRasterBand(2) a1 = b.ReadAsArray(0,0,cols,rows) b = inDataset1.GetRasterBand(3) im = b.ReadAsArray(0,0,cols,rows) a1 = m*(a1 + 1j*im) # C13 (rho1) b = inDataset1.GetRasterBand(4) rho1 = b.ReadAsArray(0,0,cols,rows) b = inDataset1.GetRasterBand(5) im = b.ReadAsArray(0,0,cols,rows) rho1 = m*(rho1 + 1j*im) # C22 (xsi1) b = inDataset1.GetRasterBand(6) xsi1 = m*b.ReadAsArray(0,0,cols,rows) # C23 (b1) b = inDataset1.GetRasterBand(7) b1 = b.ReadAsArray(0,0,cols,rows) b = inDataset1.GetRasterBand(8) im = b.ReadAsArray(0,0,cols,rows) b1 = m*(b1 + 1j*im) # C33 (zeta1) b = inDataset1.GetRasterBand(9) zeta1 = m*b.ReadAsArray(0,0,cols,rows) # C11 (k2) b = inDataset2.GetRasterBand(1) k2 = n*b.ReadAsArray(0,0,cols,rows) # C12 (a2) b = inDataset2.GetRasterBand(2) a2 = b.ReadAsArray(0,0,cols,rows) b = inDataset2.GetRasterBand(3) im = b.ReadAsArray(0,0,cols,rows) a2 = n*(a2 + 1j*im) # C13 (rho2) b = inDataset2.GetRasterBand(4) rho2 = b.ReadAsArray(0,0,cols,rows) b = inDataset2.GetRasterBand(5) im = b.ReadAsArray(0,0,cols,rows) rho2 = n*(rho2 + 1j*im) # C22 (xsi2) b = inDataset2.GetRasterBand(6) xsi2 = n*b.ReadAsArray(0,0,cols,rows) # C23 (b2) b = inDataset2.GetRasterBand(7) b2 = b.ReadAsArray(0,0,cols,rows) b = inDataset2.GetRasterBand(8) im = b.ReadAsArray(0,0,cols,rows) b2 = n*(b2 + 1j*im) # C33 (zeta2) b = inDataset2.GetRasterBand(9) zeta2 = n*b.ReadAsArray(0,0,cols,rows) k3 = k1 + k2 a3 = a1 + a2 rho3 = rho1 + rho2 xsi3 = xsi1 + xsi2 b3 = b1 + b2 zeta3 = zeta1 + zeta2 det1 = k1*xsi1*zeta1 + 2*np.real(a1*b1*np.conj(rho1)) - 
xsi1*(abs(rho1)**2) - k1*(abs(b1)**2) - zeta1*(abs(a1)**2) det2 = k2*xsi2*zeta2 + 2*np.real(a2*b2*np.conj(rho2)) - xsi2*(abs(rho2)**2) - k2*(abs(b2)**2) - zeta2*(abs(a2)**2) det3 = k3*xsi3*zeta3 + 2*np.real(a3*b3*np.conj(rho3)) - xsi3*(abs(rho3)**2) - k3*(abs(b3)**2) - zeta3*(abs(a3)**2) p = 3 f = p**2 cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) rho = 1. - (2.*p**2-1.)*(1./n + 1./m - 1./(n+m))/(6.*p) omega2 = -(p*p/4.)*(1. - 1./rho)**2 + p**2*(p**2-1.)*(1./n**2 + 1./m**2 - 1./(n+m)**2)/(24.*rho**2) elif bands == 4: print 'Dual polarimetry' # C11 (k1) b = inDataset1.GetRasterBand(1) k1 = m*b.ReadAsArray(0,0,cols,rows) # C12 (a1) b = inDataset1.GetRasterBand(2) a1 = b.ReadAsArray(0,0,cols,rows) b = inDataset1.GetRasterBand(3) im = b.ReadAsArray(0,0,cols,rows) a1 = m*(a1 + 1j*im) # C22 (xsi1) b = inDataset1.GetRasterBand(4) xsi1 = m*b.ReadAsArray(0,0,cols,rows) # C11 (k2) b = inDataset2.GetRasterBand(1) k2 = n*b.ReadAsArray(0,0,cols,rows) # C12 (a2) b = inDataset2.GetRasterBand(2) a2 = b.ReadAsArray(0,0,cols,rows) b = inDataset2.GetRasterBand(3) im = b.ReadAsArray(0,0,cols,rows) a2 = n*(a2 + 1j*im) # C22 (xsi2) b = inDataset2.GetRasterBand(4) xsi2 = n*b.ReadAsArray(0,0,cols,rows) k3 = k1 + k2 a3 = a1 + a2 xsi3 = xsi1 + xsi2 det1 = k1*xsi1 - abs(a1)**2 det2 = k2*xsi2 - abs(a2)**2 det3 = k3*xsi3 - abs(a3)**2 p = 2 cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) f = p**2 rho = 1-(2*f-1)*(1./n+1./m-1./(n+m))/(6.*p) omega2 = -f/4.*(1-1./rho)**2 + f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2) elif bands == 1: print 'Single polarimetry' # C11 (k1) b = inDataset1.GetRasterBand(1) k1 = m*b.ReadAsArray(0,0,cols,rows) # C11 (k2) b = inDataset2.GetRasterBand(1) k2 = n*b.ReadAsArray(0,0,cols,rows) k3 = k1 + k2 det1 = k1 det2 = k2 det3 = k3 p = 1 cst = p*((n+m)*np.log(n+m)-n*np.log(n)-m*np.log(m)) f = p**2 rho = 1-(2.*f-1)*(1./n+1./m-1./(n+m))/(6.*p) omega2 = -f/4.*(1-1./rho)**2+f*(f-1)*(1./n**2+1./m**2-1./(n+m)**2)/(24.*rho**2) else: print 'Incorrect number of 
bands' return idx = np.where(det1 <= 0.0) det1[idx] = 0.0001 idx = np.where(det2 <= 0.0) det2[idx] = 0.0001 idx = np.where(det3 <= 0.0) det3[idx] = 0.0001 lnQ = cst+m*np.log(det1)+n*np.log(det2)-(n+m)*np.log(det3) # test statistic Z = -2*rho*lnQ # change probabilty P = (1.-omega2)*stats.chi2.cdf(Z,[f])+omega2*stats.chi2.cdf(Z,[f+4]) P = ndimage.filters.median_filter(P, size = (3,3)) # change map a255 = np.ones((rows,cols),dtype=np.byte)*255 a0 = a255*0 c11 = np.log(k1+0.0001) min1 =np.min(c11) max1 = np.max(c11) c11 = (c11-min1)*255.0/(max1-min1) c11 = np.where(c11<0,a0,c11) c11 = np.where(c11>255,a255,c11) c11 = np.where(P>(1.0-sig),a0,c11) cmap = np.where(P>(1.0-sig),a255,c11) # write to file system driver = gdal.GetDriverByName(fmt) outDataset = driver.Create(outfile,cols,rows,2,GDT_Float32) geotransform = inDataset1.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(geotransform) projection = inDataset1.GetProjection() if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) outBand.WriteArray(Z,0,0) outBand.FlushCache() outBand = outDataset.GetRasterBand(2) outBand.WriteArray(P,0,0) outBand.FlushCache() outDataset = None print 'test statistic and probabilities written to: %s'%outfile basename = os.path.basename(outfile) name, ext = os.path.splitext(basename) outfile=outfile.replace(name,name+'_cmap') outDataset = driver.Create(outfile,cols,rows,3,GDT_Byte) geotransform = inDataset1.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(geotransform) projection = inDataset1.GetProjection() if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) outBand.WriteArray(cmap,0,0) outBand.FlushCache() outBand = outDataset.GetRasterBand(2) outBand.WriteArray(c11,0,0) outBand.FlushCache() outBand = outDataset.GetRasterBand(3) outBand.WriteArray(c11,0,0) outBand.FlushCache() outDataset = None print 'change map image written to: %s'%outfile 
print 'elapsed time: '+str(time.time()-start)
def main(): gdal.AllRegister() path = auxil.select_directory('Input directory') if path: os.chdir(path) # input image infile = auxil.select_infile(title='Image file') if infile: inDataset = gdal.Open(infile,GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) else: print 'No geotransform available' return imsr = osr.SpatialReference() imsr.ImportFromWkt(projection) else: return pos = auxil.select_pos(bands) if not pos: return N = len(pos) rasterBands = [] for b in pos: rasterBands.append(inDataset.GetRasterBand(b)) # training algorithm trainalg = auxil.select_integer(1,msg='1:Maxlike,2:Backprop,3:Congrad,4:SVM') if not trainalg: return # training data (shapefile) trnfile = auxil.select_infile(filt='.shp',title='Train shapefile') if trnfile: trnDriver = ogr.GetDriverByName('ESRI Shapefile') trnDatasource = trnDriver.Open(trnfile,0) trnLayer = trnDatasource.GetLayer() trnsr = trnLayer.GetSpatialRef() else: return tstfile = auxil.select_outfile(filt='.tst', title='Test results file') if not tstfile: print 'No test output' # outfile outfile, outfmt = auxil.select_outfilefmt(title='Classification file') if not outfile: return if trainalg in (2,3,4): # class probabilities file, hidden neurons probfile, probfmt = auxil.select_outfilefmt(title='Probabilities file') else: probfile = None if trainalg in (2,3): L = auxil.select_integer(8,'Number of hidden neurons') if not L: return # coordinate transformation from training to image projection ct= osr.CoordinateTransformation(trnsr,imsr) # number of classes K = 1 feature = trnLayer.GetNextFeature() while feature: classid = feature.GetField('CLASS_ID') if int(classid)>K: K = int(classid) feature = trnLayer.GetNextFeature() trnLayer.ResetReading() K += 1 print '=========================' print 'supervised classification' print 
'=========================' print time.asctime() print 'image: '+infile print 'training: '+trnfile if trainalg == 1: print 'Maximum Likelihood' elif trainalg == 2: print 'Neural Net (Backprop)' elif trainalg ==3: print 'Neural Net (Congrad)' else: print 'Support Vector Machine' # loop through the polygons Gs = [] # train observations ls = [] # class labels classnames = '{unclassified' classids = set() print 'reading training data...' for i in range(trnLayer.GetFeatureCount()): feature = trnLayer.GetFeature(i) classid = str(feature.GetField('CLASS_ID')) classname = feature.GetField('CLASS_NAME') if classid not in classids: classnames += ', '+ classname classids = classids | set(classid) l = [0 for i in range(K)] l[int(classid)] = 1.0 polygon = feature.GetGeometryRef() # transform to same projection as image polygon.Transform(ct) # convert to a Shapely object poly = shapely.wkt.loads(polygon.ExportToWkt()) # transform the boundary to pixel coords in numpy bdry = np.array(poly.boundary) bdry[:,0] = bdry[:,0]-gt[0] bdry[:,1] = bdry[:,1]-gt[3] GT = np.mat([[gt[1],gt[2]],[gt[4],gt[5]]]) bdry = bdry*np.linalg.inv(GT) # polygon in pixel coords polygon1 = asPolygon(bdry) # raster over the bounding rectangle minx,miny,maxx,maxy = map(int,list(polygon1.bounds)) pts = [] for i in range(minx,maxx+1): for j in range(miny,maxy+1): pts.append((i,j)) multipt = MultiPoint(pts) # intersection as list intersection = np.array(multipt.intersection(polygon1),dtype=np.int).tolist() # cut out the bounded image cube cube = np.zeros((maxy-miny+1,maxx-minx+1,len(rasterBands))) k=0 for band in rasterBands: cube[:,:,k] = band.ReadAsArray(minx,miny,maxx-minx+1,maxy-miny+1) k += 1 # get the training vectors for (x,y) in intersection: Gs.append(cube[y-miny,x-minx,:]) ls.append(l) polygon = None polygon1 = None feature.Destroy() trnDatasource.Destroy() classnames += '}' m = len(ls) print str(m) + ' training pixel vectors were read in' Gs = np.array(Gs) ls = np.array(ls) # stretch the pixel vectors 
to [-1,1] for ffn maxx = np.max(Gs,0) minx = np.min(Gs,0) for j in range(N): Gs[:,j] = 2*(Gs[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 # random permutation of training data idx = np.random.permutation(m) Gs = Gs[idx,:] ls = ls[idx,:] # setup output datasets driver = gdal.GetDriverByName(outfmt) outDataset = driver.Create(outfile,cols,rows,1,GDT_Byte) projection = inDataset.GetProjection() geotransform = inDataset.GetGeoTransform() if geotransform is not None: outDataset.SetGeoTransform(tuple(gt)) if projection is not None: outDataset.SetProjection(projection) outBand = outDataset.GetRasterBand(1) if probfile: driver = gdal.GetDriverByName(probfmt) probDataset = driver.Create(probfile,cols,rows,K,GDT_Byte) if geotransform is not None: probDataset.SetGeoTransform(tuple(gt)) if projection is not None: probDataset.SetProjection(projection) probBands = [] for k in range(K): probBands.append(probDataset.GetRasterBand(k+1)) if tstfile: # train on 2/3 training examples Gstrn = Gs[0:2*m//3,:] lstrn = ls[0:2*m//3,:] Gstst = Gs[2*m//3:,:] lstst = ls[2*m//3:,:] else: Gstrn = Gs lstrn = ls if trainalg == 1: classifier = sc.Maxlike(Gstrn,lstrn) elif trainalg == 2: classifier = sc.Ffnbp(Gstrn,lstrn,L) elif trainalg == 3: classifier = sc.Ffncg(Gstrn,lstrn,L) elif trainalg == 4: classifier = sc.Svm(Gstrn,lstrn) print 'training on %i pixel vectors...' % np.shape(Gstrn)[0] start = time.time() result = classifier.train() print 'elapsed time %s' %str(time.time()-start) if result: if trainalg in [2,3]: cost = np.log10(result) ymax = np.max(cost) ymin = np.min(cost) xmax = len(cost) plt.plot(range(xmax),cost,'k') plt.axis([0,xmax,ymin-1,ymax]) plt.title('Log(Cross entropy)') plt.xlabel('Epoch') # classify the image print 'classifying...' 
start = time.time() tile = np.zeros((cols,N)) for row in range(rows): for j in range(N): tile[:,j] = rasterBands[j].ReadAsArray(0,row,cols,1) tile[:,j] = 2*(tile[:,j]-minx[j])/(maxx[j]-minx[j]) - 1.0 cls, Ms = classifier.classify(tile) outBand.WriteArray(np.reshape(cls,(1,cols)),0,row) if probfile: Ms = np.byte(Ms*255) for k in range(K): probBands[k].WriteArray(np.reshape(Ms[k,:],(1,cols)),0,row) outBand.FlushCache() print 'elapsed time %s' %str(time.time()-start) outDataset = None inDataset = None if probfile: for probBand in probBands: probBand.FlushCache() probDataset = None print 'class probabilities written to: %s'%probfile K = lstrn.shape[1]+1 if (outfmt == 'ENVI') and (K<19): # try to make an ENVI classification header file hdr = header.Header() headerfile = outfile+'.hdr' f = open(headerfile) line = f.readline() envihdr = '' while line: envihdr += line line = f.readline() f.close() hdr.read(envihdr) hdr['file type'] ='ENVI Classification' hdr['classes'] = str(K) classlookup = '{0' for i in range(1,3*K): classlookup += ', '+str(str(ctable[i])) classlookup +='}' hdr['class lookup'] = classlookup hdr['class names'] = classnames f = open(headerfile,'w') f.write(str(hdr)) f.close() print 'thematic map written to: %s'%outfile if trainalg in [2,3]: print 'please close the cross entropy plot to continue' plt.show() if tstfile: with open(tstfile,'w') as f: print >>f, 'FFN test results for %s'%infile print >>f, time.asctime() print >>f, 'Classification image: %s'%outfile print >>f, 'Class probabilities image: %s'%probfile print >>f, lstst.shape[0],lstst.shape[1] classes, _ = classifier.classify(Gstst) labels = np.argmax(lstst,axis=1)+1 for i in range(len(classes)): print >>f, classes[i], labels[i] f.close() print 'test results written to: %s'%tstfile print 'done' else: print 'an error occured' return
def main(): gdal.AllRegister() path = auxil.select_directory('Choose working directory') if path: os.chdir(path) # SAR image infile = auxil.select_infile(title='Choose SAR image') if infile: inDataset = gdal.Open(infile, GA_ReadOnly) cols = inDataset.RasterXSize rows = inDataset.RasterYSize bands = inDataset.RasterCount else: return # spatial subset x0, y0, rows, cols = auxil.select_dims([0, 0, rows, cols]) # number of looks m = auxil.select_integer(5, msg='Number of looks') if not m: return # number of iterations niter = auxil.select_integer(1, msg='Number of iterations') # output file outfile, fmt = auxil.select_outfilefmt() if not outfile: return # process diagonal bands only driver = gdal.GetDriverByName(fmt) if bands == 9: outDataset = driver.Create(outfile, cols, rows, 3, GDT_Float32) inimage = np.zeros((3, rows, cols)) band = inDataset.GetRasterBand(1) inimage[0] = band.ReadAsArray(x0, y0, cols, rows) band = inDataset.GetRasterBand(6) inimage[1] = band.ReadAsArray(x0, y0, cols, rows) band = inDataset.GetRasterBand(9) inimage[2] = band.ReadAsArray(x0, y0, cols, rows) elif bands == 4: outDataset = driver.Create(outfile, cols, rows, 2, GDT_Float32) inimage = np.zeros((2, rows, cols)) band = inDataset.GetRasterBand(1) inimage[0] = band.ReadAsArray(x0, y0, cols, rows) band = inDataset.GetRasterBand(4) inimage[1] = band.ReadAsArray(x0, y0, cols, rows) else: outDataset = driver.Create(outfile, cols, rows, 1, GDT_Float32) inimage = inDataset.GetRasterBand(1) outimage = np.copy(inimage) print '=========================' print ' GAMMA MAP FILTER' print '=========================' print time.asctime() print 'infile: %s' % infile print 'number of looks: %i' % m print 'number of iterations: %i' % niter start = time.time() itr = 0 while itr < niter: print 'iteration %i' % (itr + 1) if bands == 9: for k in range(3): outimage[k] = gamma_filter(k, inimage, outimage, rows, cols, m) elif bands == 4: for k in range(2): outimage[k] = gamma_filter(k, inimage, outimage, rows, 
cols, m) else: outimage = gamma_filter(0, inimage, outimage, rows, cols, m) itr += 1 geotransform = inDataset.GetGeoTransform() if geotransform is not None: gt = list(geotransform) gt[0] = gt[0] + x0 * gt[1] gt[3] = gt[3] + y0 * gt[5] outDataset.SetGeoTransform(tuple(gt)) projection = inDataset.GetProjection() if projection is not None: outDataset.SetProjection(projection) if bands == 9: for k in range(3): outBand = outDataset.GetRasterBand(k + 1) outBand.WriteArray(outimage[k], 0, 0) outBand.FlushCache() elif bands == 4: for k in range(2): outBand = outDataset.GetRasterBand(k + 1) outBand.WriteArray(outimage[k], 0, 0) outBand.FlushCache() else: outBand = outDataset.GetRasterBand(1) outBand.WriteArray(outimage, 0, 0) outBand.FlushCache() outDataset = None print 'result written to: ' + outfile print 'elapsed time: ' + str(time.time() - start)
def main():
    """ATWT (a trous wavelet transform) pansharpening of a multispectral
    image with a co-registered panchromatic image.

    Interactive script: the user selects the MS image, bands, a spatial
    subset, the PAN image, the output file, the resolution ratio and the MS
    band used for registration.  Writes the sharpened MS bands as Float32.
    """
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    # MS image
    file1 = auxil.select_infile(title='Choose MS image')
    if file1:
        inDataset1 = gdal.Open(file1, GA_ReadOnly)
        cols = inDataset1.RasterXSize
        rows = inDataset1.RasterYSize
        bands = inDataset1.RasterCount
    else:
        return
    pos1 = auxil.select_pos(bands)
    if not pos1:
        return
    num_bands = len(pos1)
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x10, y10, cols1, rows1 = dims
    else:
        return
    # PAN image
    file2 = auxil.select_infile(title='Choose PAN image')
    if file2:
        inDataset2 = gdal.Open(file2, GA_ReadOnly)
        bands = inDataset2.RasterCount
    else:
        return
    if bands > 1:
        print 'Must be a single band (panchromatic) image'
        return
    geotransform1 = inDataset1.GetGeoTransform()
    geotransform2 = inDataset2.GetGeoTransform()
    # outfile
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    # resolution ratio
    ratio = auxil.select_integer(4, 'Resolution ratio (2 or 4)')
    if not ratio:
        return
    # MS registration band
    k1 = auxil.select_integer(1, 'MS band for registration')
    if not k1:
        return
    print '========================='
    print ' ATWT Pansharpening'
    print '========================='
    print time.asctime()
    print 'MS file: ' + file1
    print 'PAN file: ' + file2
    # read in MS image; probe one pixel to discover the raster's dtype
    band = inDataset1.GetRasterBand(1)
    tmp = band.ReadAsArray(0, 0, 1, 1)
    dt = tmp.dtype
    MS = np.asarray(np.zeros((num_bands, rows1, cols1)), dtype=dt)
    k = 0
    for b in pos1:
        band = inDataset1.GetRasterBand(b)
        MS[k, :, :] = band.ReadAsArray(x10, y10, cols1, rows1)
        k += 1
    # if integer assume 11-bit quantization, otherwise must be byte;
    # fact restores the dynamic range of the sharpened result at the end
    if MS.dtype == np.int16:
        fact = 8.0
        MS = auxil.byteStretch(MS, (0, 2**11))
    else:
        fact = 1.0
    # read in corresponding spatial subset of PAN image
    if (geotransform1 is None) or (geotransform2 is None):
        print 'Image not georeferenced, aborting'
        return
    # upper left corner pixel in PAN (map coords of the MS subset origin)
    gt1 = list(geotransform1)
    gt2 = list(geotransform2)
    ulx1 = gt1[0] + x10 * gt1[1]
    uly1 = gt1[3] + y10 * gt1[5]
    x20 = int(round(((ulx1 - gt2[0]) / gt2[1])))
    y20 = int(round(((uly1 - gt2[3]) / gt2[5])))
    cols2 = cols1 * ratio
    rows2 = rows1 * ratio
    band = inDataset2.GetRasterBand(1)
    PAN = band.ReadAsArray(x20, y20, cols2, rows2)
    # if integer assume 11-bit quantization, otherwise must be byte
    if PAN.dtype == np.int16:
        PAN = auxil.byteStretch(PAN, (0, 2**11))
    # out array
    sharpened = np.zeros((num_bands, rows2, cols2), dtype=np.float32)
    # compress PAN to resolution of MS image using DWT
    panDWT = auxil.DWTArray(PAN, cols2, rows2)
    r = ratio
    while r > 1:
        panDWT.filter()
        r /= 2
    bn0 = panDWT.get_quadrant(0)
    # register (and subset) MS image to compressed PAN image using selected MSband
    lines0, samples0 = bn0.shape
    bn1 = MS[k1 - 1, :, :]
    # register (and subset) MS image to compressed PAN image
    (scale, angle, shift) = auxil.similarity(bn0, bn1)
    tmp = np.zeros((num_bands, lines0, samples0))
    for k in range(num_bands):
        bn1 = MS[k, :, :]
        bn2 = ndii.zoom(bn1, 1.0 / scale)
        bn2 = ndii.rotate(bn2, angle)
        bn2 = ndii.shift(bn2, shift)
        tmp[k, :, :] = bn2[0:lines0, 0:samples0]
    MS = tmp
    # random pixel sample used to estimate the wavelet-detail regression
    smpl = np.random.randint(cols2 * rows2, size=100000)
    print 'Wavelet correlations:'
    # loop over MS bands
    for k in range(num_bands):
        msATWT = auxil.ATWTArray(PAN)
        r = ratio
        while r > 1:
            msATWT.filter()
            r /= 2
        # sample PAN wavelet details
        X = msATWT.get_band(msATWT.num_iter)
        X = X.ravel()[smpl]
        # resize the ms band to scale of the pan image
        ms_band = ndii.zoom(MS[k, :, :], ratio)
        # sample details of MS band
        tmpATWT = auxil.ATWTArray(ms_band)
        r = ratio
        while r > 1:
            tmpATWT.filter()
            r /= 2
        Y = tmpATWT.get_band(msATWT.num_iter)
        Y = Y.ravel()[smpl]
        # get band for injection
        bnd = tmpATWT.get_band(0)
        tmpATWT = None
        aa, bb, R = auxil.orthoregress(X, Y)
        print 'Band ' + str(k + 1) + ': %8.3f' % R
        # inject the filtered MS band
        msATWT.inject(bnd)
        # normalize wavelet components and expand
        msATWT.normalize(aa, bb)
        r = ratio
        while r > 1:
            msATWT.invert()
            r /= 2
        sharpened[k, :, :] = msATWT.get_band(0)
    # rescale dynamic range (undo the byteStretch applied above)
    sharpened *= fact
    msATWT = None
    # write to disk
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols2, rows2, num_bands, GDT_Float32)
    # NOTE(review): the origin shift adds pixel offsets scaled by 'ratio'
    # rather than by the MS pixel size gt1[1]/gt1[5]; this is only correct
    # when the PAN pixel equals one map unit — confirm against intent
    gt1[0] += x10 * ratio
    gt1[3] -= y10 * ratio
    gt1[1] = gt2[1]
    gt1[2] = gt2[2]
    gt1[4] = gt2[4]
    gt1[5] = gt2[5]
    outDataset.SetGeoTransform(tuple(gt1))
    projection1 = inDataset1.GetProjection()
    if projection1 is not None:
        outDataset.SetProjection(projection1)
    for k in range(num_bands):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(sharpened[k, :, :], 0, 0)
        outBand.FlushCache()
    outDataset = None
    print 'Result written to %s' % outfile
    inDataset1 = None
    inDataset2 = None
def main():
    """Kernel PCA of a multispectral image (linear or Gaussian kernel).

    Training samples are either drawn at random (sample size m > 0) or taken
    as 100 k-means cluster centers (m == 0).  The image is projected onto the
    first n kernel principal components and written as Float32 bands; the PC
    variances are plotted at the end.

    NOTE: bare names (zeros, ravel, fix, mat, sum, diag, sqrt, ones, random,
    integer, linalg, kmeans) presumably come from star imports at module
    top, which are not visible in this chunk — verify before refactoring.
    """
    gdal.AllRegister()
    path = auxil.select_directory('Choose working directory')
    if path:
        os.chdir(path)
    infile = auxil.select_infile(title='Select an image')
    if infile:
        inDataset = gdal.Open(infile, GA_ReadOnly)
        cols = inDataset.RasterXSize
        rows = inDataset.RasterYSize
        bands = inDataset.RasterCount
    else:
        return
    pos = auxil.select_pos(bands)
    if not pos:
        return
    dims = auxil.select_dims([0, 0, cols, rows])
    if dims:
        x0, y0, cols, rows = dims
    else:
        return
    m = auxil.select_integer(2000, 'Select sample size (0 for k-means)')
    n = auxil.select_integer(10, 'Select number of eigenvalues')
    outfile, fmt = auxil.select_outfilefmt()
    if not outfile:
        return
    kernel = auxil.select_integer(1, 'Select kernel: 0=linear, 1=Gaussian')
    print '========================='
    print ' kPCA'
    print '========================='
    print 'infile: ' + infile
    print 'samples: ' + str(m)
    if kernel == 0:
        print 'kernel: ' + 'linear'
    else:
        print 'kernel: ' + 'Gaussian'
    start = time.time()
    if kernel == 0:
        # a linear kernel has at most 'bands' nonzero eigenvalues
        n = min(bands, n)
    # construct data design matrices
    # NOTE(review): XX is allocated with 'bands' columns but only len(pos)
    # are filled; when fewer bands are selected the trailing columns stay
    # zero — harmless for dot products/distances, but confirm intent
    XX = zeros((cols * rows, bands))
    k = 0
    for b in pos:
        band = inDataset.GetRasterBand(b)
        band = band.ReadAsArray(x0, y0, cols, rows).astype(float)
        XX[:, k] = ravel(band)
        k += 1
    if m > 0:
        # random sample of m pixel vectors for training
        idx = fix(random.random(m) * (cols * rows)).astype(integer)
        X = XX[idx, :]
    else:
        print 'running k-means on 100 cluster centers...'
        X, _ = kmeans(XX, 100, iter=1)
        m = 100
    print 'centered kernel matrix...'
    # centered kernel matrix; overall/row means kept for centering test data
    K, gma = auxil.kernelMatrix(X, kernel=kernel)
    meanK = sum(K) / (m * m)
    rowmeans = mat(sum(K, axis=0) / m)
    if gma is not None:
        print 'gamma: ' + str(round(gma, 6))
    K = auxil.center(K)
    print 'diagonalizing...'
    # diagonalize: keep only the n largest eigenvalues/eigenvectors
    try:
        w, v = linalg.eigh(K, eigvals=(m - n, m - 1))
        # eigh returns ascending order; reverse to descending
        idx = range(n)
        idx.reverse()
        w = w[idx]
        v = v[:, idx]
        # variance of PCs
        var = w / m
    except linalg.LinAlgError:
        print 'eigenvalue computation failed'
        sys.exit()
    # dual variables (normalized eigenvectors)
    alpha = mat(v) * mat(diag(1 / sqrt(w)))
    print 'projecting...'
    # projecting: one image row at a time
    image = zeros((rows, cols, n))
    for i in range(rows):
        XXi = XX[i * cols:(i + 1) * cols, :]
        KK, gma = auxil.kernelMatrix(X, XXi, kernel=kernel, gma=gma)
        # centering on training data:
        # subtract column means
        colmeans = mat(sum(KK, axis=0) / m)
        onesm = mat(ones(m))
        KK = KK - onesm.T * colmeans
        # subtract row means
        onesc = mat(ones(cols))
        KK = KK - rowmeans.T * onesc
        # add overall mean
        KK = KK + meanK
        # project
        image[i, :, :] = KK.T * alpha
    # write to disk, georeferenced to the chosen spatial subset
    driver = gdal.GetDriverByName(fmt)
    outDataset = driver.Create(outfile, cols, rows, n, GDT_Float32)
    projection = inDataset.GetProjection()
    geotransform = inDataset.GetGeoTransform()
    if geotransform is not None:
        gt = list(geotransform)
        gt[0] = gt[0] + x0 * gt[1]
        gt[3] = gt[3] + y0 * gt[5]
        outDataset.SetGeoTransform(tuple(gt))
    if projection is not None:
        outDataset.SetProjection(projection)
    for k in range(n):
        outBand = outDataset.GetRasterBand(k + 1)
        outBand.WriteArray(image[:, :, k], 0, 0)
        outBand.FlushCache()
    outDataset = None
    inDataset = None
    print 'result written to: ' + outfile
    print 'elapsed time: ' + str(time.time() - start)
    plt.plot(range(1, n + 1), var, 'k-')
    plt.title('kernel PCA')
    plt.xlabel('principal component')
    plt.ylabel('Variance')
    plt.show()
    print '--done------------------------'