def compute_basis(img_codename, i):
    print("\n--COMPUTING BASIS ELEMENT %d FOR image %s--\n" % (i, img_codename))
    imgpath = '../pickled/' + img_codename
    img = rbepwt.Image()
    img.load_pickle(imgpath)
    #set all coefficients to 0
    curlevel = img.rbepwt.levels + 1
    approxvec = img.rbepwt.region_collection_at_level[curlevel].values
    for lev in range(1, img.rbepwt.levels + 1):
        img.rbepwt.wavelet_details[lev] = np.zeros_like(img.rbepwt.wavelet_details[lev])
    img.rbepwt.region_collection_at_level[curlevel].values = np.zeros_like(approxvec)
    #search where the i-th basis element is
    prevlen = 0
    curlen = len(approxvec)
    curidx = i
    if i >= curlen:
        approx = False
        while curidx >= curlen:
            curlevel -= 1
            prevlen += curlen
            curidx = i - prevlen
            curlen = len(img.rbepwt.wavelet_details[curlevel])
    else:
        approx = True
    #set the appropriate basis element to 1
    #ipdb.set_trace()
    if approx:
        img.rbepwt.region_collection_at_level[curlevel].values[i] = 1
    else:
        img.rbepwt.wavelet_details[curlevel][curidx] = 1
    img.decode_rbepwt()
    return img
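# Hypothetical usage sketch, not part of the original scripts: decode and display a single basis
# element of a previously pickled encoding. The default codename only illustrates the naming
# scheme produced by encode(); point it at a pickle that actually exists under ../pickled/.
def show_basis_element(img_codename='cameraman256-easypath-bior4.4-16levels', i=0):
    basis_img = compute_basis(img_codename, i)
    basis_img.show_decoded(title='Basis element %d' % i)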
def encode_and_write_log():
    outdir = '../wavelet_test/'
    logfile = outdir + 'log'
    levels = 16
    coefs = 512
    wavelets = pywt.wavelist()
    cc = rbepwt.Image()
    cc.read('img/cameraman256.png')
    #cc.read('img/gradient64.jpg')
    cc.segment(scale=200, sigma=2, min_size=10)
    for wav in wavelets:
        print("Encoding image with wavelet %s" % wav)
        start_time = timeit.default_timer()
        cc.encode_rbepwt(levels, wav, 'easypath', euclidean_distance=True)
        elapsed = timeit.default_timer() - start_time
        cc.save_pickle(outdir + 'cameraman-encoded-' + wav)
        cc.rbepwt.threshold_coefs(coefs)
        cc.decode_rbepwt()
        cc.save_pickle(outdir + 'cameraman-' + wav + str(coefs))
        cc.save_decoded(outdir + 'cameraman-' + wav + str(coefs) + 'image.png', title=None)
        psnr = cc.psnr()
        ssim = cc.ssim()
        vsi = cc.vsi()
        haarpsi = cc.haarpsi()
        logline = ",".join((wav, str(elapsed), str(psnr), str(ssim), str(vsi), str(haarpsi)))
        #logline = wav+str(elapsed)
        print(logline)
        outfile = open(logfile, 'a')
        outfile.write(logline + '\n')
        outfile.close()
def generate_epwt():
    n = 4
    mat = 255 * np.random.random((n, n))
    im = rbepwt.Image()
    im.read_array(mat)
    im.encode_epwt(1, 'haar')
    im.rbepwt.region_collection_at_level[1][0].show(show_path=True, px_value=False, path_color='green',
                                                    border_thickness=0.02, alternate_markers=True)
    im.rbepwt.region_collection_at_level[2][0].show(show_path=True, px_value=False, path_color='red',
                                                    border_thickness=0.02)
    print(im.rbepwt.region_collection_at_level[1].points)
    print(im.rbepwt.region_collection_at_level[2].points)
def compute_basis_at_level(img_codename, level, i):
    imgpath = '../pickled/' + img_codename
    img = rbepwt.Image()
    img.load_pickle(imgpath)
    #find the global index offset of the requested level, then delegate to compute_basis
    curlevel = img.rbepwt.levels + 1
    prevlen = 0
    approxvec = img.rbepwt.region_collection_at_level[curlevel].values
    curlen = len(approxvec)
    while curlevel > level:
        prevlen += curlen
        curlevel -= 1
        curlen = len(img.rbepwt.wavelet_details[curlevel])
    return compute_basis(img_codename, prevlen + i)
def find_n_largest_coef(img_codename, level, n):
    """Return the indices of the n largest coefficients at level"""
    imgpath = '../pickled/' + img_codename
    img = rbepwt.Image()
    img.load_pickle(imgpath)
    if level == img.rbepwt.levels + 1:
        maxlevels = img.rbepwt.levels
        vec = img.rbepwt.region_collection_at_level[maxlevels + 1].values
    else:
        vec = img.rbepwt.wavelet_details[level]
    #np.argsort sorts in ascending order, so reverse it to get the largest coefficients first
    sortidx = np.argsort(vec)[::-1]
    return sortidx[:n]
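# Hypothetical usage sketch, not part of the original scripts: chain find_n_largest_coef with
# compute_basis_at_level to display the basis elements behind the n largest coefficients of one
# level. Again, the default codename is only an example of the encode() naming scheme.
def show_largest_basis_elements(img_codename='cameraman256-easypath-bior4.4-16levels', level=16, n=3):
    for idx in find_n_largest_coef(img_codename, level, n):
        basis_img = compute_basis_at_level(img_codename, level, idx)
        basis_img.show_decoded(title='Basis element %d at level %d' % (idx, level))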
def recompute_table(save=None):
    table = pd.DataFrame(columns=['image', 'encoding', 'wavelet', 'levels', 'coefficients', 'psnr', 'ssim',
                                  'vsi', 'haarpsi', 'bits', 'segmentation encoding cost', 'sparse coding cost',
                                  'total encoding cost', 'q index'])
    img_names = ['cameraman256-tbes', 'cameraman256', 'house256', 'peppers256']
    #img_names = ['peppers256']
    encodings = ['easypath', 'gradpath', 'epwt-easypath', 'tensor']
    #encodings = ['gradpath','tensor']
    for thresh in thresholds:
        for imgname in img_names:
            for enc in encodings:
                if enc == 'tensor' and imgname == 'cameraman256-tbes':
                    continue
                print("working on %s with encoding %s and threshold %d" % (imgname, enc, thresh))
                img = rbepwt.Image()
                if enc == 'tensor':
                    levs = '4'
                    #levs = '8'
                else:
                    levs = '16'
                loadstr = savedir + imgname + '-' + enc + '-bior4.4' + '-' + levs + 'levels--' + str(thresh)
                #loadstr = savedir+imgname+'-'+enc+'-haar'+'-'+levs+'levels--'+str(thresh)
                print('Loading pickle: %s ' % loadstr)
                img.load_pickle(loadstr)
                segmcost = 0  #tensor (dwt) encodings carry no segmentation
                if enc != 'tensor':
                    img.segmentation.__build_borders_set__()
                    img.segmentation.compute_encoding()
                    #img.segmentation_method = 'Felzenszwalb-Huttenlocher'
                    #img.segmentation_method = 'TBES'
                    segmcost = img.segmentation.compute_encoding_length()
                psnr = img.psnr()
                ssim = img.ssim()
                vsi = img.vsi()
                haarpsi = img.haarpsi()
                bits = 64
                sparse_coding_cost = img.sparse_coding_cost(bits)
                cost = img.encoding_cost(bits)
                val = img.quality_cost_index(bits)
                table.loc[len(table)] = [imgname.rstrip('256'), enc, 'bior4.4', int(levs), thresh, psnr, ssim,
                                         vsi, haarpsi, bits, segmcost, sparse_coding_cost, cost, val]
                #table.loc[len(table)] = [imgname.rstrip('256'),enc,'haar',int(levs),thresh,psnr,ssim,vsi,haarpsi]
    if save is not None:
        table_file = open(save, 'wb')
        pickle.dump(table, table_file)
        table_file.close()
    return table
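# Hypothetical usage sketch, not part of the original scripts: rebuild the metrics table and
# pickle it. It assumes the module-level thresholds and savedir globals point at the decoded
# pickles referenced above; the output path is only an illustration.
#
#table = recompute_table(save='../pickled/quality_table')
#print(table)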
def threshold_decode(imgpath, thresh, filepath, path_type='easypath', save=True):
    img = rbepwt.Image()
    img.load_pickle(imgpath)
    img.threshold_coefs(thresh)
    if path_type == 'tensor':
        t0 = time.time()
        img.decode_dwt()
        t1 = time.time()
    else:
        t0 = time.time()
        img.decode_rbepwt()
        t1 = time.time()
    print('Decoding took %s seconds' % (t1 - t0))
    if img.segmentation_method == 'tbes':
        filepath += '--tbes'
    if save:
        print("Saving decoded pickle (%d coeffs) as: %s" % (thresh, filepath))
        img.save_pickle(filepath)
def decoded_plots(table, save=False):
    imgnames = ['cameraman256', 'peppers256', 'house256']
    #imgnames = ['peppers256','house256']
    #imgname = 'peppers256'
    #imgname = 'house256'
    #thresholds = [512,1024,2048,4096]
    thresholds = [512]
    for imgname in imgnames:
        #firstsave = True #change to False to save original image and segmentation
        firstsave = False
        for ncoefs in thresholds:
            #for enc in ['easypath','gradpath','epwt-easypath','tensor']:
            #for enc in ['easypath','gradpath']:
            for enc in ['easypath']:
                img = rbepwt.Image()
                if enc == 'tensor':
                    levs = '4'
                    loadstr = savedir + imgname + '-' + enc + '-bior4.4' + '-' + levs + 'levels--' + str(ncoefs)
                else:
                    levs = '16'
                    loadstr = savedir + imgname + '-' + enc + '-bior4.4' + '-' + levs + 'levels-euclidean--' + str(ncoefs)
                img.load_pickle(loadstr)
                if save:
                    fname = "-".join((imgname, enc, 'bior4.4', str(ncoefs), levs)) + '.png'
                    img.save_decoded(export_dir + fname, title=None)
                    if not firstsave:
                        firstsave = True
                        orig_fname = imgname + '.png'
                        img.save(export_dir + orig_fname)
                        seg_fname = "-".join((imgname, "segmentation")) + '.png'
                        img.save_segmentation(title=None, filepath=export_dir + seg_fname)
                else:
                    img.show_decoded(title='')
def encode(filepath, wavelet='bior4.4', levels=16, path_type='easypath', loadsegm=None,
           paths_only_at_first_level=False, save=True):
    filename, ext = os.path.splitext(filepath)
    img = filename.split('/')[-1]
    i = rbepwt.Image()
    i.read(filepath)
    if path_type != 'tensor':
        if loadsegm is None:
            print("Working on segmentation of image %s ..." % filepath)
            t0 = time.time()
            i.segment(scale=200, sigma=2, min_size=10)
            t1 = time.time()
            print('Segmenting took %s seconds' % (t1 - t0))
        else:
            i.load_mat_segmentation(loadsegm)
        #i.segment(method='kmeans',nclusters=30)
    print("Encoding image %s ..." % filepath)
    if path_type == 'tensor':
        t0 = time.time()
        i.encode_dwt(levels, wavelet)
        t1 = time.time()
    else:
        t0 = time.time()
        i.encode_rbepwt(levels, wavelet, path_type=path_type, paths_first_level=paths_only_at_first_level)
        t1 = time.time()
    print('Encoding took %s seconds' % (t1 - t0))
    bonustring = ''
    if paths_only_at_first_level:
        bonustring = '-ponly_first_level'
    if i.segmentation_method == 'tbes':
        bonustring += '-tbes'
    pickled_string = img + bonustring + '-%s-%s-%dlevels' % (path_type, wavelet, levels)
    if save:
        print("Saving encoded pickle as: %s" % (pickleddir + pickled_string))
        i.save_pickle(pickleddir + pickled_string)
    return pickled_string
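# Hypothetical end-to-end sketch, not part of the original scripts: encode an image and then
# threshold and decode it with threshold_decode(). It assumes the module-level pickleddir and
# savedir globals are set and that 'img/cameraman256.png' exists.
def encode_and_decode_example(thresh=512):
    codename = encode('img/cameraman256.png', wavelet='bior4.4', levels=16, path_type='easypath')
    threshold_decode(pickleddir + codename, thresh, savedir + codename + '--' + str(thresh))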
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#This script was used to generate the "difficult" segmentation discussed in the paper

import numpy as np
import matplotlib.pyplot as plt
import rbepwt

n = 50
length = 0.7
z1 = np.ones((n, n // 2))
z2 = np.ones((n, n // 2))
for (i, j), v in np.ndenumerate(z2):
    z2[i, j] = min(1, i / (n * length))
z = np.concatenate((z1, z2), axis=1)
print("greyvalue increment: %f" % (1 / (n * length)))

img = rbepwt.Image()
img.read_array(z)
#img.segment(scale=n*n*5,sigma=0,min_size=10)
img.segment(scale=n * 10, sigma=0, min_size=10)
img.show_segmentation(colorbar=False, border=True)
#img.show(title=None,border=True)
#plt.imshow(z,cmap=plt.cm.gray,interpolation='none')
#plt.show()
def plot(image_name, bits=8):
    #encodings = ['gradpath','tensor']
    for e in encodings:
        qri = np.zeros(len(sparsity))
        if e == 'easypath' or e[:4] == 'tbes':
            #lsty = '-'
            lsty = 'None'
            mrkr = '_'
            lco = 'g'
        elif e == 'gradpath':
            #lsty = '-.'
            lsty = 'None'
            mrkr = '|'
            lco = 'r'
        elif e == 'epwt-easypath':
            #lsty = '--'
            lsty = 'None'
            mrkr = 'x'
            lco = 'b'
        elif e == 'tensor':
            #lsty = ':'
            lsty = 'None'
            lco = 'k'
            mrkr = '.'
        for idx, s in enumerate(sparsity):
            #print("working on %s with encoding %s and threshold %d" % (imgname,enc,thresh))
            img = rbepwt.Image()
            if e == 'tensor':
                levs = '4'
                #levs = '8'
            else:
                levs = '16'
            loadstr = savedir + image_name + '-' + e + '-bior4.4' + '-' + levs + 'levels--' + str(s)
            #loadstr = savedir+imgname+'-'+enc+'-haar'+'-'+levs+'levels--'+str(thresh)
            #print('Loading pickle: %s ' % loadstr)
            img.load_pickle(loadstr)
            img.segmentation_method = 'None(Tensor)'
            segmcost = 0
            if e != 'tensor':
                img.segmentation.__build_borders_set__()
                img.segmentation.compute_encoding()
                img.segmentation_method = 'Felzenszwalb-Huttenlocher'
                #img.segmentation_method = 'TBES'
                segmcost = img.segmentation.compute_encoding_length()
            sparse_coding_cost = img.sparse_coding_cost(bits)
            cost = img.encoding_cost(bits)
            val = img.quality_cost_index(bits)
            qri[idx] = val
        #label the curve so plt.legend() has handles to show
        plt.plot(sparsity, qri, color=lco, linestyle=lsty, marker=mrkr, label=e)  #,markersize=msize,markeredgewidth=2)
        orgmodestr = ('|' + image_name + '|' + img.segmentation_method + '|' + str(img.nonzero_coefs()) +
                      '|' + str(bits) + '|' + str(segmcost) + '|' + str(sparse_coding_cost) + '|' + str(cost) +
                      '|' + str(val) + '|' + str(img.psnr()) + '|' + str(img.haarpsi()) + '|')
        print(orgmodestr)
    plt.xticks(sparsity)
    plt.xlim(420, 5000)
    plt.legend()
    plt.show()
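# Hypothetical usage sketch, not part of the original scripts: plot() reads the module-level
# encodings, sparsity and savedir globals, so they have to be defined first, e.g.:
#
#encodings = ['easypath', 'gradpath', 'epwt-easypath', 'tensor']
#sparsity = [512, 1024, 2048, 4096]
#plot('cameraman256', bits=64)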
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#Here we tested the effect of convolving the decoded values with a Gaussian kernel.
#The results weren't promising and this wasn't included in the paper.

import matplotlib.pyplot as plt
import rbepwt
import numpy as np
import copy

cam = rbepwt.Image()
cam.load_pickle('../decoded_pickles-euclidean/cameraman256-easypath-bior4.4-16levels--512')
sigma = 1

#r = cam.rbepwt.region_collection_at_level[1][1]
#r.filter(0.5)
#rimg = r.get_enclosing_img()
#plt.imshow(rimg,cmap=plt.cm.gray)
#plt.show()

cam.filter(sigma)
haarpsi = cam.haarpsi()
filteredhaarpsi = cam.haarpsi(filtered=True)
print('%40s%10f\n%40s%10f' % ('HaarPSI of decoded image: ', haarpsi,
                              'HaarPSI of filtered decoded image: ', filteredhaarpsi))
cam.show_decoded(title='Decoded')
cam.show_filtered(title='Filtered decoded')
#wav = 'haar'
levels = 12
#img = 'gradient64'
#img = 'sampleimg4'
img = 'house256'
#img = 'cameraman256'
#ext = '.jpg'
ext = '.png'
ptype = 'easypath'
#ptype = 'gradpath'
imgpath = 'img/' + img + ext
pickledpath = '../pickled/' + img + '-%s-%s-%dlevels' % (ptype, wav, levels)
#pickledpath = '../pickled/gradient64-easypath-haar-12levels'
ncoefs = 51

fasti = rbepwt.Image()
#fasti.load_or_compute(imgpath,pickled_string,levels,wav)
fasti.load_pickle(pickledpath)
if threshold:
    fasti.rbepwt.threshold_coefs(ncoefs)
start = timeit.default_timer()
fasti.decode_rbepwt()
tot_time = timeit.default_timer() - start
print("psnr of fast decode: %f " % fasti.psnr())
print("tot time of decode:", tot_time)
if show_decodes:
    fasti.show_decoded(title='Fast Decode')
if full_decode:
    fulli = rbepwt.Image()
    fulli.load_pickle(pickledpath)