def TestSubAdd():
    """Smoke-test base.tensor_sub and base.tensor_add on two equal ranges."""
    lhs = tensor(np.arange(10))
    rhs = tensor(np.arange(10))
    # Same order as before: subtraction result first, then addition.
    for op in (base.tensor_sub, base.tensor_add):
        print(op(lhs, rhs).data)
def random_tensor(shape=None, sparsity=0.5, seed=1234, distributed='normal', normal=None, uniform=None, plot=True):
    """Generate a random tensor of the given shape with a sparsity cutoff.

    Entries are drawn from a normal or uniform distribution, then every value
    at or below the sparsity threshold is zeroed, so roughly ``sparsity`` of
    the entries become zero.

    Parameters
    ----------
    shape : sequence of int
        Shape of the tensor to generate.
    sparsity : float
        Target fraction of zeroed entries (quantile cutoff), in (0, 1).
    seed : int
        Seed for numpy's global RNG.
    distributed : str
        Either 'normal' or 'uniform'.
    normal : tuple (mean, std)
        Required when distributed == 'normal'.
    uniform : tuple (low, high)
        Required when distributed == 'uniform'.
    plot : bool
        When True, show a histogram of the raw samples with the cutoff line.

    Returns
    -------
    tensor
        Project tensor wrapping the sparsified data reshaped to ``shape``.
    """
    np.random.seed(seed)
    tensor_size = prod(shape)
    if distributed == 'normal':
        mean, std = normal
        data = np.random.normal(mean, std, tensor_size)
        # Quantile cutoff: the value below which `sparsity` of the mass lies.
        threshold = norm.ppf(sparsity) * std + mean
        if plot:
            # FIX: `normed=` was removed from matplotlib (3.1+); `density=` is
            # the replacement with identical semantics.
            n, bins, patches = plt.hist(data, 30, density=True, facecolor='blue', alpha=0.5)
            # FIX: mlab.normpdf was removed from matplotlib; scipy.stats.norm.pdf
            # computes the same normal density (norm is already used below).
            y = norm.pdf(bins, mean, std)
            plt.plot(bins, y, 'r--')
            plt.xlabel('Expectation')
            plt.ylabel('Probability')
            # Raw strings keep the LaTeX backslashes literal ('\m' / '\s' are
            # invalid escape sequences in ordinary string literals).
            plt.title(r'Histogram of Normal Distribution:$\mu =' + str(mean) + r'$, $\sigma=' + str(std) + '$')
            plt.axvline(threshold)
            plt.show()
        data[data <= threshold] = 0
    if distributed == 'uniform':
        low, high = uniform
        data = np.random.uniform(low, high, size=tensor_size)
        threshold = (high - low) * sparsity
        if plot:
            n, bins, patches = plt.hist(data, 30, density=True, facecolor='blue', alpha=0.5)
            plt.xlabel('Expectation')
            plt.ylabel('Probability')
            plt.title('Histogram of Uniform Distribution:$low =' + str(low) + '$, $high =' + str(high) + '$')
            plt.axvline(threshold)
            plt.show()
        data[data <= threshold] = 0
    return tensor(data.reshape(shape))
def tensor_times_mat(ten, mat, mode):
    """Mode-`mode` (matrix) product of tensor `ten` with matrix `mat`.

    Multiplies `mat` against the mode-`mode` unfolding of `ten`, then folds
    the result back so axis `mode` has size ``mat.shape[0]``.
    """
    shp = ten.shape
    # FIX: ndims is called as a method elsewhere in this file (X.ndims());
    # without the parens, `shp[mode + 1:ndim]` would slice with a bound method.
    ndim = ten.ndims()
    # float2front presumably moves `mode` to the front of the axis order —
    # TODO confirm against its definition.
    order = float2front(ten.order, mode)
    # FIX: the original referenced an undefined name `X`; the parameter is `ten`.
    newdata = np.dot(mat, unfold(ten, mode).data)
    p = mat.shape[0]
    newshp = [p]
    newshp.extend(shp[0:mode])
    newshp.extend(shp[mode + 1:ndim])
    T = newdata.reshape(newshp)
    # Invert the mode-to-front permutation so axes return to natural order.
    T = np.transpose(T, [order.index(i) for i in range(len(order))])
    return tensor(T)
def tensor_times_mat(X, mat, mode):
    """n-mode product: multiply tensor X by matrix `mat` along axis `mode`.

    Equivalent to folding ``mat @ unfold(X, mode)`` back into a tensor whose
    `mode` axis has size ``mat.shape[0]``.
    """
    shape = X.shape
    ndim = X.ndims()
    # Permutation that the unfolding applied: `mode` moved to the front.
    order = [mode] + list(range(mode)) + list(range(mode + 1, ndim))
    result = np.dot(mat, unfold(X, mode).data)
    rows = mat.shape[0]
    new_shape = [rows] + list(shape[:mode]) + list(shape[mode + 1:ndim])
    folded = result.reshape(new_shape)
    # Undo the front-permutation so the axes come back in natural order.
    folded = np.transpose(folded, [order.index(axis) for axis in range(len(order))])
    return tl.tensor(folded)
def TestNorm():
    """Print a small tensor and the square of its norm."""
    small = tensor(np.arange(3))
    print(small.data)
    squared_norm = base.tennorm(small) ** 2
    print(squared_norm)
from skimage.measure import compare_psnr

# Load the test image and run a Tucker (HOOI) decomposition on it.
im = Image.open("Lenna.png")
print('Image Mode and Size:' + str(im.mode) + ',' + str(im.size))
data = np.array(im)
print('Image Shape:' + str(data.shape))
data_float = data.astype(np.float32)
time0 = time.time()
tucker_ranks = [200, 200, 3]
tensor_data = tensor(data)
U, core = tk.hooi(tensor_data, ranks=tucker_ranks, tol=10e-8, print_enable=True, init='hosvd', plot_enable=True)
data_reconstruction = tk.tucker2tensor(U, core)
print('Image Reconstruction Shape:' + str(data_reconstruction.shape))

def convert2uin8(tensor):
    """Min-max normalize a tensor's data to [0, 255] and cast to uint8."""
    # FIX: operate on a float copy — the original mutated tensor.data in place
    # (im -= ..., im /= ...), silently corrupting the caller's reconstruction.
    im = np.array(tensor.data, dtype=np.float64)
    im -= im.min()
    im /= im.max()
    im *= 255
    return im.astype(np.uint8)

im_reconstruction = convert2uin8(data_reconstruction)
import numpy as np
from tensorcomlib.decomposition import tucker as tk
from tensorcomlib import base
from tensorcomlib import tensor


def TestHooI(data):
    """Run HOOI on `data` and print a slice of the reconstruction vs original."""
    # eps: range(0,1)
    U, core = tk.hooi(data, eps=0.08, tol=1e-10, plot=True)
    modes = list(range(data.ndims()))
    reconstruction = base.tensor_multi_times_mat(core, U, modelist=modes, transpose=False)
    print("Original tensor:", data.data.reshape(1024)[1:10])
    print("Tucker tensor:", reconstruction.data.reshape(1024)[1:10])


if __name__ == '__main__':
    np.random.seed(5)
    B = np.round(np.random.randint(1, 10, (8, 16, 8)))
    print(B.shape)
    data = tensor.tensor(B)
    TestHooI(data)
def sub(X1, X2):
    """Elementwise difference of two tensors, wrapped as a new tensor."""
    difference = X1.data - X2.data
    return tensor(difference)
def teninner(X1, X2):
    """Elementwise product of two same-shaped tensors.

    NOTE(review): despite the name, this returns the elementwise (Hadamard)
    product tensor rather than the scalar inner product (no summation) —
    confirm intent against callers before renaming or changing.

    Raises
    ------
    ValueError
        If the two tensors do not have identical shapes.
    """
    if X1.shape != X2.shape:
        raise ValueError("All the tensor's shape must be same!")
    product = X1.data * X2.data
    return tl.tensor(product, X1.shape)
def tenrands(shp):
    """Tensor of the given shape with entries sampled uniformly from [0, 1)."""
    values = np.random.random(shp)
    return tl.tensor(values, shp)
def tensor_contraction(X1, X2):
    """Contract the last two axes of X1 against the first two axes of X2."""
    contracted = np.tensordot(X1.data, X2.data, axes=2)
    return tl.tensor(contracted)
def tenzeros(shp):
    """Tensor of the given shape filled with zeros.

    Uses np.zeros directly: the original allocated uninitialized memory with
    np.ndarray(shp) and then filled it — np.zeros does both in one idiomatic
    call with the same float64 result.
    """
    return tl.tensor(np.zeros(shp), shp)
def add(X1, X2):
    """Elementwise sum of two tensors, wrapped as a new tensor."""
    total = X1.data + X2.data
    return tensor(total)
def tensor_times_vec(X, vec, mode):
    """Mode-`mode` (vector) product of tensor X with vector `vec`.

    FIX: the original body returned an undefined name `T` (guaranteed
    NameError). This implements the standard mode-n vector product by
    contracting axis `mode` of X with `vec`; the result has one fewer
    dimension than X.
    """
    # np.tensordot contracts X's axis `mode` against vec's only axis.
    T = np.tensordot(X.data, vec, axes=([mode], [0]))
    return tl.tensor(T)
def fold(G, shape, mode):
    """Fold a mode-`mode` unfolding G back into a tensor of shape `shape`.

    FIX: the original body returned an undefined name `X` (guaranteed
    NameError). This implements the inverse of the mode-to-front unfolding
    convention used by tensor_times_mat in this file: reshape to the
    front-permuted shape, then move axis 0 back to position `mode`.
    """
    shape = list(shape)
    # Shape after the unfolding's permutation: `mode` axis first.
    front_shape = [shape[mode]] + shape[:mode] + shape[mode + 1:]
    X = np.moveaxis(G.data.reshape(front_shape), 0, mode)
    return tl.tensor(X)
def tensor_sub(X1, X2):
    """Elementwise difference of two tensors, wrapped as a tl.tensor."""
    difference = X1.data - X2.data
    return tl.tensor(difference)
def tensor_add(X1, X2):
    """Elementwise sum of two tensors, wrapped as a tl.tensor."""
    total = X1.data + X2.data
    return tl.tensor(total)
def TestUnfoldAndFold():
    """Unfold a 3-way tensor along each mode, fold back, and print the shapes."""
    original = tensor(np.arange(24).reshape(3, 4, 2))
    unfoldings = [base.unfold(original, m) for m in range(3)]
    print([u.shape for u in unfoldings])
    refolded = [base.fold(u, original.shape, m) for m, u in enumerate(unfoldings)]
    print([r.shape for r in refolded])
def tenones(shp):
    """Tensor of the given shape filled with ones.

    Uses np.ones directly: the original allocated uninitialized memory with
    np.ndarray(shp) and then filled it — np.ones does both in one idiomatic
    call with the same float64 result.
    """
    return tl.tensor(np.ones(shp), shp)
def kronecker(ten1, ten2):
    """Kronecker product of two tensors."""
    product = np.kron(ten1.data, ten2.data)
    return tl.tensor(product, product.shape)
def tenouter(X1, X2):
    """Outer product of two tensors (tensordot with zero contracted axes)."""
    outer = np.tensordot(X1.data, X2.data, axes=0)
    return tl.tensor(outer)