def decompositionFromPool(self, rpool):
    kernel = rpool['kernel_obj']
    self.X = rpool['train_features']
    if 'basis_vectors' in rpool:
        basis_vectors = rpool['basis_vectors']
    else:
        basis_vectors = None
    if "bias" in rpool:
        self.bias = float(rpool["bias"])
    else:
        self.bias = 0.
    if basis_vectors is not None or self.X.shape[1] > self.X.shape[0]:
        #First possibility: subset of regressors has been invoked
        if basis_vectors is not None:
            K_r = kernel.getKM(self.X).T
            Krr = kernel.getKM(basis_vectors)
            svals, evecs, U, Z = decomposition.decomposeSubsetKM(K_r, Krr)
        #Second possibility: dual mode if more attributes than examples
        else:
            K = kernel.getKM(self.X).T
            svals, evecs = decomposition.decomposeKernelMatrix(K)
            U, Z = None, None
    #Third possibility, primal decomposition
    else:
        #Invoking getPrimalDataMatrix adds the bias feature
        X = getPrimalDataMatrix(self.X, self.bias)
        svals, evecs, U = decomposition.decomposeDataMatrix(X.T)
        U, Z = None, None
    return svals, evecs, U, Z
def decompositionFromPool(self, rpool):
    kernel = rpool[data_sources.KERNEL_OBJ]
    self.X = rpool[data_sources.TRAIN_FEATURES]
    if data_sources.BASIS_VECTORS in rpool:
        bvectors = rpool[data_sources.BASIS_VECTORS]
    else:
        bvectors = None
    if "bias" in rpool:
        self.bias = float(rpool["bias"])
    else:
        self.bias = 0.
    if bvectors is not None or self.X.shape[1] > self.X.shape[0]:
        K = kernel.getKM(self.X).T
        #First possibility: subset of regressors has been invoked
        if bvectors is not None:
            svals, evecs, U, Z = decomposition.decomposeSubsetKM(K, bvectors)
        #Second possibility: dual mode if more attributes than examples
        else:
            svals, evecs = decomposition.decomposeKernelMatrix(K)
            U, Z = None, None
    #Third possibility, primal decomposition
    else:
        #Invoking getPrimalDataMatrix adds the bias feature
        X = getPrimalDataMatrix(self.X, self.bias)
        svals, evecs, U = decomposition.decomposeDataMatrix(X.T)
        U, Z = None, None
    return svals, evecs, U, Z
def decompositionFromPool(self, rpool):
    kernel = rpool['kernel_obj']
    self.X = array_tools.as_2d_array(rpool['X'], True)
    if 'basis_vectors' in rpool:
        basis_vectors = array_tools.as_2d_array(rpool['basis_vectors'], True)
        if not self.X.shape[1] == basis_vectors.shape[1]:
            raise Exception("X and basis_vectors have different number of columns")
    else:
        basis_vectors = None
    if "bias" in rpool:
        self.bias = float(rpool["bias"])
    else:
        self.bias = 1.
    if basis_vectors is not None or self.X.shape[1] > self.X.shape[0]:
        #First possibility: subset of regressors has been invoked
        if basis_vectors is not None:
            K_r = kernel.getKM(self.X).T
            Krr = kernel.getKM(basis_vectors)
            svals, evecs, U, Z = decomposition.decomposeSubsetKM(K_r, Krr)
        #Second possibility: dual mode if more attributes than examples
        else:
            K = kernel.getKM(self.X).T
            svals, evecs = decomposition.decomposeKernelMatrix(K)
            U, Z = None, None
    #Third possibility, primal decomposition
    else:
        #Invoking getPrimalDataMatrix adds the bias feature
        X = getPrimalDataMatrix(self.X, self.bias)
        svals, evecs, U = decomposition.decomposeDataMatrix(X.T)
        U, Z = None, None
    return svals, evecs, U, Z
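# Illustrative sketch (not from RLScore): the primal/dual split in the variants above
# amounts to choosing what gets decomposed. With a linear kernel K = X X^T, the SVD of
# the data matrix (primal) and the eigendecomposition of the kernel matrix (dual) carry
# the same nonzero spectrum; the helper below is a hypothetical stand-in written to
# illustrate that equivalence, not the decomposition module used above.
import numpy as np

def _toy_primal_dual_spectra(X):
    """Return the singular values of X computed the primal way and the dual way."""
    # Primal route: economy-sized SVD of the data matrix itself.
    _, svals_primal, _ = np.linalg.svd(X, full_matrices=False)
    # Dual route: eigendecomposition of the positive semidefinite kernel matrix.
    K = X.dot(X.T)
    evals, _ = np.linalg.eigh(K)
    evals = np.maximum(evals, 0.0)            # guard against tiny negative round-off
    svals_dual = np.sqrt(np.sort(evals)[::-1])
    return svals_primal, svals_dual

if __name__ == '__main__':
    X = np.random.rand(5, 3)                  # more examples than features
    sp, sd = _toy_primal_dual_spectra(X)
    print(np.allclose(sp, sd[:len(sp)]))      # True: identical nonzero singular values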
def decompositionFromPool(self, rpool): """Builds decomposition representing the training data from resource pool. Default implementation builds and decomposes the kernel matrix itself (standard case), or the empirical kernel map of the training data, if reduced set approximation is used. Inheriting classes may also re-implement this by decomposing the feature map of the data (e.g. linear kernel with low-dimensional data). @param rpool: resource pool @type rpool: dict @return: svals, evecs, U, Z @rtype: tuple of numpy matrices """ train_X = rpool['X'] kernel = rpool['kernel_obj'] if rpool.has_key('basis_vectors'): basis_vectors = rpool['basis_vectors'] if not train_X.shape[1] == basis_vectors.shape[1]: raise Exception("X and basis_vectors have different number of columns") K_r = kernel.getKM(train_X).T Krr = kernel.getKM(basis_vectors) svals, evecs, U, Z = decomposition.decomposeSubsetKM(K_r, Krr) else: K = kernel.getKM(train_X).T svals, evecs = decomposition.decomposeKernelMatrix(K) U, Z = None, None return svals, evecs, U, Z
def decompositionFromPool(self, rpool):
    K_train = rpool['kernel_matrix']
    if 'basis_vectors' in rpool:
        svals, rsvecs, U, Z = decomposition.decomposeSubsetKM(K_train, rpool['basis_vectors'])
    else:
        svals, rsvecs = decomposition.decomposeKernelMatrix(K_train)
        U, Z = None, None
    return svals, rsvecs, U, Z
def decompositionFromPool(self, rpool):
    K_train = rpool[data_sources.KMATRIX]
    if data_sources.BASIS_VECTORS in rpool:
        svals, rsvecs, U, Z = decomposition.decomposeSubsetKM(K_train, rpool[data_sources.BASIS_VECTORS])
    else:
        svals, rsvecs = decomposition.decomposeKernelMatrix(K_train)
        U, Z = None, None
    return svals, rsvecs, U, Z
def decompositionFromPool(self, rpool):
    K_train = rpool['kernel_matrix']
    if 'basis_vectors' in rpool:
        if not K_train.shape[1] == rpool['basis_vectors'].shape[1]:
            raise Exception("When using basis vectors, both kernel matrices must contain an equal number of columns")
        svals, rsvecs, U, Z = decomposition.decomposeSubsetKM(K_train.T, rpool['basis_vectors'])
    else:
        svals, rsvecs = decomposition.decomposeKernelMatrix(K_train)
        U, Z = None, None
    return svals, rsvecs, U, Z
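# Illustrative sketch (an assumption about semantics, not necessarily what
# decomposition.decomposeKernelMatrix does): decomposing a precomputed symmetric PSD
# kernel matrix is typically an eigendecomposition, with the square roots of the
# eigenvalues playing the role of singular values of the implicit feature map.
import numpy as np

def _toy_decompose_kernel_matrix(K):
    """Hypothetical stand-in: eigendecompose a PSD kernel matrix (illustration only)."""
    evals, evecs = np.linalg.eigh((K + K.T) / 2.0)   # symmetrize against round-off
    evals = np.maximum(evals, 0.0)                   # clip tiny negative eigenvalues
    order = np.argsort(evals)[::-1]                  # largest eigenpairs first
    return np.sqrt(evals[order]), evecs[:, order]

F = np.random.rand(7, 3)
K_train = F.dot(F.T)                                 # a rank-3 PSD kernel matrix
svals, rsvecs = _toy_decompose_kernel_matrix(K_train)
print(np.allclose((rsvecs * svals ** 2).dot(rsvecs.T), K_train))  # True: K reconstructed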
def decompositionFromPool(self, rpool): """Builds decomposition representing the training data from resource pool. Default implementation builds and decomposes the kernel matrix itself (standard case), or the empirical kernel map of the training data, if reduced set approximation is used. Inheriting classes may also re-implement this by decomposing the feature map of the data (e.g. linear kernel with low-dimensional data). @param rpool: resource pool @type rpool: dict @return: svals, evecs, U, Z @rtype: tuple of numpy matrices """ train_X = rpool[data_sources.TRAIN_FEATURES] kernel = rpool[data_sources.KERNEL_OBJ] if rpool.has_key(data_sources.BASIS_VECTORS): bvectors = rpool[data_sources.BASIS_VECTORS] K = kernel.getKM(train_X).T svals, evecs, U, Z = decomposition.decomposeSubsetKM(K, bvectors) else: K = kernel.getKM(train_X).T svals, evecs = decomposition.decomposeKernelMatrix(K) U, Z = None, None return svals, evecs, U, Z
def decompositionFromPool(self, rpool): """Builds decomposition representing the training data from resource pool. Default implementation builds and decomposes the kernel matrix itself (standard case), or the empirical kernel map of the training data, if reduced set approximation is used. Inheriting classes may also re-implement this by decomposing the feature map of the data (e.g. linear kernel with low-dimensional data). @param rpool: resource pool @type rpool: dict @return: svals, evecs, U, Z @rtype: tuple of numpy matrices """ train_X = rpool['train_features'] kernel = rpool['kernel_obj'] if rpool.has_key('basis_vectors'): basis_vectors = rpool['basis_vectors'] K_r = kernel.getKM(train_X).T Krr = kernel.getKM(basis_vectors) svals, evecs, U, Z = decomposition.decomposeSubsetKM(K_r, Krr) else: K = kernel.getKM(train_X).T svals, evecs = decomposition.decomposeKernelMatrix(K) U, Z = None, None return svals, evecs, U, Z