Example #1
 def projectSlice(self, X, n, iters=100, epsilon=1e-10, convTol=1e-4):
     """ 
     Project a slice, solving for the factors of the nth mode
     
     Parameters
     ------------
     X : the tensor to project onto the basis
     n : the mode to project onto
     iters : the max number of inner iterations
     epsilon : parameter to avoid dividing by zero
     convTol : the convergence tolerance
     
     Output
     -----------
     the projection matrix
     """
     ## Setup the 'initial guess'
     F = []
     for m in range(X.ndims()):
         if m == n:
             F.append(np.random.rand(X.shape[m], self.R))
         else:
             ## double check the shape is the right dimensions
             if (self.basis[m].shape[0] != X.shape[m]):
                 raise ValueError("Shape of the tensor X is incorrect")
             F.append(self.basis[m])
     #print(F)
     M = ktensor.ktensor(np.ones(self.R), F)
     #print(M)
     ## Solve for the subproblem
     M, Phi, totIter, kktMV = CP_APR.solveForModeB(X, M, n, iters, epsilon,
                                                   convTol)
     #print(M)
     ## scale by summing across the rows
     totWeight = np.sum(M.U[n], axis=1)
     print(totWeight.shape)
     zeroIdx = np.where(totWeight < 1e-100)[0]
     if len(zeroIdx) > 0:
         # for the zero ones we're going to evenly distribute
         evenDist = np.repeat(1.0 / self.R, len(zeroIdx) * self.R)
         M.U[n][zeroIdx, :] = evenDist.reshape((len(zeroIdx), self.R))
         totWeight = np.sum(M.U[n], axis=1)
     twMat = np.repeat(totWeight, self.R).reshape(X.shape[n], self.R)
     M.U[n] = M.U[n] / twMat
     #print(M)
     return M.U[n]
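
The scaling at the end of projectSlice turns each row of the mode-n factor into a probability-like vector: rows are divided by their sums, and rows whose sum is numerically zero are first reset to a uniform 1/R distribution so the division never produces NaNs. Below is a minimal, self-contained NumPy sketch of just that normalization step; R and U are illustrative stand-ins for self.R and M.U[n], not the original class attributes.

    import numpy as np

    R = 4
    U = np.vstack([np.random.rand(3, R), np.zeros((2, R))])  # last two rows are all zero

    totWeight = np.sum(U, axis=1)              # per-row weight
    zeroIdx = np.where(totWeight < 1e-100)[0]  # rows with (numerically) zero weight
    if len(zeroIdx) > 0:
        # give zero rows a uniform 1/R distribution before dividing
        U[zeroIdx, :] = 1.0 / R
        totWeight = np.sum(U, axis=1)

    U = U / totWeight[:, np.newaxis]           # broadcasting replaces the repeat/reshape of twMat
    assert np.allclose(U.sum(axis=1), 1.0)     # every row now sums to one
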
Example #2
 def projectSlice(self, X, n, iters=10, epsilon=1e-10, convTol=1e-4):
     """ 
     Project a slice, solving for the factors of the nth mode
     
     Parameters
     ------------
     X : the tensor to project onto the basis
     n : the mode to project onto
     iters : the max number of inner iterations
     epsilon : parameter to avoid dividing by zero
     convTol : the convergence tolerance
     
     Output
     -----------
     the projection matrix
     """
     ## Setup the 'initial guess'
     F = []
     for m in range(X.ndims()):
         if m == n:
             F.append(np.random.rand(X.shape[m], self.R))
         else:
             ## double check the shape is the right dimensions
             if (self.basis[m].shape[0] != X.shape[m]):
                 raise ValueError("Shape of the tensor X is incorrect")
             F.append(self.basis[m])
     M = ktensor.ktensor(np.ones(self.R), F)
     ## Solve for the subproblem
     M, Phi, totIter, kktMV = CP_APR.solveForModeB(X, M, n, iters, epsilon, convTol)
     ## scale by summing across the rows
     totWeight = np.sum(M.U[n], axis=1)
     zeroIdx = np.where(totWeight < 1e-100)[0]
     if len(zeroIdx) > 0:
         # for the zero ones we're going to evenly distribute
         evenDist = np.repeat(1.0 / self.R, len(zeroIdx)*self.R)
         M.U[n][zeroIdx, :] = evenDist.reshape((len(zeroIdx), self.R))
         totWeight = np.sum(M.U[n], axis=1)
     twMat = np.repeat(totWeight, self.R).reshape(X.shape[n], self.R)
     M.U[n] = M.U[n] / twMat
     return M.U[n]
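
In both excerpts the initial guess M is a rank-R Kruskal (CP) tensor: unit weights plus one factor matrix per mode, with every factor except mode n pinned to the learned basis. The following NumPy sketch shows what that object represents by reconstructing the full tensor as a sum of R rank-one outer products; it is an illustration only, not the ktensor class used above.

    import numpy as np

    R = 2
    shape = (4, 3, 5)                    # a small 3-way example
    lam = np.ones(R)                     # unit weights, as in ktensor.ktensor(np.ones(self.R), F)
    F = [np.random.rand(d, R) for d in shape]

    # Full reconstruction: sum over r of lam[r] * (a_r outer b_r outer c_r)
    full = np.zeros(shape)
    for r in range(R):
        full += lam[r] * np.einsum('i,j,k->ijk', F[0][:, r], F[1][:, r], F[2][:, r])

    print(full.shape)                    # (4, 3, 5)
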
Example #3
 def solveUnsharedMode(self, mode, isConverged):
     """ 
     Solve the unshared mode problem
     This is simply the same as the MM approach for CP-APR
     
     Parameters
     ----------
     mode : a length-2 array containing the tensor index i in position 0 and the mode n in position 1
     isConverged : passing along the convergence parameter
     """
     i = mode[0]
     n = mode[1]
     ## Shift the weight in factorization M(i) from lambda_i to mode n
     self.M[i].redistribute(n)
     self.M[i], Phi, totIter, kktModeViolation = CP_APR.solveForModeB(
         self.X[i], self.M[i], n, self.maxInnerIters, self.epsilon, self.tol)
     if totIter > 0:
         isConverged = False
     # Shift the weight from mode n back to lambda
     self.M[i].normalize_mode(n, 1)
     ## Normalize the lambda to all the others
     self.shareLambda(i)
     return Phi, totIter, kktModeViolation, isConverged
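
The redistribute / normalize_mode pair brackets the mode-n solve: the component weights lambda are first folded into the columns of mode n's factor matrix, the subproblem is solved, and the weights are then pulled back out as column L1 norms. Here is a minimal NumPy sketch of that bookkeeping; lam and U are illustrative stand-ins, not the ktensor API.

    import numpy as np

    R = 3
    lam = np.array([2.0, 0.5, 1.0])     # component weights (lambda)
    U = np.random.rand(6, R)            # factor matrix for the unshared mode

    # redistribute(n): fold lambda into the columns of the mode-n factor
    U = U * lam
    lam = np.ones(R)

    # ... the mode-n subproblem (CP_APR.solveForModeB) would run here ...

    # normalize_mode(n, 1): pull the weights back out as column L1 norms
    colNorms = np.sum(np.abs(U), axis=0)
    U = U / colNorms
    lam = colNorms
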