def computeDecomp(self, gamma=None, gradual=True):
    ## random initialize if not existing
    if self.M[REG_LOCATION] is None and self.M[AUG_LOCATION] is None:
        self.initialize()
    ## keep track of the iteration information
    iterInfo = OrderedDict(sorted({}.items(), key=lambda t: t[1]))
    lastLL = tensorTools.loglikelihood(self.X, self.M)
    ## projection factor starts at 0 (unless there's no gradual)
    xi = 0 if gradual else 1
    ## if nothing is set, we're just not going to do any hard-thresholding
    if gamma is None:
        gamma = list(np.repeat(0, self.N))
    ## for outer iterations
    for iteration in range(self.maxIters):
        startIter = time.time()
        for n in range(self.N):
            startMode = time.time()
            ## first we calculate the "augmented" tensor matricization
            self.M[AUG_LOCATION].redistribute(n)
            xsubs = self.X.subs[:, n]
            B, Pi, inI1, kktModeViolation1 = self.__solveSignalTensor(xsubs, self.M[AUG_LOCATION].U[n], n)
            ## hard threshold based on the xi and gamma
            thr = xi * gamma[n]
            if thr > 0:
                self.M[REG_LOCATION].U[n] = tensorTools.hardThresholdMatrix(self.M[REG_LOCATION].U[n], thr)
                ## renormalize the mode
                self.M[REG_LOCATION].normalize_mode(n, 1)
                ## recalculate B using the new matrix
                B = np.dot(self.M[REG_LOCATION].U[n], np.diag(self.M[REG_LOCATION].lmbda))
            elapsed1 = time.time() - startMode
            ## now that we are done, we can calculate the new 'unaugmented' matricization
            inI2, kktModeViolation2 = self.__solveAugmentedTensor(xsubs, B, Pi, n)
            elapsed2 = time.time() - startMode
            ll = tensorTools.loglikelihood(self.X, self.M)
            iterInfo[str((iteration, n))] = {"Time": [elapsed1, elapsed2],
                                             "KKTViolation": [kktModeViolation1, kktModeViolation2],
                                             "Iterations": [inI1, inI2],
                                             "LL": ll}
        if gradual:
            ## tighten the projection factor as the log-likelihood stabilizes
            xiTemp = 1 - np.min([1, (np.absolute(lastLL - ll) / np.max(np.absolute([lastLL, ll])))])
            if xiTemp > xi:
                ## take the mean of the two
                xi = (xi + xiTemp) / 2
        print("Iteration {0}: Xi = {1}, dll = {2}, time = {3}".format(iteration, xi, np.abs(lastLL - ll), time.time() - startIter))
        ## converged once the log-likelihood change is small and the projection is (nearly) fully applied
        if np.abs(lastLL - ll) < self.dlTol and xi >= 0.99:
            break
        lastLL = ll
    return iterInfo
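## A minimal, hedged sketch of the hard-thresholding step used above. The actual
## routine is tensorTools.hardThresholdMatrix; the stand-in below only illustrates
## the assumed behavior (zero out factor-matrix entries at or below the threshold,
## which is consistent with how it is applied in computeDecomp and in the
## Limestone baseline below) and is not the library implementation.
import numpy as np

def _hardThresholdMatrixSketch(U, thr):
    """Illustrative stand-in for tensorTools.hardThresholdMatrix (assumption)."""
    U = np.array(U, dtype=float, copy=True)
    U[U <= thr] = 0.0   # drop small (noise-level) entries to promote sparsity
    return U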
seed = sample * 1000
np.random.seed(seed)
## solve the solution
startTime = time.time()
spntf = SP_NTF.SP_NTF(X, R=R, alpha=alpha, maxinner=INNER_ITER, maxiters=MAX_ITER)
Yinfo = spntf.computeDecomp(gamma=gamma)
## calculate all the requested entries
marbleElapse = time.time() - startTime
marbleFMS, marbleFOS, marbleNNZ = calculateValues(TM, spntf.M[SP_NTF.REG_LOCATION])
## run CP-APR from the same seed as a baseline
np.random.seed(seed)
startTime = time.time()
YCP, ycpstats, mstats = CP_APR.cp_apr(X, R=R, maxinner=INNER_ITER, maxiters=MAX_ITER)
cpaprElapse = time.time() - startTime
cpaprFMS, cpaprFOS, cpaprNNZ = calculateValues(TM, YCP)
## hard-threshold the CP-APR factors to obtain the Limestone baseline
for n in range(YCP.ndims()):
    YCP.U[n] = tensorTools.hardThresholdMatrix(YCP.U[n], gamma[n])
limestoneFMS, limestoneFOS, limestoneNNZ = calculateValues(TM, YCP)
sampleResult = {
    "Order": ["Marble", "CPAPR", "Limestone"],
    "FMS": [marbleFMS, cpaprFMS, limestoneFMS],
    "FOS": [marbleFOS, cpaprFOS, limestoneFOS],
    "CompTime": [marbleElapse, cpaprElapse, cpaprElapse],
    "NNZ": [marbleNNZ, cpaprNNZ, limestoneNNZ]
}
data[str(sample)] = sampleResult
with open('results/simulation-{0}.json'.format(exptID), 'w') as outfile:
    json.dump(data, outfile)
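## A small hedged usage sketch: reading back one of the simulation result files
## written above and printing the per-sample comparison. The filename pattern and
## the keys ("Order", "FMS", "FOS", "NNZ") follow the sampleResult dict above;
## the exptID value here is a placeholder assumption.
import json

exptID = 0  # placeholder experiment id (assumption)
with open('results/simulation-{0}.json'.format(exptID), 'r') as infile:
    results = json.load(infile)

for sample, res in results.items():
    for method, fms, fos, nnz in zip(res["Order"], res["FMS"], res["FOS"], res["NNZ"]):
        print("sample {0}, {1}: FMS={2}, FOS={3}, NNZ={4}".format(sample, method, fms, fos, nnz))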