def testComputeR(self):
    U = numpy.random.rand(10, 5)
    V = numpy.random.rand(15, 5)
    Z = U.dot(V.T)

    u = 1.0
    r = SparseUtilsCython.computeR(U, V, u, indsPerRow=1000)
    tol = 0.1
    self.assertTrue(numpy.linalg.norm(Z.max(1) - r)/numpy.linalg.norm(Z.max(1)) < tol)

    u = 0.0
    r = SparseUtilsCython.computeR(U, V, u, indsPerRow=1000)
    self.assertTrue(numpy.linalg.norm(Z.min(1) - r)/numpy.linalg.norm(Z.min(1)) < tol)

    u = 0.3
    r = SparseUtilsCython.computeR(U, V, u, indsPerRow=1000)
    r2 = numpy.percentile(Z, u*100.0, 1)
    #nptst.assert_array_almost_equal(r, r2, 2)
    self.assertTrue(numpy.linalg.norm(r - r2)/numpy.linalg.norm(r) < tol)

    #Try a larger matrix
    U = numpy.random.rand(100, 5)
    V = numpy.random.rand(105, 5)
    Z = U.dot(V.T)

    r = SparseUtilsCython.computeR(U, V, u)
    r2 = numpy.percentile(Z, u*100.0, 1)
    self.assertTrue(numpy.linalg.norm(r - r2) < 0.5)
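For reference, computeR estimates the per-row quantile of the score matrix Z = UV^T without forming Z in full: with u = 1.0 it approaches the row maximum, with u = 0.0 the row minimum, and in general the (u*100)-th percentile, which is what the test above checks. A minimal numpy sketch of the same idea (the function name and sampling scheme here are hypothetical, not the Cython implementation):

import numpy

def sampledRowQuantile(U, V, u, indsPerRow=50):
    # Hypothetical re-implementation of the idea behind computeR:
    # estimate each row's quantile from a random sample of columns
    # rather than computing the full score matrix Z = U.dot(V.T).
    n = V.shape[0]
    r = numpy.zeros(U.shape[0])
    for i in range(U.shape[0]):
        colInds = numpy.random.choice(n, min(indsPerRow, n), replace=False)
        r[i] = numpy.percentile(U[i, :].dot(V[colInds, :].T), u * 100.0)
    return r

# With indsPerRow close to n this approaches numpy.percentile(Z, u*100.0, 1).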
def localAUCApprox(positiveArray, U, V, w, numAucSamples=50, r=None, allArray=None):
    """
    Compute the estimated local AUC for the score functions UV^T relative to X
    with quantile w. The AUC is computed using positiveArray which is a tuple
    (indPtr, colInds) assuming allArray is None. If allArray is not None then
    positive items are chosen from positiveArray and negative ones are chosen
    to complement allArray.
    """
    if type(positiveArray) != tuple:
        positiveArray = SparseUtils.getOmegaListPtr(positiveArray)

    indPtr, colInds = positiveArray

    U = numpy.ascontiguousarray(U)
    V = numpy.ascontiguousarray(V)

    if r is None:
        r = SparseUtilsCython.computeR(U, V, w, numAucSamples)

    if allArray is None:
        return MCEvaluatorCython.localAUCApprox(indPtr, colInds, indPtr, colInds, U, V, numAucSamples, r)
    else:
        allIndPtr, allColInd = allArray
        return MCEvaluatorCython.localAUCApprox(indPtr, colInds, allIndPtr, allColInd, U, V, numAucSamples, r)
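A hedged usage sketch for the method above: positiveArray may be a sparse matrix or an (indPtr, colInds) tuple, since the type check converts the former. The data generation reuses SparseUtils.generateSparseBinaryMatrix as in the profiling example later in this section; treat the call as illustrative rather than canonical:

# Sketch only: assumes this repository's SparseUtils is importable and that
# the method above is a static method of MCEvaluator, as other snippets
# in this section suggest.
m, n, k = 100, 150, 5
X, U, s, V = SparseUtils.generateSparseBinaryMatrix((m, n), k, csarray=True, verbose=True)
u = 0.1
w = 1 - u  # concentrate the AUC on the top 10% of scores per row
auc = MCEvaluator.localAUCApprox(X, U, V, w, numAucSamples=100)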
def localAucsLmbdas(args):
    trainX, testX, testOmegaList, learner = args

    (m, n) = trainX.shape

    localAucs = numpy.zeros(learner.lmbdas.shape[0])

    for j, lmbda in enumerate(learner.lmbdas):
        learner.lmbda = lmbda

        U, V = learner.learnModel(trainX)

        r = SparseUtilsCython.computeR(U, V, 1 - learner.u, learner.numAucSamples)
        localAucs[j] = MCEvaluator.localAUCApprox(testX, U, V, testOmegaList, learner.numAucSamples, r)
        logging.debug("Local AUC: " + str(localAucs[j]) + " with k = " + str(learner.k) + " and lmbda= " + str(learner.lmbda))

    return localAucs
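Functions like this that unpack a single args tuple are typically mapped over a process pool during model selection. A hedged sketch of such a call (the pool usage is an assumption, not shown in the snippet, and requires that the tuple contents are picklable):

import multiprocessing

# Hypothetical: evaluate several parameter sets in parallel, one
# (trainX, testX, testOmegaList, learner) tuple per task.
paramList = [(trainX, testX, testOmegaList, learner)]
pool = multiprocessing.Pool(processes=2)
allLocalAucs = pool.map(localAucsLmbdas, paramList)
pool.close()
pool.join()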
def localAUCApprox2(X, U, V, w, numAucSamples=50, omegaList=None):
    """
    Compute the estimated local AUC for the score functions UV^T relative to X
    with quantile w.
    """
    #For now let's compute the full matrix
    Z = U.dot(V.T)

    localAuc = numpy.zeros(X.shape[0])
    allInds = numpy.arange(X.shape[1])

    U = numpy.ascontiguousarray(U)
    V = numpy.ascontiguousarray(V)

    r = SparseUtilsCython.computeR(U, V, w, numAucSamples)

    if omegaList is None:
        omegaList = SparseUtils.getOmegaList(X)

    for i in range(X.shape[0]):
        omegai = omegaList[i]
        omegaBari = numpy.setdiff1d(allInds, omegai, assume_unique=True)

        if omegai.shape[0] * omegaBari.shape[0] != 0:
            partialAuc = 0

            for j in range(numAucSamples):
                # Sample a (positive, negative) pair uniformly by drawing one
                # index into the |omegai| x |omegaBari| grid of pairs, then
                # decoding it with integer division and modulus.
                ind = numpy.random.randint(omegai.shape[0] * omegaBari.shape[0])
                p = omegai[int(ind / omegaBari.shape[0])]
                q = omegaBari[ind % omegaBari.shape[0]]

                if Z[i, p] > Z[i, q] and Z[i, p] > r[i]:
                    partialAuc += 1

            localAuc[i] = partialAuc / float(numAucSamples)

    localAuc = localAuc.mean()

    return localAuc
def localAUC(positiveArray, U, V, w, numRowInds=None):
    """
    Compute the local AUC for the score functions UV^T relative to X with
    quantile w.
    """
    if numRowInds is None:
        numRowInds = V.shape[0]

    if type(positiveArray) != tuple:
        positiveArray = SparseUtils.getOmegaListPtr(positiveArray)

    #For now let's compute the full matrix
    Z = U.dot(V.T)

    r = SparseUtilsCython.computeR(U, V, w, numRowInds)

    localAuc = numpy.zeros(U.shape[0])
    allInds = numpy.arange(V.shape[0])
    indPtr, colInds = positiveArray

    for i in range(U.shape[0]):
        omegai = colInds[indPtr[i]:indPtr[i + 1]]
        omegaBari = numpy.setdiff1d(allInds, omegai, assume_unique=True)

        if omegai.shape[0] * omegaBari.shape[0] != 0:
            partialAuc = 0

            for p in omegai:
                for q in omegaBari:
                    if Z[i, p] > Z[i, q] and Z[i, p] > r[i]:
                        partialAuc += 1

            localAuc[i] = partialAuc / float(omegai.shape[0] * omegaBari.shape[0])

    localAuc = localAuc.mean()

    return localAuc
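The doubly nested loop above computes, for each row i, the fraction of (positive, negative) item pairs (p, q) with Z[i, p] > Z[i, q] and Z[i, p] > r[i]; the local AUC is the mean of these fractions over rows. A vectorized sketch of the same per-row quantity (a hypothetical helper, not part of the repository):

import numpy

def localAucRow(z, omegai, ri):
    # z: score vector for one row of Z; omegai: array of indices of
    # positive items; ri: this row's quantile threshold from computeR.
    omegaBari = numpy.setdiff1d(numpy.arange(z.shape[0]), omegai, assume_unique=True)
    zp = z[omegai]
    # Count pairs where the positive score beats the negative score
    # and also exceeds the threshold ri.
    wins = ((zp[:, None] > z[omegaBari][None, :]) & (zp[:, None] > ri)).sum()
    return wins / float(omegai.shape[0] * omegaBari.shape[0])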
def profileLocalAucApprox(self):
    m = 500
    n = 1000
    k = 10
    X, U, s, V = SparseUtils.generateSparseBinaryMatrix((m, n), k, csarray=True, verbose=True)

    u = 0.1
    w = 1 - u
    numAucSamples = 200

    omegaList = SparseUtils.getOmegaList(X)
    r = SparseUtilsCython.computeR(U, V, w, numAucSamples)

    numRuns = 10

    def run():
        for i in range(numRuns):
            MCEvaluator.localAUCApprox(X, U, V, omegaList, numAucSamples, r)

    ProfileUtils.profile('run()', globals(), locals())
k = 20
X = SparseUtils.generateSparseBinaryMatrix((m, n), k, 0.95)
logging.debug("Number of non zero elements: " + str(X.nnz))

lmbda = 0.0
numAucSamples = 1000
u = 0.1
sigma = 1
nu = 1
nuBar = 1
project = False

omegaList = SparseUtils.getOmegaList(X)

U = numpy.random.rand(m, k)
V = numpy.random.rand(n, k)

r = SparseUtilsCython.computeR(U, V, 1 - u, numAucSamples)

numPoints = 50
sampleSize = 10
numAucSamplesList = numpy.linspace(1, 50, numPoints)
norms = numpy.zeros(numPoints)
originalU = U.copy()

for s in range(sampleSize):
    print(s)
    i = numpy.random.randint(m)
    rowInds = numpy.array([i], numpy.uint)

    vec1 = derivativeUi(X, U, V, omegaList, i, lmbda, r)
    vec1 = vec1/numpy.linalg.norm(vec1)
        # Fragment: these lines sit inside loops over sample-count index i and
        # repetition index j; the elided lines just before presumably computed
        # Z = U.dot(V.T) and r = computeR(U, V, aucSamples, numpy.mean) for
        # the mean case.
        rReal = numpy.mean(Z, 1)
        errors[0, i, j] = numpy.linalg.norm(rReal - r)

        r = computeR(U, V, aucSamples, numpy.median)
        rReal = numpy.median(Z, 1)
        errors[1, i, j] = numpy.linalg.norm(rReal - r)

        r = computeR(U, V, aucSamples, numpy.min, 1)
        rReal = numpy.min(Z, 1)
        errors[2, i, j] = numpy.linalg.norm(rReal - r)

        r = computeR(U, V, aucSamples, numpy.max, 1)
        rReal = numpy.max(Z, 1)
        errors[3, i, j] = numpy.linalg.norm(rReal - r)

        r = SparseUtilsCython.computeR(U, V, w, aucSamples)
        rReal = numpy.percentile(Z, w*100.0, 1)
        errors[4, i, j] = numpy.linalg.norm(rReal - r)

meanErrors = numpy.mean(errors, 2)
print(meanErrors)

plt.plot(numAucSamples, meanErrors[0, :], label="mean")
plt.plot(numAucSamples, meanErrors[1, :], label="median")
plt.plot(numAucSamples, meanErrors[2, :], label="min")
plt.plot(numAucSamples, meanErrors[3, :], label="max")
plt.plot(numAucSamples, meanErrors[4, :], label="u=0.1")
plt.legend()
plt.show()
def run():
    for i in range(numRuns):
        SparseUtilsCython.computeR(U, V, w, indsPerRow)
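U, V, w, indsPerRow and numRuns are free variables supplied by the enclosing profiling method; a self-contained setup in the style of profileLocalAucApprox above might look like this (all values are illustrative assumptions):

import numpy

# Hypothetical setup mirroring the profiling examples in this section.
m, n, k = 500, 1000, 10
U = numpy.ascontiguousarray(numpy.random.rand(m, k))
V = numpy.ascontiguousarray(numpy.random.rand(n, k))
w = 0.9
indsPerRow = 1000
numRuns = 10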
def recordResults(
    self,
    muU,
    muV,
    trainMeasures,
    testMeasures,
    loopInd,
    rowSamples,
    indPtr,
    colInds,
    testIndPtr,
    testColInds,
    allIndPtr,
    allColInds,
    gi,
    gp,
    gq,
    trainX,
    startTime,
):
    sigmaU = self.getSigma(loopInd, self.alpha, muU.shape[0])
    sigmaV = self.getSigma(loopInd, self.alpha, muU.shape[0])

    r = SparseUtilsCython.computeR(muU, muV, self.w, self.numRecordAucSamples)
    objArr = self.objectiveApprox((indPtr, colInds), muU, muV, r, gi, gp, gq, full=True)

    if trainMeasures is None:
        trainMeasures = []
    trainMeasures.append(
        [
            objArr.sum(),
            MCEvaluator.localAUCApprox((indPtr, colInds), muU, muV, self.w, self.numRecordAucSamples, r),
            time.time() - startTime,
            loopInd,
        ]
    )

    printStr = "iter " + str(loopInd) + ":"
    printStr += " sigmaU=" + str("%.4f" % sigmaU)
    printStr += " sigmaV=" + str("%.4f" % sigmaV)
    printStr += " train: obj~" + str("%.4f" % trainMeasures[-1][0])
    printStr += " LAUC~" + str("%.4f" % trainMeasures[-1][1])

    if testIndPtr is not None:
        testMeasuresRow = []
        testMeasuresRow.append(
            self.objectiveApprox(
                (testIndPtr, testColInds), muU, muV, r, gi, gp, gq, allArray=(allIndPtr, allColInds)
            )
        )
        testMeasuresRow.append(
            MCEvaluator.localAUCApprox(
                (testIndPtr, testColInds),
                muU,
                muV,
                self.w,
                self.numRecordAucSamples,
                r,
                allArray=(allIndPtr, allColInds),
            )
        )
        testOrderedItems = MCEvaluatorCython.recommendAtk(muU, muV, numpy.max(self.recommendSize), trainX)

        printStr += " validation: obj~" + str("%.4f" % testMeasuresRow[0])
        printStr += " LAUC~" + str("%.4f" % testMeasuresRow[1])

        # recommendSize may be an iterable of cutoffs or a single scalar;
        # the except branches handle the scalar case.
        try:
            for p in self.recommendSize:
                f1Array, orderedItems = MCEvaluator.f1AtK(
                    (testIndPtr, testColInds), testOrderedItems, p, verbose=True
                )
                testMeasuresRow.append(f1Array[rowSamples].mean())
        except:
            f1Array, orderedItems = MCEvaluator.f1AtK(
                (testIndPtr, testColInds), testOrderedItems, self.recommendSize, verbose=True
            )
            testMeasuresRow.append(f1Array[rowSamples].mean())

        printStr += " f1@" + str(self.recommendSize) + "=" + str("%.4f" % testMeasuresRow[-1])

        try:
            for p in self.recommendSize:
                mrr, orderedItems = MCEvaluator.mrrAtK((testIndPtr, testColInds), testOrderedItems, p, verbose=True)
                testMeasuresRow.append(mrr[rowSamples].mean())
        except:
            mrr, orderedItems = MCEvaluator.mrrAtK(
                (testIndPtr, testColInds), testOrderedItems, self.recommendSize, verbose=True
            )
            testMeasuresRow.append(mrr[rowSamples].mean())

        printStr += " mrr@" + str(self.recommendSize) + "=" + str("%.4f" % testMeasuresRow[-1])

        testMeasures.append(testMeasuresRow)

    printStr += " ||U||=" + str("%.3f" % numpy.linalg.norm(muU))
    printStr += " ||V||=" + str("%.3f" % numpy.linalg.norm(muV))

    if self.bound:
        trainObj = objArr.sum()
        expectationBound = self.computeBound(trainX, muU, muV, trainObj, self.delta)
        printStr += " bound=" + str("%.3f" % expectationBound)
        trainMeasures[-1].append(expectationBound)

    return printStr
logging.debug("Starting training") logging.debug(maxLocalAuc) #modelSelectX = trainX[0:100, :] #maxLocalAuc.learningRateSelect(trainX) #maxLocalAuc.modelSelect(trainX) #ProfileUtils.profile('U, V, trainObjs, trainAucs, testObjs, testAucs, iterations, time = maxLocalAuc.learnModel(trainX, testX=testX, verbose=True)', globals(), locals()) U, V, trainMeasures, testMeasures, iterations, time = maxLocalAuc.learnModel(trainX, verbose=True) p = 10 trainOrderedItems = MCEvaluator.recommendAtk(U, V, p) testOrderedItems = MCEvaluatorCython.recommendAtk(U, V, p, trainX) r = SparseUtilsCython.computeR(U, V, maxLocalAuc.w, maxLocalAuc.numRecordAucSamples) trainObjVec = maxLocalAuc.objectiveApprox(trainOmegaPtr, U, V, r, maxLocalAuc.gi, maxLocalAuc.gp, maxLocalAuc.gq, full=True) testObjVec = maxLocalAuc.objectiveApprox(testOmegaPtr, U, V, r, maxLocalAuc.gi, maxLocalAuc.gp, maxLocalAuc.gq, allArray=allOmegaPtr, full=True) itemCounts = numpy.array(X.sum(0)+1, numpy.int32) beta = 0.5 for p in [1, 3, 5, 10]: trainPrecision = MCEvaluator.precisionAtK(trainOmegaPtr, trainOrderedItems, p) testPrecision = MCEvaluator.precisionAtK(testOmegaPtr, testOrderedItems, p) logging.debug("Train/test precision@" + str(p) + "=" + str(trainPrecision) + "/" + str(testPrecision)) for p in [1, 3, 5, 10]: trainRecall = MCEvaluator.stratifiedRecallAtK(trainOmegaPtr, trainOrderedItems, p, itemCounts, beta) testRecall = MCEvaluator.stratifiedRecallAtK(testOmegaPtr, testOrderedItems, p, itemCounts, beta) logging.debug("Train/test stratified recall@" + str(p) + "=" + str(trainRecall) + "/" + str(testRecall))
def recordResults(self, X, trainX, testX, learner, fileName):
    """
    Save results for a particular recommendation
    """
    if self.algoArgs.skipRecordResults:
        logging.debug("Skipping final evaluation of algorithm")
        return

    allTrainMeasures = []
    allTestMeasures = []
    allMetaData = []

    for i in range(self.algoArgs.recordFolds):
        metaData = []
        w = 1 - self.algoArgs.u
        logging.debug("Computing recommendation errors")

        maxItems = self.ps[-1]
        start = time.time()

        if type(learner) == IterativeSoftImpute:
            trainIterator = iter([trainX])
            ZList = learner.learnModel(trainIterator)
            U, s, V = ZList.next()
            U = U * s

            #trainX = sppy.csarray(trainX)
            #testX = sppy.csarray(testX)

            U = numpy.ascontiguousarray(U)
            V = numpy.ascontiguousarray(V)
        else:
            learner.learnModel(trainX)
            U = learner.U
            V = learner.V

        learnTime = time.time() - start
        metaData.append(learnTime)

        logging.debug("Getting all omega")
        allOmegaPtr = SparseUtils.getOmegaListPtr(X)
        logging.debug("Getting train omega")
        trainOmegaPtr = SparseUtils.getOmegaListPtr(trainX)
        logging.debug("Getting test omega")
        testOmegaPtr = SparseUtils.getOmegaListPtr(testX)
        logging.debug("Getting recommendations")

        trainOrderedItems = MCEvaluator.recommendAtk(U, V, maxItems)
        testOrderedItems = MCEvaluatorCython.recommendAtk(U, V, maxItems, trainX)

        colNames = []
        trainMeasures = []
        testMeasures = []

        for p in self.ps:
            trainMeasures.append(MCEvaluator.precisionAtK(trainOmegaPtr, trainOrderedItems, p))
            testMeasures.append(MCEvaluator.precisionAtK(testOmegaPtr, testOrderedItems, p))
            colNames.append("precision@" + str(p))

        for p in self.ps:
            trainMeasures.append(MCEvaluator.recallAtK(trainOmegaPtr, trainOrderedItems, p))
            testMeasures.append(MCEvaluator.recallAtK(testOmegaPtr, testOrderedItems, p))
            colNames.append("recall@" + str(p))

        for p in self.ps:
            trainMeasures.append(MCEvaluator.f1AtK(trainOmegaPtr, trainOrderedItems, p))
            testMeasures.append(MCEvaluator.f1AtK(testOmegaPtr, testOrderedItems, p))
            colNames.append("f1@" + str(p))

        for p in self.ps:
            trainMeasures.append(MCEvaluator.mrrAtK(trainOmegaPtr, trainOrderedItems, p))
            testMeasures.append(MCEvaluator.mrrAtK(testOmegaPtr, testOrderedItems, p))
            colNames.append("mrr@" + str(p))

        try:
            r = SparseUtilsCython.computeR(U, V, w, self.algoArgs.numRecordAucSamples)
            trainMeasures.append(MCEvaluator.localAUCApprox(trainOmegaPtr, U, V, w, self.algoArgs.numRecordAucSamples, r=r))
            testMeasures.append(MCEvaluator.localAUCApprox(testOmegaPtr, U, V, w, self.algoArgs.numRecordAucSamples, allArray=allOmegaPtr, r=r))

            w = 0.0
            r = SparseUtilsCython.computeR(U, V, w, self.algoArgs.numRecordAucSamples)
            trainMeasures.append(MCEvaluator.localAUCApprox(trainOmegaPtr, U, V, w, self.algoArgs.numRecordAucSamples, r=r))
            testMeasures.append(MCEvaluator.localAUCApprox(testOmegaPtr, U, V, w, self.algoArgs.numRecordAucSamples, allArray=allOmegaPtr, r=r))

            colNames.append("LAUC@" + str(self.algoArgs.u))
            colNames.append("AUC")
        except:
            logging.debug("Could not compute AUCs")
            raise

        trainMeasures = numpy.array(trainMeasures)
        testMeasures = numpy.array(testMeasures)
        metaData = numpy.array(metaData)

        allTrainMeasures.append(trainMeasures)
        allTestMeasures.append(testMeasures)
        allMetaData.append(metaData)

    allTrainMeasures = numpy.array(allTrainMeasures)
    allTestMeasures = numpy.array(allTestMeasures)
    allMetaData = numpy.array(allMetaData)

    meanTrainMeasures = numpy.mean(allTrainMeasures, 0)
    meanTestMeasures = numpy.mean(allTestMeasures, 0)
    meanMetaData = numpy.mean(allMetaData, 0)

    logging.debug("Mean metrics")
    for i, colName in enumerate(colNames):
        logging.debug(colName + ":" + str('%.4f' % meanTrainMeasures[i]) + "/" + str('%.4f' % meanTestMeasures[i]))

    numpy.savez(fileName, meanTrainMeasures, meanTestMeasures, meanMetaData, trainOrderedItems, testOrderedItems)
    logging.debug("Saved file as " + fileName)