def testDerivativeViApprox(self):
    """
    Test the case in which we approximate the derivative using a large
    number of samples for the AUC, and check that we get close to the
    exact derivative.
    """
    m = 20
    n = 30
    k = 3
    X = SparseUtils.generateSparseBinaryMatrix((m, n), k, csarray=True)

    # Make the first column all ones and the second all zeros, so every
    # row has at least one relevant and one irrelevant item.
    for i in range(m):
        X[i, 0] = 1
        X[i, 1] = 0

    w = 0.1
    learner = MaxAUCLogistic(k, w)
    learner.normalise = False
    learner.lmbdaU = 0
    learner.lmbdaV = 0
    learner.numAucSamples = n

    indPtr, colInds = SparseUtils.getOmegaListPtr(X)

    U = numpy.random.rand(X.shape[0], k)
    V = numpy.random.rand(X.shape[1], k)

    gp = numpy.random.rand(n)
    gp /= gp.sum()
    gq = numpy.random.rand(n)
    gq /= gq.sum()

    permutedRowInds = numpy.array(numpy.random.permutation(m), numpy.uint32)
    permutedColInds = numpy.array(numpy.random.permutation(n), numpy.uint32)

    maxLocalAuc = MaxLocalAUC(k, w)
    normGp, normGq = maxLocalAuc.computeNormGpq(indPtr, colInds, gp, gq, m)

    numRuns = 200
    numTests = 5

    # Compare the averaged approximate derivative (dv1) against the exact
    # derivative (dv2), and cross-check with a central finite-difference
    # estimate of the objective (dv3, printed for inspection).
    for i in numpy.random.permutation(m)[0:numTests]:
        U = numpy.random.rand(X.shape[0], k)
        V = numpy.random.rand(X.shape[1], k)

        dv1 = numpy.zeros(k)
        for j in range(numRuns):
            dv1 += learner.derivativeViApprox(indPtr, colInds, U, V, gp, gq, normGp, normGq, permutedRowInds, permutedColInds, i)
        dv1 /= numRuns

        dv2 = learner.derivativeVi(indPtr, colInds, U, V, gp, gq, i)

        dv3 = numpy.zeros(k)
        eps = 10**-6
        for j in range(k):
            tempV = V.copy()
            tempV[i, j] += eps
            obj1 = learner.objective(indPtr, colInds, indPtr, colInds, U, tempV, gp, gq)
            tempV = V.copy()
            tempV[i, j] -= eps
            obj2 = learner.objective(indPtr, colInds, indPtr, colInds, U, tempV, gp, gq)
            dv3[j] = (obj1 - obj2) / (2 * eps)

        print(dv1, dv2, dv3)
        nptst.assert_array_almost_equal(dv1, dv2, 3)

    # Repeat with regularisation enabled.
    learner.lmbdaV = 0.5
    learner.rho = 0.5

    for i in numpy.random.permutation(m)[0:numTests]:
        U = numpy.random.rand(X.shape[0], k)
        V = numpy.random.rand(X.shape[1], k)

        dv1 = numpy.zeros(k)
        for j in range(numRuns):
            dv1 += learner.derivativeViApprox(indPtr, colInds, U, V, gp, gq, normGp, normGq, permutedRowInds, permutedColInds, i)
        dv1 /= numRuns

        dv2 = learner.derivativeVi(indPtr, colInds, U, V, gp, gq, i)
        print(dv1, dv2)
        nptst.assert_array_almost_equal(dv1, dv2, 3)

    # Repeat with row subsampling, using more runs to reduce the variance
    # of the averaged estimate.
    learner.numRowSamples = 10
    numRuns = 1000

    for i in numpy.random.permutation(m)[0:numTests]:
        U = numpy.random.rand(X.shape[0], k)
        V = numpy.random.rand(X.shape[1], k)

        dv1 = numpy.zeros(k)
        for j in range(numRuns):
            dv1 += learner.derivativeViApprox(indPtr, colInds, U, V, gp, gq, normGp, normGq, permutedRowInds, permutedColInds, i)
        dv1 /= numRuns

        dv2 = learner.derivativeVi(indPtr, colInds, U, V, gp, gq, i)
        print(dv1, dv2)
        nptst.assert_array_almost_equal(dv1, dv2, 3)

    # Final test: subsample the AUC pairs, disable regularisation, and
    # check every row. These parameters must be set on learner, since
    # derivativeViApprox reads them from there.
    learner.numRowSamples = m
    learner.numAucSamples = 20
    learner.lmbdaV = 0
    numRuns = 1000
    print("Final test")

    for i in range(m):
        U = numpy.random.rand(X.shape[0], k)
        V = numpy.random.rand(X.shape[1], k)

        dv1 = numpy.zeros(k)
        for j in range(numRuns):
            dv1 += learner.derivativeViApprox(indPtr, colInds, U, V, gp, gq, normGp, normGq, permutedRowInds, permutedColInds, i)
        dv1 /= numRuns

        dv2 = learner.derivativeVi(indPtr, colInds, U, V, gp, gq, i)
        print(i, dv1, dv2)
        nptst.assert_array_almost_equal(dv1, dv2, 3)
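# A minimal, self-contained sketch (not part of the original suite) of the
# verification pattern used in testDerivativeViApprox: average a noisy
# gradient estimator over many runs, compare it with the exact gradient,
# and cross-check both against central finite differences. The objective
# here is a toy quadratic, f(v) = 0.5 * ||v||^2 with gradient v, standing
# in for learner.objective; the method name and the noise model are
# hypothetical illustrations, not part of the library.
def testGradientCheckSketch(self):
    import numpy  # local imports so the sketch is self-contained
    import numpy.testing as nptst

    k = 3
    numRuns = 2000
    eps = 10**-6
    v = numpy.random.rand(k)

    f = lambda u: 0.5 * numpy.dot(u, u)  # toy objective, gradient is u

    # Averaging a noisy estimator (exact gradient plus zero-mean noise)
    # converges to the exact gradient, mirroring how dv1 is formed from
    # repeated calls to derivativeViApprox above.
    dv1 = numpy.zeros(k)
    for j in range(numRuns):
        dv1 += v + numpy.random.randn(k) * 0.1
    dv1 /= numRuns

    dv2 = v  # exact gradient of the toy objective

    # Central finite differences, as in the dv3 computation above:
    # dv3[j] = (f(v + eps*e_j) - f(v - eps*e_j)) / (2*eps).
    dv3 = numpy.zeros(k)
    for j in range(k):
        tempV = v.copy()
        tempV[j] += eps
        obj1 = f(tempV)
        tempV = v.copy()
        tempV[j] -= eps
        obj2 = f(tempV)
        dv3[j] = (obj1 - obj2) / (2 * eps)

    # The Monte Carlo average matches to ~2 decimals at this sample size;
    # the finite-difference estimate is accurate to many more.
    nptst.assert_array_almost_equal(dv1, dv2, 2)
    nptst.assert_array_almost_equal(dv2, dv3, 6)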