Example #1
0
    def testDerivativeUiApprox(self):
        """
        Check that averaging many sampled approximations of the AUC derivative
        w.r.t. U_i converges to the exact derivative.

        For a handful of random rows i we average ``numRuns`` calls to
        ``derivativeUiApprox`` and compare against ``derivativeUi``; the two
        should agree to ~2 decimal places. The check is repeated with
        ``lmbdaV`` regularisation switched on.
        """
        m = 20
        n = 30
        k = 3
        X = SparseUtils.generateSparseBinaryMatrix((m, n), k, csarray=True)

        w = 0.1
        learner = MaxAUCSquare(k, w)
        learner.normalise = False
        learner.lmbdaU = 0
        learner.lmbdaV = 0
        learner.rho = 1.0
        # A moderate sample count; the averaging over numRuns below does the
        # variance reduction.
        learner.numAucSamples = 50

        # Row/column weight distributions, normalised to sum to 1.
        gp = numpy.random.rand(n)
        gp /= gp.sum()
        gq = numpy.random.rand(n)
        gq /= gq.sum()

        numRuns = 200
        numTests = 5

        indPtr, colInds = SparseUtils.getOmegaListPtr(X)
        permutedRowInds = numpy.arange(m, dtype=numpy.uint32)
        permutedColInds = numpy.arange(n, dtype=numpy.uint32)

        def checkRow(i):
            # Fresh random factor matrices for each tested row.
            U = numpy.random.rand(X.shape[0], k)
            V = numpy.random.rand(X.shape[1], k)

            # Monte-Carlo estimate: average the sampled derivative over
            # numRuns independent draws.
            du1 = numpy.zeros(k)
            for j in range(numRuns):
                VDot, VDotDot, WDot, WDotDot = learner.computeMeansVW(indPtr, colInds, U, V, permutedRowInds, permutedColInds, gp, gq)
                du1 += learner.derivativeUiApprox(U, V, VDot, VDotDot, WDot, WDotDot, i)
            du1 /= numRuns

            # Exact derivative for comparison.
            du2 = learner.derivativeUi(indPtr, colInds, U, V, gp, gq, i)
            print(du1/numpy.linalg.norm(du1), du2/numpy.linalg.norm(du2))
            nptst.assert_array_almost_equal(du1, du2, 2)

        # Test with a small number of AUC samples, no regularisation.
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)

        # Second independent pass against the exact derivative (kept as a
        # separate pass, mirroring the original test structure).
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)

        # Repeat with V-regularisation enabled.
        learner.lmbdaV = 0.5
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)
Example #2
0
    def testDerivativeUiApprox(self):
        """
        Check that averaging many sampled approximations of the AUC derivative
        w.r.t. U_i converges to the exact derivative.

        For a handful of random rows i we average ``numRuns`` calls to
        ``derivativeUiApprox`` and compare against ``derivativeUi``; the two
        should agree to ~2 decimal places. The check is repeated with
        ``lmbdaV`` regularisation switched on.
        """
        m = 20
        n = 30
        k = 3
        X = SparseUtils.generateSparseBinaryMatrix((m, n), k, csarray=True)

        w = 0.1
        learner = MaxAUCSquare(k, w)
        learner.normalise = False
        learner.lmbdaU = 0
        learner.lmbdaV = 0
        learner.rho = 1.0
        # A moderate sample count; the averaging over numRuns below does the
        # variance reduction.
        learner.numAucSamples = 50

        # Row/column weight distributions, normalised to sum to 1.
        gp = numpy.random.rand(n)
        gp /= gp.sum()
        gq = numpy.random.rand(n)
        gq /= gq.sum()

        numRuns = 200
        numTests = 5

        indPtr, colInds = SparseUtils.getOmegaListPtr(X)
        permutedRowInds = numpy.arange(m, dtype=numpy.uint32)
        permutedColInds = numpy.arange(n, dtype=numpy.uint32)

        def checkRow(i):
            # Fresh random factor matrices for each tested row.
            U = numpy.random.rand(X.shape[0], k)
            V = numpy.random.rand(X.shape[1], k)

            # Monte-Carlo estimate: average the sampled derivative over
            # numRuns independent draws.
            du1 = numpy.zeros(k)
            for j in range(numRuns):
                VDot, VDotDot, WDot, WDotDot = learner.computeMeansVW(
                    indPtr, colInds, U, V, permutedRowInds, permutedColInds,
                    gp, gq)
                du1 += learner.derivativeUiApprox(U, V, VDot, VDotDot, WDot,
                                                  WDotDot, i)
            du1 /= numRuns

            # Exact derivative for comparison.
            du2 = learner.derivativeUi(indPtr, colInds, U, V, gp, gq, i)
            print(du1 / numpy.linalg.norm(du1), du2 / numpy.linalg.norm(du2))
            nptst.assert_array_almost_equal(du1, du2, 2)

        # Test with a small number of AUC samples, no regularisation.
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)

        # Second independent pass against the exact derivative (kept as a
        # separate pass, mirroring the original test structure).
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)

        # Repeat with V-regularisation enabled.
        learner.lmbdaV = 0.5
        for i in numpy.random.permutation(m)[0:numTests]:
            checkRow(i)