# Example 1
    def testProject(self):
        """Check that PrimalDualCCA with a linear kernel produces the same
        projections as KernelCCA, and that the projected directions are
        orthonormal.
        """
        numExamples = 50
        numFeatures = 50
        X = numpy.random.rand(numExamples, numFeatures)
        Y = numpy.random.rand(numExamples, numFeatures)

        tau = 0.0
        # BUG FIX: the original wrote 10 ** --6, i.e. 10 ** 6 = 1_000_000
        # (double negation), which made every tolerance check pass trivially.
        tol = 10 ** -6
        k = 5

        kernel = LinearKernel()
        cca = PrimalDualCCA(kernel, tau, tau)
        alpha, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # Now try with a nonzero regularisation parameter.
        tau = 0.5
        cca = PrimalDualCCA(kernel, tau, tau)
        alpha, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # BUG FIX: orthonormality means XU^T XU == I (the k x k identity).
        # The original compared against numpy.ones(k), which broadcasts to an
        # all-ones matrix and does not test orthonormality at all.
        self.assertTrue(numpy.linalg.norm(numpy.dot(XU.T, XU) - numpy.eye(k)) < tol)
        self.assertTrue(numpy.linalg.norm(numpy.dot(YU.T, YU) - numpy.eye(k)) < tol)
# Example 2
    def testProject(self):
        """Verify PrimalDualCCA against KernelCCA: with a linear kernel both
        should yield identical projections, and the learnt directions should
        be orthonormal.
        """
        numExamples = 50
        numFeatures = 50
        X = numpy.random.rand(numExamples, numFeatures)
        Y = numpy.random.rand(numExamples, numFeatures)

        tau = 0.0
        # BUG FIX: 10**--6 evaluates to 10**6 (double unary minus), so the
        # tolerance was one million and the asserts could never fail.
        tol = 10 ** -6
        k = 5

        kernel = LinearKernel()
        cca = PrimalDualCCA(kernel, tau, tau)
        alpha, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # Repeat the comparison with regularisation enabled.
        tau = 0.5
        cca = PrimalDualCCA(kernel, tau, tau)
        alpha, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # BUG FIX: the orthonormality condition is XU^T XU == identity.
        # Comparing with numpy.ones(k) broadcast an all-ones matrix, which
        # checked nothing useful; numpy.eye(k) is the intended target.
        self.assertTrue(numpy.linalg.norm(numpy.dot(XU.T, XU) - numpy.eye(k)) < tol)
        self.assertTrue(numpy.linalg.norm(numpy.dot(YU.T, YU) - numpy.eye(k)) < tol)