Example no. 1
0
    def testProject(self):
        """Check that PrimalCCA and KernelCCA with a linear kernel produce the
        same k-dimensional projections, and that the projected directions are
        orthonormal."""
        numpy.random.seed(21)  # make the randomized test deterministic
        numExamples = 50
        numFeatures = 10
        X = numpy.random.rand(numExamples, numFeatures)
        Y = numpy.random.rand(numExamples, numFeatures)

        tau = 0.0
        # Bug fix: the original `10**--6` parses as 10**(-(-6)) == 10**6, so the
        # tolerance was one million and every assertion passed vacuously.
        tol = 10**-6
        k = 5

        cca = PrimalCCA(tau)
        u, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas2 = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        #Seem to get an error in this for some reason
        #self.assertTrue(numpy.linalg.norm(XU-XU2) < tol)
        #self.assertTrue(numpy.linalg.norm(YU-YU2) < tol)

        #Now try with different tau
        tau = 0.5
        cca = PrimalCCA(tau)
        u, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # Bug fix: XU.T @ XU is a (k, k) Gram matrix; comparing it against
        # numpy.ones(k) broadcasts 1 into every entry, which cannot hold for
        # orthonormal directions. The intended target is the identity matrix.
        self.assertTrue(
            numpy.linalg.norm(numpy.dot(XU.T, XU) - numpy.eye(k)) < tol)
        self.assertTrue(
            numpy.linalg.norm(numpy.dot(YU.T, YU) - numpy.eye(k)) < tol)
Example no. 2
0
    def testProject(self):
        """Check that PrimalCCA and KernelCCA with a linear kernel produce the
        same k-dimensional projections, and that the projected directions are
        orthonormal."""
        numpy.random.seed(21)  # make the randomized test deterministic
        numExamples = 50
        numFeatures = 10
        X = numpy.random.rand(numExamples, numFeatures)
        Y = numpy.random.rand(numExamples, numFeatures)

        tau = 0.0
        # Bug fix: the original `10**--6` parses as 10**(-(-6)) == 10**6, so the
        # tolerance was one million and every assertion passed vacuously.
        tol = 10**-6
        k = 5

        cca = PrimalCCA(tau)
        u, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas2 = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        #Seem to get an error in this for some reason
        #self.assertTrue(numpy.linalg.norm(XU-XU2) < tol)
        #self.assertTrue(numpy.linalg.norm(YU-YU2) < tol)

        #Now try with different tau
        tau = 0.5
        cca = PrimalCCA(tau)
        u, v, lmbdas = cca.learnModel(X, Y)
        XU, YU = cca.project(X, Y, k)

        kernel = LinearKernel()
        kcca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = kcca.learnModel(X, Y)
        XU2, YU2 = kcca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU - XU2) < tol)
        self.assertTrue(numpy.linalg.norm(YU - YU2) < tol)

        # Bug fix: XU.T @ XU is a (k, k) Gram matrix; comparing it against
        # numpy.ones(k) broadcasts 1 into every entry, which cannot hold for
        # orthonormal directions. The intended target is the identity matrix.
        self.assertTrue(
            numpy.linalg.norm(numpy.dot(XU.T, XU) - numpy.eye(k)) < tol)
        self.assertTrue(
            numpy.linalg.norm(numpy.dot(YU.T, YU) - numpy.eye(k)) < tol)
Example no. 3
0
    def testProject(self):
        """Check that KernelCCA projections of X and Y = 2*X coincide (the
        linear-kernel canonical directions are scale invariant) and that the
        projected directions are orthonormal."""
        numpy.random.seed(21)  # make the randomized test deterministic
        numExamples = 5
        numFeatures = 10

        X = numpy.random.rand(numExamples, numFeatures)
        Y = X*2

        tau = 0.0
        kernel = LinearKernel()

        # Bug fix: the original `10**--6` parses as 10**(-(-6)) == 10**6, so the
        # tolerance was one million and every assertion passed vacuously.
        tol = 10**-6
        k = 5

        cca = KernelCCA(kernel, kernel, tau)
        alpha, beta, lmbdas = cca.learnModel(X, Y)

        XU, YU = cca.project(X, Y, k)

        self.assertTrue(numpy.linalg.norm(XU-YU) < tol)

        # Bug fix: XU.T @ XU is a (k, k) Gram matrix; comparing it against
        # numpy.ones(k) broadcasts 1 into every entry, which cannot hold for
        # orthonormal directions. The intended target is the identity matrix.
        self.assertTrue(numpy.linalg.norm(numpy.dot(XU.T, XU) - numpy.eye(k)) < tol)
        self.assertTrue(numpy.linalg.norm(numpy.dot(YU.T, YU) - numpy.eye(k)) < tol)