Example #1
0
 def testCallOther(self):
     """Calling the Kinterface on an explicit (X, Y) pair must reproduce
     the raw kernel function evaluated on the same data."""
     # Reference: polynomial kernel computed directly between X and Y.
     Kp = poly_kernel(self.X, self.Y, degree=2)
     Ki = Kinterface(data=self.X,
                     kernel=poly_kernel,
                     kernel_args={"degree": 2},
                     row_normalize=False)
     Kr = Ki(self.X, self.Y)
     # Use the non-deprecated assertAlmostEqual; the original passed
     # delta=3 (tolerance of 3.0 — nearly vacuous for a norm-vs-0 check),
     # which was a delta/places mix-up. places=3 matches the convention
     # used elsewhere in this suite (see testKernelSum).
     self.assertAlmostEqual(np.linalg.norm(Kp - Kr), 0, places=3)
Example #2
0
 def testCall(self):
     """Calling the Kinterface on (X, X) must reproduce the raw kernel."""
     Kp = poly_kernel(self.X, self.X, degree=2)
     Ki = Kinterface(data=self.X,
                     kernel=poly_kernel,
                     kernel_args={"degree": 2})
     # Non-deprecated assertAlmostEqual; the original's delta=3 allowed
     # a deviation of up to 3.0 in the norm — a delta/places confusion.
     # places=3 matches the convention used elsewhere in this suite.
     self.assertAlmostEqual(np.linalg.norm(Ki(self.X, self.X) - Kp),
                            0,
                            places=3)
Example #3
0
    def testKernelSum(self):
        """A kernel_sum Kinterface must equal the explicit sum of its
        constituent polynomial kernels."""
        degrees = [2, 3, 4]
        Ki = Kinterface(data=self.X,
                        kernel=kernel_sum,
                        kernel_args={
                            "kernels": [poly_kernel] * len(degrees),
                            "kernels_args": [{"degree": d} for d in degrees],
                        },
                        row_normalize=False)

        # Reference matrix: sum the individual polynomial kernels directly.
        Kc = sum(poly_kernel(self.X, self.X, degree=d) for d in degrees)
        self.assertAlmostEqual(np.linalg.norm(Ki[:, :] - Kc), 0, places=3)
Example #4
0
 def testRowNorm(self):
     """Row normalization must yield a unit diagonal and agree with an
     explicitly row-normalized kernel matrix, via both call and slicing."""
     Kp = poly_kernel(self.X, self.X, degree=2)
     Kr = kernel_row_normalize(Kp)
     Ki = Kinterface(data=self.X,
                     kernel=poly_kernel,
                     kernel_args={"degree": 2},
                     row_normalize=True)
     # A row-normalized kernel has ones on its diagonal.
     # Non-deprecated assertAlmostEqual; the original's delta=3 tolerated
     # a deviation of 3.0 (delta/places confusion) — places=3 matches the
     # convention used elsewhere in this suite.
     self.assertAlmostEqual(np.linalg.norm(Ki.diag().ravel() -
                                           np.ones((self.n, ))),
                            0,
                            places=3)
     # Function-call access must match the reference...
     self.assertAlmostEqual(np.linalg.norm(Ki(self.X, self.X) - Kr),
                            0,
                            places=3)
     # ...and so must slice access.
     self.assertAlmostEqual(np.linalg.norm(Ki[:, :] - Kr), 0, places=3)
Example #5
0
    def testDeterministicDecrease(self):
        """
        Test expected reconstruction properties of the Nystrom method.
        """
        for degree in range(1, 6):
            K = poly_kernel(self.X, self.X, degree=degree)
            model = Nystrom(rank=self.n, random_state=42)
            model.fit(K)

            # Reconstruction error when keeping only the first r columns
            # of the factor G.
            errors = np.zeros((self.n, ))
            for r in range(1, self.n + 1):
                approx = model.G[:, :r].dot(model.G[:, :r].T)
                errors[r - 1] = np.linalg.norm(K - approx)

            # Error strictly decreases with rank and (nearly) vanishes
            # at full rank.
            self.assertTrue(np.all(errors[:-1] > errors[1:]))
            self.assertAlmostEqual(errors[-1], 0, delta=3)
Example #6
0
    def testPoly(self):
        """
        Test expected reconstruction properties of the ICD.
        """
        for degree in range(1, 6):
            K = poly_kernel(self.X, self.X, degree=degree)
            model = ICD(rank=self.n)
            model.fit(K)

            # Reconstruction error when keeping only the first r columns
            # of the factor G.
            errors = np.zeros((self.n, ))
            for r in range(1, self.n + 1):
                approx = model.G[:, :r].dot(model.G[:, :r].T)
                errors[r - 1] = np.linalg.norm(K - approx)

            # Error strictly decreases with rank and (nearly) vanishes
            # at full rank.
            self.assertTrue(np.all(errors[:-1] > errors[1:]))
            self.assertAlmostEqual(errors[-1], 0, delta=3)
Example #7
0
    def testPoly(self):
        """
        Test expected reconstruction properties of the CSI method.
        (Original docstring said "ICD" but this block fits a CSI model.)
        """
        delta = 5
        rank = self.n
        # Use a seeded generator for the target vector: the original drew
        # from the global, unseeded RNG, making the test nondeterministic
        # run-to-run.
        rnd = np.random.RandomState(42)
        for d in range(1, 6):
            K = poly_kernel(self.X, self.X, degree=d)
            y = rnd.rand(self.n, 1)
            model = CSI(rank=rank, delta=delta, kappa=0.1)
            model.fit(K, y)

            # Reconstruction error using the first i+1 columns of G.
            errors = np.zeros((rank, ))
            for i in range(rank):
                Ki = model.G[:, :i + 1].dot(model.G[:, :i + 1].T)
                errors[i] = np.linalg.norm(K - Ki)

            # Full-rank reconstruction is (near) exact; error never increases.
            self.assertAlmostEqual(errors[-1], 0, places=3)
            self.assertTrue(np.all(errors[:-1] >= errors[1:]))
Example #8
0
    def testLeverage(self):
        """
        Assert that leverage-score sampling gives a better low-rank
        approximation than uniform random column sampling, and that its
        error decreases with an increasing number of columns.
        :return:
        """
        K = poly_kernel(self.X, self.X, degree=2)
        rank_range = [10, 20, 30, 50]
        repeats = 10

        # One error entry per (repeat, rank) pair, for each sampling scheme.
        errors_lev = np.zeros((repeats, len(rank_range)))
        errors_rand = np.zeros((repeats, len(rank_range)))

        # NOTE(review): self.X is re-drawn each repeat, but K is computed
        # once above and never recomputed, so only the sampling seed varies
        # between repeats — confirm whether K was meant to be refreshed.
        # `xrange` replaced with `range`: xrange is Python 2-only, while
        # this file already uses the Python 3 print() function below.
        for j in range(repeats):
            self.X = np.random.rand(self.n, self.p)
            for i, rank in enumerate(rank_range):
                # lbd=1 -> leverage-score sampling; lbd=0 -> uniform random.
                model_lev = Nystrom(rank=rank, random_state=j, lbd=1)
                model_lev.fit(K)

                model_rand = Nystrom(rank=rank, random_state=j, lbd=0)
                model_rand.fit(K)

                Li = model_lev.G.dot(model_lev.G.T)
                Ri = model_rand.G.dot(model_rand.G.T)
                errors_lev[j, i] = np.linalg.norm(K - Li)
                errors_rand[j, i] = np.linalg.norm(K - Ri)
            # Leverage-score error must strictly decrease with rank.
            self.assertTrue(np.all(errors_lev[j, :-1] > errors_lev[j, 1:]))

        # Count (repeat, rank) cells where each scheme wins outright.
        lev_win = np.sum(errors_lev < errors_rand)
        rand_win = np.sum(errors_lev > errors_rand)
        print("Leverage win: %d, random win: %d" % (lev_win, rand_win))
        self.assertTrue(lev_win > rand_win)