Code example #1
0
    def test02_sgd_sanity_with_epochs(self):
        """Smoke test: running SGD for several epochs completes without raising."""
        C, W0, X, _, _ = create_C_W_X_d()
        optimizer = SGD(batch_size=256, m=X.shape[1])
        W = W0.copy()
        # The epoch counter itself is unused; each pass feeds the updated
        # weights back into the optimizer.
        for _ in range(15):
            W = optimizer.optimize(W, X, C, objective_soft_max,
                                   objective_soft_max_gradient_W, lr=1)

        # Reaching this point without an exception is the pass condition.
        self.assertTrue(True)
Code example #2
0
    def test01_linear_convergence_gradient_test_X(self):
        """Linear-convergence gradient test w.r.t. X.

        Without a first-order correction, halving eps should roughly halve
        |f(X + eps*d) - f(X)|, so the ratio of consecutive losses should
        settle near 0.5. The average of the last five ratios is checked.
        """
        max_iter = 17
        C, W, X, _, d = create_C_W_X_d()
        eps0 = 0.5
        losses = []
        loss_convergence = []
        for i in range(max_iter):
            eps = eps0 * (0.5**i)
            objective1 = round(objective_soft_max(X + eps * d, W, C), 10)
            objective2 = round(objective_soft_max(X, W, C), 10)
            losses.append(abs(objective1 - objective2))

        for i in range(1, len(losses)):
            loss_convergence.append(losses[i] / losses[i - 1])
        # BUG FIX: the builtin round() does not accept a list, so
        # round(loss_convergence[-5:], 4) raises TypeError (it only worked
        # if numpy's round shadowed the builtin via a star import).
        # Average the last five ratios first, then round the scalar.
        avg_val = round(average(loss_convergence[-5:]), 4)
        self.assertTrue(0.4 <= avg_val <= 0.6, msg=f'avg value = {avg_val}')
Code example #3
0
    def test00_linear_convergence_gradient_test_W(self):
        """Linear-convergence gradient test w.r.t. W.

        Without a first-order correction, halving eps should roughly halve
        |f(W + eps*d) - f(W)|, so the ratio of consecutive losses should
        settle near 0.5. The average of the last five ratios is checked.
        """
        max_iter = 25
        eps0 = 0.5
        losses = []
        loss_convergence = []
        C, W, X, d, _ = create_C_W_X_d()
        for i in range(max_iter):
            eps = eps0 * (0.5**i)
            losses.append(
                abs(
                    objective_soft_max(X, W + eps * d, C) -
                    objective_soft_max(X, W, C)))

        for i in range(1, len(losses)):
            loss_convergence.append(losses[i] / losses[i - 1])
        # BUG FIX: the builtin round() does not accept a list, so
        # round(loss_convergence[-5:], 4) raises TypeError (it only worked
        # if numpy's round shadowed the builtin via a star import).
        # Average the last five ratios first, then round the scalar.
        avg_val = round(average(loss_convergence[-5:]), 4)
        self.assertTrue(0.40 <= avg_val <= 0.6, msg=f'avg value = {avg_val}')
Code example #4
0
    def test03_quadratic_convergence_gradient_X(self):
        """Quadratic-convergence gradient test w.r.t. X.

        With the first-order term eps * <d, grad f(X)> subtracted, the
        remainder is O(eps^2), so halving eps should quarter the loss;
        the ratio of consecutive losses should settle near 0.25.
        """
        max_iter = 17
        eps0 = 0.5
        losses = []
        loss_convergence = []
        C, W, X, _, d = create_C_W_X_d()
        for i in range(max_iter):
            eps = eps0 * (0.5**i)
            objective1 = objective_soft_max(X + eps * d, W, C)
            objective2 = objective_soft_max(X, W, C)
            # First-order Taylor term: eps * trace(d^T grad_X f).
            objective3 = eps * trace(
                d.T @ objective_soft_max_gradient_X(X, W, C))

            losses.append(abs(objective1 - objective2 - objective3))

        for j in range(1, len(losses)):
            loss_convergence.append(losses[j] / losses[j - 1])

        # BUG FIX: the builtin round() does not accept a list, so
        # round(loss_convergence[-5:], 4) raises TypeError (it only worked
        # if numpy's round shadowed the builtin via a star import).
        # Average the last five ratios first, then round the scalar.
        avg_val = round(average(loss_convergence[-5:]), 4)
        self.assertTrue(0.2 <= avg_val <= 0.3, msg=f'ans = {avg_val}')
Code example #5
0
File: test_loss.py  Project: Yairz1/NN_impl
    def test02_objective_soft_max_sanity_test(self):
        """The new and old soft-max objectives must agree to 10 decimal places."""
        C, W, X, _, _ = create_C_W_X_d()
        new_val = np.round(objective_soft_max(X, W, C), 10)
        old_val = np.round(objective_soft_max_old(X, W, C), 10)
        self.assertEqual(new_val, old_val)
Code example #6
0
 def test01_sgd_sanity(self):
     """Smoke test: a single SGD optimize call completes without raising."""
     C, W0, X, _, _ = create_C_W_X_d()
     sgd = SGD(batch_size=256, m=X.shape[1])
     _ = sgd.optimize(W0, X, C, objective_soft_max,
                      objective_soft_max_gradient_W)
     # Pass condition is simply that no exception was raised above.
     self.assertTrue(True)