# Example #1 (score: 0)
def test_selu_activation(N=50):
    """Cross-check the numpy-ml SELU forward pass against PyTorch's F.selu.

    Runs ``N`` randomized trials (``N=None`` means run indefinitely), each on
    a random stochastic row vector of 1-100 dimensions.
    """
    from numpy_ml.neural_nets.activations import SELU

    # None is a sentinel for "loop forever"
    n_trials = np.inf if N is None else N

    ours = SELU()
    gold = lambda z: F.selu(torch.FloatTensor(z)).numpy()

    trial = 0
    while trial < n_trials:
        dims = np.random.randint(1, 100)
        sample = random_stochastic_matrix(1, dims)
        # forward passes must agree to ~7 decimal places
        assert_almost_equal(ours.fn(sample), gold(sample))
        print("PASSED")
        trial += 1
# Example #2 (score: 0)
def test_leakyrelu_activation(N=50):
    """Cross-check the numpy-ml LeakyReLU forward pass against PyTorch.

    Runs ``N`` randomized trials (``N=None`` means run indefinitely), each
    with a freshly drawn negative-slope ``alpha`` in [0, 10).
    """
    from numpy_ml.neural_nets.activations import LeakyReLU

    # None is a sentinel for "loop forever"
    n_trials = np.inf if N is None else N

    done = 0
    while done < n_trials:
        # NOTE: RNG call order (randint -> matrix -> uniform) is significant
        # for reproducibility under a fixed seed.
        dims = np.random.randint(1, 100)
        sample = random_stochastic_matrix(1, dims)
        slope = np.random.uniform(0, 10)

        ours = LeakyReLU(alpha=slope)
        reference = F.leaky_relu(torch.FloatTensor(sample), slope).numpy()
        assert_almost_equal(ours.fn(sample), reference)

        print("PASSED")
        done += 1
# Example #3 (score: 0)
def test_gelu_activation(N=50):
    """Cross-check the numpy-ml GELU forward pass against PyTorch's F.gelu.

    Runs ``N`` randomized trials (``N=None`` means run indefinitely). Each
    trial checks both the exact (erf-based) GELU against torch, and the exact
    form against numpy-ml's tanh approximation.
    """
    from numpy_ml.neural_nets.activations import GELU

    # None is a sentinel for "loop forever"
    N = np.inf if N is None else N

    i = 0
    while i < N:
        n_dims = np.random.randint(1, 100)
        z = random_stochastic_matrix(1, n_dims)
        # (fixed) removed a dead `approx = np.random.choice(...)` draw that
        # was never used — it only perturbed the RNG stream.

        mine = GELU(approximate=False)
        mine_approx = GELU(approximate=True)
        gold = lambda z: F.gelu(torch.FloatTensor(z)).numpy()

        # float32 torch vs float64 numpy: compare loosely
        np.testing.assert_allclose(mine.fn(z), gold(z), rtol=1e-3)
        # (fixed) the tanh approximation only agrees with the exact erf form
        # to roughly 1e-3; the previous default of 7 decimals was spuriously
        # strict and inconsistent with the rtol=1e-3 check above.
        assert_almost_equal(mine.fn(z), mine_approx.fn(z), decimal=3)

        print("PASSED")
        i += 1