def test_gt_forward():
    """Elementwise `>` between Variables holds in forward mode."""
    ad.set_mode('forward')

    # Construct two variables where every entry of x exceeds y's entry.
    base = np.random.random(5)
    x = Variable(base + 1)
    y = Variable(base)

    # Each component of x is strictly greater than the matching one in y.
    assert (x > y).all(), 'Data failed'
def test_ge_reverse():
    """Elementwise `>=` between Variables holds in reverse mode."""
    ad.set_mode('reverse')

    # x is componentwise strictly larger than y, so >= must be True everywhere.
    vals = np.random.random(5)
    y = Variable(vals)
    x = Variable(vals + 1)

    assert (x >= y).all(), 'Data failed'
def test_eq_reverse():
    """A Variable compares equal to itself in reverse mode."""
    ad.set_mode('reverse')

    samples = np.random.random(5)
    var = Variable(samples)
    alias = var  # both names refer to the very same Variable object

    assert (var == alias).all(), 'equality reverse failed'
def test_eq_forward():
    """A Variable compares equal to itself in forward mode.

    Fix: the assertion message previously contained the typo 'equqlity'.
    """
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)
    y = x  # same object, so elementwise equality must hold

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x == y).all(), 'equality forward failed'
def test_neq_forward():
    """Elementwise `!=` between distinct Variables holds in forward mode."""
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)
    y = Variable(data + 1)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    # BUG FIX: `.all` was missing its call parentheses, so the assert
    # evaluated a bound method (always truthy) and could never fail.
    assert (x != y).all(), 'inequality forward failed'
# Example #6
def test_gradient_descent_forward():
    """Gradient descent locates the minimum of a shifted paraboloid."""
    ad.set_mode('forward')

    def loss(params):
        # Quadratic bowl whose unique minimum sits at (-5, -3).
        var = Variable(params)
        a, b = var[0], var[1]
        objective = (a + 5)**2 + (b + 3)**2
        objective.compute_gradients()
        return (objective.data, objective.gradient)

    optimizer = GD(loss, [10, 4], lr=0.1, max_iter=1000, tol=1e-13)
    sol = optimizer.solve()
    assert round(sol[0]) == -5 and round(sol[1]) == -3
# Example #7
def test_adam_reverse():
    """Adam minimises a shifted paraboloid in reverse mode."""
    ad.set_mode('reverse')

    def loss(params):
        # Reverse mode needs a fresh computation graph per evaluation.
        ad.reset_graph()
        var = Variable(params)
        first, second = var[0], var[1]
        value = (first + 5)**2 + (second + 3)**2
        value.compute_gradients()
        return (value.data, value.gradient)

    solver = Adam(loss, [10, 4], lr=0.5, max_iter=1000, tol=1e-13)
    result = solver.solve()
    assert round(result[0]) == -5 and round(result[1]) == -3
# Example #8
def test_adam_forward():
    """Adam minimises a shifted paraboloid in forward mode."""
    ad.set_mode('forward')

    def loss(params):
        var = Variable(params)
        u, v = var[0], var[1]
        objective = (u + 5)**2 + (v + 3)**2
        objective.compute_gradients()
        return (objective.data, objective.gradient)

    solver = Adam(loss, [10, 4], lr=0.1, max_iter=1000, tol=1e-13)
    sol = solver.solve()

    # Minimum of (x+5)^2 + (y+3)^2 is at (-5, -3).
    message = 'expected -5, -3  received {}, {}'.format(sol[0], sol[1])
    assert round(sol[0]) == -5 and round(sol[1]) == -3, message
def test_sin_reverse():
    """sin block in reverse mode: verify the data pass and the Jacobian.

    Consistency fix: like the other reverse-mode block tests in this file,
    restore the default 'forward' mode before returning so later tests are
    unaffected by leftover global state.
    """
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    sin_block = sin()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = sin_block(x)
    # =============================================================================
    #   Compute gradient backwards
    # =============================================================================
    y_block.compute_gradients()

    # =============================================================================
    #   define expected output: sin(x) with diagonal Jacobian cos(x)
    # =============================================================================
    data_true = np.sin(data)
    gradient_true = np.diag(np.cos(data))

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(
    ), 'wrong sin data pass. expected {}, given{}'.format(
        data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(
    ), 'wrong sin gradient forward pass. expected {}, given{}'.format(
        gradient_true, y_block.gradient)
    # Restore the default mode, matching the sibling reverse-mode tests.
    ad.set_mode('forward')
# Example #10
def test_neg_forward():
    """Unary negation flips both data and gradient in forward mode."""
    ad.set_mode('forward')

    samples = np.random.random(5)
    var = Variable(samples)

    # Expected result: both payloads of the input variable, negated.
    expected = Variable(-var.data, -var.gradient)
    actual = -var

    # Data pass must match exactly.
    assert np.equal(expected.data, actual.data).all(), 'Data failed'

    # Gradient pass must match exactly.
    assert np.equal(expected.gradient, actual.gradient).all(), 'Gradient failed'
def test_log_diffBase_reverse():
    """log with a random non-e base in reverse mode: data pass and Jacobian."""
    ad.set_mode('reverse')

    vals = np.random.random(5)
    var = Variable(vals)

    # Random base in (2, 6): an integer part plus a fractional offset.
    base = np.random.randint(2, 5) + np.random.random()
    block = log(base=base)

    # Apply the block and backpropagate.
    out = block(var)
    out.compute_gradients()

    # Change-of-base formula and its derivative 1 / (x ln(base)).
    expected_data = np.log(vals) / np.log(base)
    expected_grad = np.diag(1 / (vals * np.log(base)))

    assert np.equal(expected_data, out.data).all(
    ), 'wrong log data pass. expected {}, given{}'.format(
        expected_data, out.data)

    assert np.equal(expected_grad, out.gradient).all(
    ), 'wrong log gradient forward pass. expected {}, given{}'.format(
        expected_grad, out.gradient)

    # Hand the global mode back to its default for subsequent tests.
    ad.set_mode('forward')
def test_logistic_reverse():
    """logistic (sigmoid) block in reverse mode: data pass and Jacobian.

    Fix: the assertion messages previously said 'exp' (copy-paste from the
    exp test); they now correctly name the logistic block.
    """
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    logistic_block = logistic()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = logistic_block(x)
    y_block.compute_gradients()

    # =============================================================================
    #   expected output: sigmoid and its derivative e^x / (1 + e^x)^2
    # =============================================================================
    data_true = 1 / (1 + np.exp(-data))
    gradient_true = np.diag(np.exp(data) / (1 + np.exp(data))**2)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(
    ), 'wrong logistic data pass. expected {}, given{}'.format(
        data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(
    ), 'wrong logistic gradient forward pass. expected {}, given{}'.format(
        gradient_true, y_block.gradient)
    ad.set_mode('forward')
def test_tanh_reverse():
    """tanh block in reverse mode: verify the data pass and the Jacobian."""
    ad.set_mode('reverse')

    inputs = np.random.random(5)
    var = Variable(inputs)

    # Build and apply the custom tanh block, then backpropagate.
    block = tanh()
    out = block(var)
    out.compute_gradients()

    # Reference values: tanh and its derivative 1 - tanh^2.
    expected_data = np.tanh(inputs)
    expected_grad = np.diag(1 - np.tanh(inputs)**2)

    assert np.equal(expected_data, out.data).all(
    ), 'wrong tanh data pass. expected {}, given{}'.format(
        expected_data, out.data)

    assert np.equal(expected_grad, out.gradient).all(
    ), 'wrong tanh gradient forward pass. expected {}, given{}'.format(
        expected_grad, out.gradient)

    # Hand the global mode back to its default for later tests.
    ad.set_mode('forward')
# Example #14
def test_comp_forward():
    """Composition sin(exp(x)) in forward mode: data pass and Jacobian."""
    ad.set_mode('forward')

    inputs = np.random.random(5)
    var = Variable(inputs)

    # Chain the two custom blocks: exp first, then sin on top.
    composed = sin()(exp()(var))
    composed.compute_gradients()

    # Reference via the chain rule: d/dx sin(exp(x)) = exp(x) * cos(exp(x)).
    expected_data = np.sin(np.exp(inputs))
    expected_grad = np.exp(inputs) * np.cos(np.exp(inputs)) * np.identity(5)

    assert np.equal(expected_data, composed.data).all(
    ), 'wrong exp data pass. expected {}, given{}'.format(
        expected_data, composed.data)

    assert np.equal(expected_grad, composed.gradient).all(
    ), 'wrong exp gradient forward pass. expected {}, given{}'.format(
        expected_grad, composed.gradient)
def test_cosh_forward():
    """cosh block in forward mode: verify the data pass and the Jacobian."""
    ad.set_mode('forward')

    samples = np.random.random(5)
    var = Variable(samples)

    # Apply the custom cosh block and compute its gradient.
    out = cosh()(var)
    out.compute_gradients()

    # d/dx cosh(x) = sinh(x); the Jacobian of an elementwise op is diagonal.
    want_data = np.cosh(samples)
    want_grad = np.diag(np.sinh(samples))

    assert np.equal(want_data, out.data).all(
    ), 'wrong cosh data pass. expected {}, given{}'.format(
        want_data, out.data)

    assert np.equal(want_grad, out.gradient).all(
    ), 'wrong cosh gradient forward pass. expected {}, given{}'.format(
        want_grad, out.gradient)
def test_arctan_forward():
    """arctan block in forward mode: verify the data pass and the Jacobian."""
    ad.set_mode('forward')

    samples = np.random.random(5)
    var = Variable(samples)

    # Apply the custom arctan block and compute its gradient.
    out = arctan()(var)
    out.compute_gradients()

    # d/dx arctan(x) = 1 / (1 + x^2); diagonal Jacobian for elementwise ops.
    want_data = np.arctan(samples)
    want_grad = np.diag(1 / (1 + samples**2))

    assert np.equal(want_data, out.data).all(
    ), 'wrong arctan data pass. expected {}, given{}'.format(
        want_data, out.data)

    assert np.equal(want_grad, out.gradient).all(
    ), 'wrong arctan gradient forward pass. expected {}, given{}'.format(
        want_grad, out.gradient)
# Example #17
def test_multiple_forward():
    """Repeated use of the package must keep producing correct results."""
    ad.set_mode('forward')

    def _verify(expected_data, expected_grad, result):
        # Shared data/gradient assertions used by every repetition below.
        assert np.equal(expected_data, result.data).all(
        ), 'wrong exp data pass. expected {}, given{}'.format(
            expected_data, result.data)
        assert np.equal(expected_grad, result.gradient).all(
        ), 'wrong exp gradient forward pass. expected {}, given{}'.format(
            expected_grad, result.gradient)

    # -------------------------------------------------------------------------
    # Initial single pass through sin(exp(x)).
    # -------------------------------------------------------------------------
    data = np.random.random(5)
    x = Variable(data)

    sin_block = sin()
    exp_block = exp()

    y_block = sin_block(exp_block(x))
    y_block.compute_gradients()

    data_true = np.sin(np.exp(data))
    gradient_true = np.exp(data) * np.cos(np.exp(data)) * np.identity(5)

    _verify(data_true, gradient_true, y_block)

    # -------------------------------------------------------------------------
    # Recomputing gradients on the same output must be stable.
    # -------------------------------------------------------------------------
    for _ in range(5):
        y_block.compute_gradients()
        _verify(data_true, gradient_true, y_block)

    # -------------------------------------------------------------------------
    # Re-running the same blocks on the same input must be stable.
    # -------------------------------------------------------------------------
    for _ in range(5):
        y_block = sin_block(exp_block(x))
        y_block.compute_gradients()
        _verify(data_true, gradient_true, y_block)

    # -------------------------------------------------------------------------
    # Fresh variables and fresh blocks must keep working as well.
    # -------------------------------------------------------------------------
    for _ in range(5):
        data = np.random.random(5)
        x = Variable(data)

        sin_block = sin()
        exp_block = exp()

        y_block = sin_block(exp_block(x))
        y_block.compute_gradients()

        data_true = np.sin(np.exp(data))
        gradient_true = np.exp(data) * np.cos(np.exp(data)) * np.identity(5)

        _verify(data_true, gradient_true, y_block)