Example 1
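All of the examples below reference np, pytest, fwd, rf, opt, an equals comparison helper, and plot_contour without showing the test module's preamble. A minimal sketch of the imports they appear to assume follows; the package paths and the equals helper are inferred from usage only and are illustrative, not the project's actual layout.

import numpy as np
import pytest

# Hypothetical module paths, inferred from the aliases used in the tests below.
import autodiff.forward as fwd          # forward-mode AD: Variable, sin, exp, VectorFunction, ...
import autodiff.rootfinding as rf       # newton_scalar
import autodiff.optimize as opt         # newton, bfgs, gradient_descent
from autodiff.plot import plot_contour  # used by testplot

def equals(returned, expected, tol=1e-7):
    """Tolerance-based scalar comparison used throughout these tests (assumed helper)."""
    return abs(returned - expected) <= tol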
def test_vectorfunction():
    """
    Function testing applying operations to a vector of expressions
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    g = x**2 - y**2
    vector = fwd.VectorFunction([f, g])
    # test evaluation_at
    evaluation_returned = vector.evaluation_at({x: np.pi / 6, y: np.pi / 6})
    evaluation_expected = np.array([
        np.sin(np.pi / 6) + np.cos(np.pi / 6), (np.pi / 6)**2 - (np.pi / 6)**2
    ])
    for r, e in zip(evaluation_returned, evaluation_expected):
        assert equals(r, e)
    # test gradient_at
    gradient_returned = vector.gradient_at(x, {x: np.pi / 6, y: np.pi / 6})
    gradient_expected = np.array([np.cos(np.pi / 6), np.pi / 3])
    for r, e in zip(gradient_returned, gradient_expected):
        assert equals(r, e)
    # test jacobian_at
    jacobian_returned = vector.jacobian_at({x: np.pi / 6, y: np.pi / 6})
    jacobian_expected = np.array([[np.cos(np.pi / 6), -np.sin(np.pi / 6)],
                                  [np.pi / 3, -np.pi / 3]])
    for i in range(2):
        for j in range(2):
            assert equals(jacobian_returned[i, j], jacobian_expected[i, j])
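The expected values above are the analytic Jacobian of F(x, y) = (sin x + cos y, x**2 - y**2) evaluated at (pi/6, pi/6). As a sanity check, a standalone central-finite-difference approximation (numpy only, independent of the AD package) gives the same matrix:

import numpy as np

def F(x, y):
    # the same vector function as above
    return np.array([np.sin(x) + np.cos(y), x**2 - y**2])

def jacobian_fd(f, x, y, h=1e-6):
    # central finite differences, one column per variable
    return np.column_stack([(f(x + h, y) - f(x - h, y)) / (2 * h),
                            (f(x, y + h) - f(x, y - h)) / (2 * h)])

print(jacobian_fd(F, np.pi / 6, np.pi / 6))
# approximately [[cos(pi/6), -sin(pi/6)], [pi/3, -pi/3]]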
Example 2
def test_newton_scalar():
    """
    Function testing Newton's method for finding roots of 1-D and 2-D scalar
    functions using both forward and backward modes
    """
    # test 1-d scalar function
    x = fwd.Variable()
    f = x - fwd.sin(x)
    root_1d = rf.newton_scalar(f, {x: 3.0}, 100, tol=1e-6)
    root_x = root_1d[x]
    assert equals((root_x) - np.sin(root_x), 0.0, tol=1e-6)
    root_1d = rf.newton_scalar(f, {x: 3.0}, 100, tol=1e-6, method='backward')
    root_x = root_1d[x]
    assert equals((root_x) - np.sin(root_x), 0.0, tol=1e-6)

    # test 2-d scalar function
    x, y = fwd.Variable(), fwd.Variable()
    g = x**2 + y**2
    root_2d = rf.newton_scalar(g, {x: 1.0, y: 2.0}, 100, tol=1e-6)
    root_x, root_y = root_2d[x], root_2d[y]
    assert equals(root_x**2 + root_y**2, 0.0, tol=1e-6)
    root_2d = rf.newton_scalar(g, {x: 1.0, y: 2.0}, 100,
                               tol=1e-6, method='backward')
    root_x, root_y = root_2d[x], root_2d[y]
    assert equals(root_x**2 + root_y**2, 0.0, tol=1e-6)
    # test warning
    x = fwd.Variable()
    f = x - fwd.sin(x)
    root_warning = rf.newton_scalar(f, {x: 3.0}, 0, tol=1e-6)
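rf.newton_scalar itself is not shown here; for the 1-D case it presumably performs the classic iteration x <- x - f(x) / f'(x), with the derivative supplied by the forward or backward AD mode. A standalone sketch of that iteration with a hand-written derivative reproduces what the first assertions check:

import numpy as np

def newton_1d(f, fprime, x0, max_iter=100, tol=1e-6):
    # classic Newton iteration: x <- x - f(x) / f'(x)
    x = x0
    for _ in range(max_iter):
        x -= f(x) / fprime(x)
        if abs(f(x)) < tol:
            break
    return x

root = newton_1d(lambda x: x - np.sin(x), lambda x: 1.0 - np.cos(x), 3.0)
print(root, root - np.sin(root))   # root - sin(root) is ~0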
Example 3
def test_tan_2ndord_2vars():
    """
    Function testing 2nd order derivative for tan with two-variable input
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.tan(x / y)
    df_dxdy = lambda x, y: -(y / np.cos(x / y)**2 +
                             2 * x * np.tan(x / y) / np.cos(x / y)**2) / y**3
    assert equals(f.derivative_at((x, x), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at(x, {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at((y, x), {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  df_dxdy(1.5, 2.5))
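The df_dxdy formula above can be cross-checked numerically with a central finite-difference approximation of the mixed partial (numpy only, independent of the AD package):

import numpy as np

def f(x, y):
    return np.tan(x / y)

def mixed_partial_fd(f, x, y, h=1e-5):
    # central-difference approximation of d2f / (dy dx)
    return (f(x + h, y + h) - f(x + h, y - h)
            - f(x - h, y + h) + f(x - h, y - h)) / (4.0 * h * h)

x0, y0 = 1.5, 2.5
analytic = -(y0 / np.cos(x0 / y0)**2 +
             2 * x0 * np.tan(x0 / y0) / np.cos(x0 / y0)**2) / y0**3
print(mixed_partial_fd(f, x0, y0), analytic)   # the two values agree to about 1e-5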
Example 4
def test_exp_2ndord_2vars():
    """
    Function testing 2nd order derivative for exp with two-variable input
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.exp(x / y)
    df_dxdy = lambda x, y: -(x * np.exp(x / y) + y * np.exp(x / y)) / y**3
    assert equals(f.derivative_at((x, x), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at(x, {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at((y, x), {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  df_dxdy(1.5, 2.5))
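For reference, the expected value in df_dxdy follows from the chain and product rules:

\[
\frac{\partial}{\partial x} e^{x/y} = \frac{1}{y}\,e^{x/y},
\qquad
\frac{\partial^2}{\partial y\,\partial x} e^{x/y}
= \frac{1}{y}\,e^{x/y}\left(-\frac{x}{y^2}\right) - \frac{1}{y^2}\,e^{x/y}
= -\frac{x\,e^{x/y} + y\,e^{x/y}}{y^3}.
\]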
Example 5
def testplot():
    """
    Function testing whether plot.py works
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = 100.0 * (y - x**2)**2 + (1 - x)**2.0
    plot_contour(f, {x: -2, y: -1}, x, y, plot_range=[-3, 3],
                 method='gradient_descent')
    plot_contour(f, {x: -2, y: -1}, x, y, plot_range=[-3, 3], method='newton')
Example 6
def test_sin():
    """
    Function testing sin
    """
    a = fwd.Variable()
    b = fwd.Variable()
    f = fwd.sin(a * b)
    assert equals(f.derivative_at(a, {a: 2, b: 2}), np.cos(4) * 2)
    assert equals(f.evaluation_at({a: 1, b: 2}), np.sin(2))
Example 7
def test_power():
    """
    Function testing power
    """
    x = fwd.Variable()
    y = fwd.Variable()
    f = x**y
    assert equals(f.evaluation_at({x: 3.0, y: 2.0}), 9.0)
    assert equals(f.derivative_at(x, {x: 3.0, y: 2.0}), 6.0)
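For reference, the partial derivatives of x**y (the test only checks the first):

\[
\frac{\partial}{\partial x}\,x^{y} = y\,x^{\,y-1},
\qquad
\frac{\partial}{\partial y}\,x^{y} = x^{y}\ln x,
\]

so at (x, y) = (3, 2) the derivative with respect to x is 2 * 3 = 6.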
Example 8
def test_newton():
    """
    Function testing Newton's Method
    """
    x, y = fwd.Variable(), fwd.Variable()
    rosen = 100.0 * (y - x**2)**2 + (1 - x)**2.0
    newton_returned = opt.newton(rosen, {x: 0.0, y: 1.0})
    assert equals(newton_returned[x], 1.0, tol=1e-6)
    assert equals(newton_returned[y], 1.0, tol=1e-6)
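opt.newton is not shown here; it presumably iterates the classic Newton step p <- p - H(p)^-1 grad f(p), with the gradient and Hessian supplied by the AD package. A standalone sketch with a hand-coded Rosenbrock gradient and Hessian converges to the same minimizer (1, 1) that the assertions check:

import numpy as np

def rosen_grad(x, y):
    return np.array([-400.0 * x * (y - x**2) - 2.0 * (1.0 - x),
                     200.0 * (y - x**2)])

def rosen_hess(x, y):
    return np.array([[1200.0 * x**2 - 400.0 * y + 2.0, -400.0 * x],
                     [-400.0 * x, 200.0]])

p = np.array([0.0, 1.0])
for _ in range(50):
    step = np.linalg.solve(rosen_hess(*p), rosen_grad(*p))
    p = p - step
    if np.linalg.norm(step) < 1e-10:
        break
print(p)   # approximately [1.0, 1.0]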
Example 9
def test_bfgs():
    """
    Function testing Broyden–Fletcher–Goldfarb–Shanno (BFGS) algorithm 
    """
    x, y = fwd.Variable(), fwd.Variable()
    rosen = 100.0 * (y - x**2)**2 + (1 - x)**2.0
    bfgs_returned = opt.bfgs(rosen, {x: 0.0, y: 1.0})
    assert equals(bfgs_returned[x], 1.0, tol=1e-6)
    assert equals(bfgs_returned[y], 1.0, tol=1e-6)
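opt.bfgs is likewise not shown here. The standard BFGS scheme it presumably follows keeps an approximation H_k to the inverse Hessian and updates it from successive gradient differences:

\[
s_k = x_{k+1} - x_k,\qquad
y_k = \nabla f(x_{k+1}) - \nabla f(x_k),\qquad
\rho_k = \frac{1}{y_k^{\mathsf T} s_k},
\]
\[
H_{k+1} = \left(I - \rho_k\, s_k y_k^{\mathsf T}\right) H_k \left(I - \rho_k\, y_k s_k^{\mathsf T}\right) + \rho_k\, s_k s_k^{\mathsf T},
\qquad
x_{k+1} = x_k - \alpha_k H_k \nabla f(x_k),
\]

with the step size \alpha_k chosen by a line search. Started from (0, 1) on the Rosenbrock function, the iterates approach the minimizer (1, 1) checked by the assertions.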
Example 10
def test_exp():
    """
    Function testing the exponential function
    """
    x = fwd.Variable()
    y = fwd.Variable()
    g = x + fwd.exp(y - 1)
    assert equals(g.evaluation_at({x: 1.0, y: 2.0}), 1.0 + np.exp(1.0))
    assert equals(g.derivative_at(x, {x: 1.0, y: 2.0}), 1.0)
    assert equals(g.derivative_at(y, {x: 1.0, y: 2.0}), np.exp(1.0))
Example 11
def test_multiply():
    """
    Function testing multiplication
    """
    x = fwd.Variable()
    y = fwd.Variable()
    f = x * y
    assert equals(f.evaluation_at({x: 3.0, y: 2.0}), 6.0)
    assert equals(f.derivative_at(x, {x: 3.0, y: 2.0}), 2.0)
    assert equals(f.derivative_at(y, {x: 3.0, y: 2.0}), 3.0)
Example 12
def test_tan():
    """
    Function testing tan
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.tan(c * b)
    assert equals(f.evaluation_at({a: 1, b: 2}), np.tan(4))
    assert equals(f.derivative_at(c, {a: 1, b: 2}), 2 * (1 / np.cos(4))**2)
Example 13
def test_eq():
    """
    Function testing dunder method equal
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    g = fwd.sin(x) + fwd.cos(y)
    h = fwd.sin(y) + fwd.cos(x)
    assert f == g
    assert f != h
Example 14
def test_arctan():
    """
    Function testing arctan
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.arctan(c * b)
    assert equals(f.evaluation_at({a: 2, b: 3}), np.arctan(18))
    assert equals(f.derivative_at(c, {a: 2, b: 3}), (1 / (18**2 + 1)) * 3)
Example 15
def test_tanh():
    """
    Function testing tanh
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.tanh(c)
    assert equals(f.evaluation_at({a: 3, b: 2}), np.tanh(6))
    assert equals(f.derivative_at(c, {a: 3, b: 2}), 1 - np.tanh(6)**2)
Example 16
def test_cosh():
    """
    Function testing cosh
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.cosh(c * b)
    assert equals(f.evaluation_at({a: 3, b: 2}), np.cosh(12))
    assert equals(f.derivative_at(c, {a: 3, b: 2}), np.sinh(12) * 2)
Example 17
def test_divide():
    """
    Function testing division
    """
    x = fwd.Variable()
    y = fwd.Variable()
    f = x / y
    assert equals(f.evaluation_at({x: 3.0, y: 2.0}), 1.5)
    assert equals(f.derivative_at(x, {x: 3.0, y: 2.0}), 1 / 2.0)
    assert equals(f.derivative_at(y, {x: 3.0, y: 2.0}), -0.75)
Example 18
def test_cotan():
    """
    Function testing cotan
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.cotan(c * b)
    assert equals(f.evaluation_at({a: 1, b: 2}), 1 / np.tan(4))
    assert equals(f.derivative_at(c, {a: 1, b: 2}), -(1 / (np.sin(4)**2)) * 2)
Example 19
def test_adding_three_variables():
    """
    Function testing addition with variable class
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = fwd.Variable()
    f = fwd.exp(a - b + c)
    assert equals(f.evaluation_at({a: 1.0, b: 2.0, c: 3.0}), np.exp(2.0))
    assert equals(f.derivative_at(b, {a: 1.0, b: 2.0, c: 3.0}), -np.exp(2.0))
    assert equals(f.derivative_at(a, {a: 1.0, b: 2.0, c: 3.0}), np.exp(2.0))
Example 20
def test_arccos():
    """
    Function testing arccos
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.arccos(c * b)
    assert equals(f.evaluation_at({a: 0.2, b: 0.5}), np.arccos(0.05))
    assert equals(f.derivative_at(c, {a: 0.2, b: 0.5}),
                  (-1 / np.sqrt(1 - (0.2 * 0.5 * 0.5)**2)) * 0.5)
Example 21
def test_sech():
    """
    Function testing sech
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.sech(c * b)
    # - tanh x sech x
    assert equals(f.evaluation_at({a: 2, b: 1}), 1 / np.cosh(2))
    assert equals(f.derivative_at(c, {a: 2, b: 1}),
                  -(np.sinh(2) / np.cosh(2)) * (1 / np.cosh(2)) * 1)
Example 22
def test_cos():
    """
    Function testing cos
    """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a + b
    f1 = fwd.cos(a + c)
    f2 = fwd.cos(a * b)
    assert equals(f1.evaluation_at({a: 1.0, b: 2.0}), np.cos(4))
    assert equals(f2.evaluation_at({a: 1.0, b: 2}), np.cos(2))
    assert equals(f1.derivative_at(a, {a: 1.0, b: 2.0}), -np.sin(1 + 3) * 2)
    assert equals(f2.derivative_at(a, {a: 2, b: 2}), -np.sin(2 * 2) * 2)
Example 23
def test_gradient_descent():
    """
    Function testing Gradient Descent
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = x**2 - 2 * x + 1 + y**2
    gradient_descent_returned = opt.gradient_descent(f, {x: -1.0, y: 2.0},
                                                     learning_rate=0.1)
    assert equals(gradient_descent_returned[x], 1.0, tol=1e-3)
    assert equals(gradient_descent_returned[y], 0.0, tol=1e-3)
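opt.gradient_descent presumably iterates the plain update p <- p - learning_rate * grad f(p). A hand-rolled version on the same quadratic f = (x - 1)**2 + y**2 converges to the minimizer (1, 0) that the assertions check:

import numpy as np

def grad(p):
    x, y = p
    # gradient of (x - 1)**2 + y**2
    return np.array([2.0 * (x - 1.0), 2.0 * y])

p = np.array([-1.0, 2.0])
for _ in range(200):
    p = p - 0.1 * grad(p)   # learning_rate = 0.1
print(p)   # approximately [1.0, 0.0]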
Example 24
def test_csc():
    """
    Function testing csc
    """
    # -csc x cot x
    a = fwd.Variable()
    b = fwd.Variable()
    c = a * b
    f = fwd.csc(c * b)
    assert equals(f.evaluation_at({a: 1, b: 2}), 1 / np.sin(4))
    assert equals(f.derivative_at(c, {a: 1, b: 2}),
                  -(1 / np.tan(4)) * (1 / np.sin(4)) * 2)
Example 25
def test_notimplemented():
    """
    Function testing raising not implemented error for higher order
    """
    x = fwd.Variable()
    y = fwd.Variable()

    with pytest.raises(NotImplementedError):
        f = x * y
        f.derivative_at(x, {x: 0.5, y: 0.5}, order=3)
    with pytest.raises(NotImplementedError):
        f = x / y
        f.derivative_at(x, {x: 0.5, y: 0.5}, order=3)

    with pytest.raises(NotImplementedError):
        f = fwd.cotan(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.sec(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.csc(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.sinh(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.cosh(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.tanh(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.csch(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.sech(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.coth(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.arcsin(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.arccos(x)
        f.derivative_at(x, {x: 0.5}, order=2)
    with pytest.raises(NotImplementedError):
        f = fwd.arctan(x)
        f.derivative_at(x, {x: 0.5}, order=2)
Example 26
def test_gradient():
    """
    Function testing generation of gradient matrix
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    f_gradient_at = lambda x, y: np.array([np.cos(x), -np.sin(y)])
    gradient_expected = f_gradient_at(1.5, 2.5)
    gradient_returned = f.gradient_at({x: 1.5, y: 2.5})
    for i in range(2):
        assert equals(gradient_expected[i], gradient_returned[i])
    gradient_returned = f.gradient_at({x: 1.5, y: 2.5}, returns_dict=True)
    assert equals(gradient_returned[x], gradient_expected[0])
    assert equals(gradient_returned[y], gradient_expected[1])
Example 27
def test_pow_2ndord():
    """
    Function testing 2nd order power
    """
    # one variable
    x = fwd.Variable()
    f = (x + 1)**3
    assert equals(f.derivative_at(x, {x: 2.0}, order=2), 18.0)
    # two variables
    x, y = fwd.Variable(), fwd.Variable()
    g = (x + y)**3
    assert equals(g.derivative_at(x, {x: 2.0, y: 1.0}, order=2), 18.0)
    # test error raising
    with pytest.raises(NotImplementedError):
        g.derivative_at(x, {x: 1.0, y: 2.0}, order=3)
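The expected value 18.0 in both cases is just the analytic second derivative:

\[
f(x) = (x + 1)^3 \;\Rightarrow\; f''(x) = 6\,(x + 1),\quad f''(2) = 18,
\qquad
g(x, y) = (x + y)^3 \;\Rightarrow\; \frac{\partial^2 g}{\partial x^2} = 6\,(x + y) = 18 \text{ at } (2, 1).
\]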
Example 28
def test_hessian():
    """
    Function testing generation of hessian matrix
    """
    x, y = fwd.Variable(), fwd.Variable()
    rosen = 100.0 * (y - x**2)**2 + (1 - x)**2.0
    rosen_hessian = lambda x, y: \
        np.array([[1200 * x**2 - 400 * y + 2, -400 * x],
                  [-400 * x, 200]])
    rosen_hessian_returned = rosen.hessian_at({x: 1.0, y: 1.0})
    rosen_hessian_expected = rosen_hessian(1.0, 1.0)
    for i in range(2):
        for j in range(2):
            assert equals(rosen_hessian_returned[i, j],
                          rosen_hessian_expected[i, j])
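For reference, the expected Hessian follows from differentiating the Rosenbrock function twice:

\[
f(x, y) = 100\,(y - x^2)^2 + (1 - x)^2,
\qquad
\nabla f(x, y) = \begin{pmatrix} -400\,x\,(y - x^2) - 2\,(1 - x) \\ 200\,(y - x^2) \end{pmatrix},
\]
\[
\nabla^2 f(x, y) = \begin{pmatrix} 1200\,x^2 - 400\,y + 2 & -400\,x \\ -400\,x & 200 \end{pmatrix},
\qquad
\nabla^2 f(1, 1) = \begin{pmatrix} 802 & -400 \\ -400 & 200 \end{pmatrix}.
\]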
Example 29
def test_divide_constant():
    """
    Function testing division with constant
    """
    x = fwd.Variable()
    assert equals((x / 2.0).derivative_at(x, {x: 3.0}), 0.5)
    assert equals((2.0 / x).derivative_at(x, {x: 3.0}), -2 / 9.0)
Example 30
def test_multiply_constant():
    """
    Function testing multiplication with constant
    """
    x = fwd.Variable()
    assert equals((2.0 * x).derivative_at(x, {x: 3.0}), 2.0)
    assert equals((x * 2.0).derivative_at(x, {x: 3.0}), 2.0)