# Example 1
def test_newton_scalar():
    """
    Exercise Newton's-method root finding on 1-D and 2-D scalar
    functions, using both forward and backward differentiation modes.
    """
    # 1-D scalar function: x - sin(x) has a root at x = 0
    x = fwd.Variable()
    f = x - fwd.sin(x)
    for extra in ({}, {'method': 'backward'}):
        solution = rf.newton_scalar(f, {x: 3.0}, 100, tol=1e-6, **extra)
        r = solution[x]
        assert equals(r - np.sin(r), 0.0, tol=1e-6)

    # 2-D scalar function: x**2 + y**2 has its only root at the origin
    x, y = fwd.Variable(), fwd.Variable()
    g = x**2 + y**2
    for extra in ({}, {'method': 'backward'}):
        solution = rf.newton_scalar(g, {x: 1.0, y: 2.0}, 100, tol=1e-6,
                                    **extra)
        rx, ry = solution[x], solution[y]
        assert equals(rx**2 + ry**2, 0.0, tol=1e-6)

    # zero iterations: exercises the non-convergence warning path
    x = fwd.Variable()
    f = x - fwd.sin(x)
    root_warning = rf.newton_scalar(f, {x: 3.0}, 0, tol=1e-6)
# Example 2
def test_eq():
    """
    Verify the __eq__ / __ne__ dunder methods on expressions.
    """
    x, y = fwd.Variable(), fwd.Variable()
    first = fwd.sin(x) + fwd.cos(y)
    second = fwd.sin(x) + fwd.cos(y)
    swapped = fwd.sin(y) + fwd.cos(x)
    # structurally identical expressions compare equal
    assert first == second
    # exchanging the variables produces a different expression
    assert first != swapped
# Example 3
def test_sin_2ndord_2vars():
    """
    Check second-order derivatives of sin(x / y) in two variables.
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x / y)
    # analytic mixed second derivative d2f/(dx dy)
    mixed = lambda a, b: -(b * np.cos(a / b) - a * np.sin(a / b)) / b**3
    # the (x, x) tuple form must agree with the plain second derivative in x
    assert equals(f.derivative_at((x, x), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at(x, {x: 1.5, y: 2.5}, order=2))
    # mixed partial derivatives are symmetric
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at((y, x), {x: 1.5, y: 2.5}, order=2))
    # and they match the closed-form expression
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  mixed(1.5, 2.5))
# Example 4
def test_vectorfunction():
    """
    Verify evaluation, gradient and jacobian of a vector of expressions.
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    g = x**2 - y**2
    vec = fwd.VectorFunction([f, g])

    # evaluation_at returns [f, g] evaluated at the point
    expected_vals = np.array([
        np.sin(np.pi / 6) + np.cos(np.pi / 6),
        (np.pi / 6)**2 - (np.pi / 6)**2,
    ])
    returned_vals = vec.evaluation_at({x: np.pi / 6, y: np.pi / 6})
    for got, want in zip(returned_vals, expected_vals):
        assert equals(got, want)

    # gradient_at w.r.t. x returns [df/dx, dg/dx]
    expected_grad = np.array([np.cos(np.pi / 6), np.pi / 3])
    returned_grad = vec.gradient_at(x, {x: np.pi / 6, y: np.pi / 6})
    for got, want in zip(returned_grad, expected_grad):
        assert equals(got, want)

    # jacobian_at returns the full 2x2 matrix of partial derivatives
    expected_jac = np.array([[np.cos(np.pi / 6), -np.sin(np.pi / 6)],
                             [np.pi / 3, -np.pi / 3]])
    returned_jac = vec.jacobian_at({x: np.pi / 6, y: np.pi / 6})
    for row in range(2):
        for col in range(2):
            assert equals(returned_jac[row, col], expected_jac[row, col])
# Example 5
def test_sin_2ndord():
    """
    Check second-order derivatives of sin in one and two variables,
    plus the NotImplementedError for orders above 2.
    """
    # single variable: d2/dx2 sin(x) = -sin(x)
    x = fwd.Variable()
    f = fwd.sin(x)
    assert equals(f.derivative_at(x, {x: 1.0}, order=2), -np.sin(1.0))
    # two variables: d2/dx2 sin(x*y) = -y**2 * sin(x*y)
    x, y = fwd.Variable(), fwd.Variable()
    g = fwd.sin(x * y)
    assert equals(g.derivative_at(x, {x: 1.0, y: 2.0}, order=2),
                  -2.0**2 * np.sin(2.0))
    # orders above 2 are not supported
    with pytest.raises(NotImplementedError):
        g.derivative_at(x, {x: 1.0, y: 2.0}, order=3)
# Example 6
def test_sin():
    """
    Check evaluation and first derivative of sin on a product of
    two variables.
    """
    a, b = fwd.Variable(), fwd.Variable()
    f = fwd.sin(a * b)
    # df/da = cos(a*b) * b, evaluated at a = b = 2
    assert equals(f.derivative_at(a, {a: 2, b: 2}), np.cos(4) * 2)
    assert equals(f.evaluation_at({a: 1, b: 2}), np.sin(2))
# Example 7
def test_gradient():
    """
    Check gradient computation, both as an array and as a dict.
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    # analytic gradient of sin(x) + cos(y): [cos(x), -sin(y)]
    gradient_expected = np.array([np.cos(1.5), -np.sin(2.5)])
    gradient_returned = f.gradient_at({x: 1.5, y: 2.5})
    for want, got in zip(gradient_expected, gradient_returned):
        assert equals(want, got)
    # returns_dict=True keys the components by variable
    gradient_dict = f.gradient_at({x: 1.5, y: 2.5}, returns_dict=True)
    assert equals(gradient_dict[x], gradient_expected[0])
    assert equals(gradient_dict[y], gradient_expected[1])
# Example 8
def test_sqrt():
    """
    Check sqrt: value, first partials, and the mixed second derivative.
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sqrt(fwd.sin(x) + fwd.cos(y))
    inner = np.sin(1.5) + np.cos(2.5)  # sin(x) + cos(y) at (1.5, 2.5)
    assert equals(f.evaluation_at({x: 1.5, y: 2.5}), np.sqrt(inner))
    # df/dx = cos(x) / (2*sqrt(sin(x) + cos(y)))
    assert equals(f.derivative_at(x, {x: 1.5, y: 2.5}),
                  np.cos(1.5) / (2 * np.sqrt(inner)))
    # df/dy = -sin(y) / (2*sqrt(sin(x) + cos(y)))
    assert equals(f.derivative_at(y, {x: 1.5, y: 2.5}),
                  -np.sin(2.5) / (2 * np.sqrt(inner)))
    # d2f/dxdy = cos(x)*sin(y) / (4*(sin(x) + cos(y))**1.5)
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}),
                  np.cos(1.5) * np.sin(2.5) / (4 * inner**1.5))
# Example 9
def test_log():
    """
    Check natural log (ln): value, first partials, the mixed second
    derivative, and the NotImplementedError for orders above 2.
    """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.log(fwd.sin(x) + y**2)
    inner = np.sin(1.5) + 2.5**2  # sin(x) + y**2 at (1.5, 2.5)
    assert equals(f.evaluation_at({x: 1.5, y: 2.5}), np.log(inner))
    # df/dx = cos(x) / (sin(x) + y**2)
    assert equals(f.derivative_at(x, {x: 1.5, y: 2.5}), np.cos(1.5) / inner)
    # df/dy = 2*y / (sin(x) + y**2)
    assert equals(f.derivative_at(y, {x: 1.5, y: 2.5}), 2 * 2.5 / inner)
    # d2f/dxdy = -2*y*cos(x) / (sin(x) + y**2)**2
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}),
                  -2 * 2.5 * np.cos(1.5) / inner**2)
    with pytest.raises(NotImplementedError):
        f.derivative_at(x, {x: 1.0, y: 2.0}, order=3)
# Example 10
def _check_backward(root, val_dict, nodes):
    """Back-propagate from *root* at *val_dict*, then assert every node's
    accumulated backward derivative (.bder) matches forward-mode
    derivative_at."""
    bp.back_propagation(root, val_dict)
    for node in nodes:
        assert equals(node.bder, root.derivative_at(node, val_dict))


def test_backward():
    """
    Test backward propagation on six expression graphs of increasing
    complexity, verifying every node's backward derivative against the
    forward-mode result via the shared _check_backward helper.
    """
    # graph 1: nested add / multiply
    a = fw.Variable()
    b = fw.Variable()
    c = a + b
    d = fw.Variable()
    e = c * d
    f = a + e
    _check_backward(f, {b: 1, a: 2, d: 4}, [a, b, c, d, e, f])

    # graph 2: subtraction feeding cos
    a = fw.Variable()
    b = fw.Variable()
    e = b - a
    c = fw.cos(e)
    d = a + c
    _check_backward(d, {b: 1, a: 2}, [a, b, c, d])

    # graph 3: reciprocal trig functions composed with sinh
    a = fw.Variable()
    b = fw.Variable()
    c = fw.csc(a)
    d = fw.sec(a)
    e = fw.tan(c)
    f = fw.cotan(d)
    g = fw.sinh(f - e)
    _check_backward(g, {b: 1, a: 2}, [a, b, c, d, e, f, g])

    # graph 4: long chain of hyperbolic and inverse trig functions
    a = fw.Variable()
    b = fw.Variable()
    c = fw.cotan(a)
    d = fw.sech(b)
    e = fw.tanh(d)
    f = fw.csch(e)
    g = c + f
    g2 = g / 2
    h = fw.arcsin(g2)
    i = fw.arccos(h)
    j = fw.arctan(i)
    k = fw.cosh(j)
    l = fw.sin(k)
    _check_backward(l, {b: 1, a: 2},
                    [a, b, c, d, e, f, g, g2, h, i, j, k, l])

    # graph 5: power, exp, coth, log and unary negation
    a = fw.Variable()
    b = fw.Variable()
    c = fw.power(a, 3)
    d = fw.exp(b)
    e = c + d
    f = fw.coth(e)
    g = fw.log(f)
    h = -g
    _check_backward(h, {b: 1, a: 2}, [a, b, c, d, e, f, g, h])

    # graph 6: plain division
    a = fw.Variable()
    b = fw.Variable()
    c = a / b
    _check_backward(c, {b: 1, a: 2}, [a, b, c])
    print('Passed')