def test_eq():
    """ Function testing the __eq__ and __ne__ dunder methods """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    g = fwd.sin(x) + fwd.cos(y)
    h = fwd.sin(y) + fwd.cos(x)
    assert f == g
    assert f != h
def test_cos():
    """ Function testing cos """
    a = fwd.Variable()
    b = fwd.Variable()
    c = a + b
    f1 = fwd.cos(a + c)
    f2 = fwd.cos(a * b)
    assert equals(f1.evaluation_at({a: 1.0, b: 2.0}), np.cos(4))
    assert equals(f2.evaluation_at({a: 1.0, b: 2}), np.cos(2))
    assert equals(f1.derivative_at(a, {a: 1.0, b: 2.0}), -np.sin(1 + 3) * 2)
    assert equals(f2.derivative_at(a, {a: 2, b: 2}), -np.sin(2 * 2) * 2)
def test_cos_2ndord_2vars():
    """ Function testing 2nd order derivative for cos with two-variable input """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.cos(x / y)
    # for f = cos(x/y): d2f/dxdy = sin(x/y)/y**2 + x*cos(x/y)/y**3
    #                            = (y*sin(x/y) + x*cos(x/y)) / y**3
    df_dxdy = lambda x, y: (y * np.sin(x / y) + x * np.cos(x / y)) / y**3
    assert equals(f.derivative_at((x, x), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at(x, {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  f.derivative_at((y, x), {x: 1.5, y: 2.5}, order=2))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}, order=2),
                  df_dxdy(1.5, 2.5))
def test_vectorfunction():
    """ Function testing applying operations to a vector of expressions """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    g = x**2 - y**2
    vector = fwd.VectorFunction([f, g])
    # test evaluation_at
    evaluation_returned = vector.evaluation_at({x: np.pi / 6, y: np.pi / 6})
    evaluation_expected = np.array([np.sin(np.pi / 6) + np.cos(np.pi / 6),
                                    (np.pi / 6)**2 - (np.pi / 6)**2])
    for r, e in zip(evaluation_returned, evaluation_expected):
        assert equals(r, e)
    # test gradient_at
    gradient_returned = vector.gradient_at(x, {x: np.pi / 6, y: np.pi / 6})
    gradient_expected = np.array([np.cos(np.pi / 6), np.pi / 3])
    for r, e in zip(gradient_returned, gradient_expected):
        assert equals(r, e)
    # test jacobian_at
    jacobian_returned = vector.jacobian_at({x: np.pi / 6, y: np.pi / 6})
    jacobian_expected = np.array([[np.cos(np.pi / 6), -np.sin(np.pi / 6)],
                                  [np.pi / 3, -np.pi / 3]])
    for i in range(2):
        for j in range(2):
            assert equals(jacobian_returned[i, j], jacobian_expected[i, j])
def test_cos_2ndord():
    """ Function testing 2nd order cos """
    # one variable
    x = fwd.Variable()
    f = fwd.cos(x)
    assert equals(f.derivative_at(x, {x: 1.0}, order=2), -np.cos(1.0))
    # two variables
    x, y = fwd.Variable(), fwd.Variable()
    g = fwd.cos(x * y)
    assert equals(g.derivative_at(x, {x: 1.0, y: 2.0}, order=2),
                  -2.0**2 * np.cos(2.0))
    # test error raising
    with pytest.raises(NotImplementedError):
        g.derivative_at(x, {x: 1.0, y: 2.0}, order=3)
def test_gradient():
    """ Function testing generation of the gradient vector """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sin(x) + fwd.cos(y)
    f_gradient_at = lambda x, y: np.array([np.cos(x), -np.sin(y)])
    gradient_expected = f_gradient_at(1.5, 2.5)
    gradient_returned = f.gradient_at({x: 1.5, y: 2.5})
    for i in range(2):
        assert equals(gradient_expected[i], gradient_returned[i])
    gradient_returned = f.gradient_at({x: 1.5, y: 2.5}, returns_dict=True)
    assert equals(gradient_returned[x], gradient_expected[0])
    assert equals(gradient_returned[y], gradient_expected[1])
def test_sqrt():
    """ Function testing sqrt """
    x, y = fwd.Variable(), fwd.Variable()
    f = fwd.sqrt(fwd.sin(x) + fwd.cos(y))
    dfdx = lambda x, y: np.cos(x) / (2 * np.sqrt(np.sin(x) + np.cos(y)))
    dfdy = lambda x, y: -np.sin(y) / (2 * np.sqrt(np.sin(x) + np.cos(y)))
    d2fdxdy = lambda x, y: np.cos(x) * np.sin(y) / (4 * (np.sin(x) + np.cos(y))**1.5)
    assert equals(f.evaluation_at({x: 1.5, y: 2.5}),
                  np.sqrt(np.sin(1.5) + np.cos(2.5)))
    assert equals(f.derivative_at(x, {x: 1.5, y: 2.5}), dfdx(1.5, 2.5))
    assert equals(f.derivative_at(y, {x: 1.5, y: 2.5}), dfdy(1.5, 2.5))
    assert equals(f.derivative_at((x, y), {x: 1.5, y: 2.5}), d2fdxdy(1.5, 2.5))
def test_backward():
    """ testing backward propagation with six examples """
    # example 1: f = a + (a + b) * d
    a = fw.Variable()
    b = fw.Variable()
    c = a + b
    d = fw.Variable()
    e = c * d
    f = a + e
    val_dict = {b: 1, a: 2, d: 4}
    bp.back_propagation(f, val_dict)
    var_list = [a, b, c, d, e, f]
    for i in var_list:
        assert equals(i.bder, f.derivative_at(i, val_dict))
    # example 2: d = a + cos(b - a)
    a = fw.Variable()
    b = fw.Variable()
    e = b - a
    c = fw.cos(e)
    d = a + c
    val_dict = {b: 1, a: 2}
    bp.back_propagation(d, val_dict)
    var_list = [a, b, c, d]
    for i in var_list:
        assert equals(i.bder, d.derivative_at(i, val_dict))
    # example 3: g = sinh(cotan(sec(a)) - tan(csc(a)))
    a = fw.Variable()
    b = fw.Variable()
    c = fw.csc(a)
    d = fw.sec(a)
    e = fw.tan(c)
    f = fw.cotan(d)
    g = fw.sinh(f - e)
    val_dict = {b: 1, a: 2}
    bp.back_propagation(g, val_dict)
    var_list = [a, b, c, d, e, f, g]
    for i in var_list:
        assert equals(i.bder, g.derivative_at(i, val_dict))
    # example 4: a longer chain of inverse trig and hyperbolic functions
    a = fw.Variable()
    b = fw.Variable()
    c = fw.cotan(a)
    d = fw.sech(b)
    e = fw.tanh(d)
    f = fw.csch(e)
    g = c + f
    g2 = g / 2
    h = fw.arcsin(g2)
    i = fw.arccos(h)
    j = fw.arctan(i)
    k = fw.cosh(j)
    l = fw.sin(k)
    val_dict = {b: 1, a: 2}
    bp.back_propagation(l, val_dict)
    var_list = [a, b, c, d, e, f, g, g2, h, i, j, k, l]
    for num in var_list:
        assert equals(num.bder, l.derivative_at(num, val_dict))
    # example 5: h = -log(coth(a**3 + exp(b)))
    a = fw.Variable()
    b = fw.Variable()
    c = fw.power(a, 3)
    d = fw.exp(b)
    e = c + d
    f = fw.coth(e)
    g = fw.log(f)
    h = -g
    val_dict = {b: 1, a: 2}
    bp.back_propagation(h, val_dict)
    var_list = [a, b, c, d, e, f, g, h]
    for i in var_list:
        assert equals(i.bder, h.derivative_at(i, val_dict))
    # example 6: c = a / b
    a = fw.Variable()
    b = fw.Variable()
    c = a / b
    val_dict = {b: 1, a: 2}
    bp.back_propagation(c, val_dict)
    var_list = [a, b, c]
    for i in var_list:
        assert equals(i.bder, c.derivative_at(i, val_dict))
    print('Passed')