def test_compile_gradient(self):
    """compile_gradient requires a scalar output; a non-scalar raises TypeError."""
    def square(x):
        return x ** 2

    tracer = Symbolic(square)
    data = np.random.random((3, 4))
    traced_out = tracer.trace(data)[1]
    # Non-scalar outputs are rejected by the function+gradient compiler.
    self.assertRaises(TypeError, tracer.compile_function_gradient, data, traced_out)
    # Reducing to a scalar makes the gradient well-defined: d/dx sum(x**2) = 2x.
    grad_fn = tracer.compile_gradient(data, traced_out.sum())
    self.assertTrue(np.allclose(grad_fn(data), 2 * data))
# NOTE(review): this method was a byte-identical duplicate of test_compile_gradient
# above; in a class body the later definition silently shadows the earlier one, so
# only one of the two ever ran. Renamed (keeping the `test_` prefix) so the test
# runner collects it independently.
def test_compile_gradient_scalar_required(self):
    """compile_gradient needs a scalar output; non-scalar outputs raise TypeError."""
    def f(x):
        return x ** 2

    s = Symbolic(f)
    x = np.random.random((3, 4))
    o = s.trace(x)[1]
    # A non-scalar traced output cannot be differentiated directly.
    self.assertRaises(TypeError, s.compile_function_gradient, x, o)
    # Summing to a scalar: gradient of sum(x**2) w.r.t. x is 2*x.
    F = s.compile_gradient(x, o.sum())
    self.assertTrue(np.allclose(F(x), 2 * x))
def test_symbolic_readme(self):
    """The README example: chain three traced functions, then compile them."""
    # -- a vanilla function
    def add_two(x):
        return x + 2

    # -- a function referencing a global variable
    y = np.random.random(10)

    def times_y(x):
        return x * y

    # -- a function with a local variable
    def shift_square(x):
        z = tag(np.ones(10), 'local_var')
        return (x + z) ** 2

    # -- create a general symbolic tracer and apply it to the three functions
    data = np.random.random(10)
    tracer = Symbolic()
    out1 = tracer.trace(add_two, data)
    out2 = tracer.trace(times_y, out1)
    out3 = tracer.trace(shift_square, out2)

    # -- compile a function representing f(x, y, z) = out3
    new_fn = tracer.compile_function(inputs=[data, y, 'local_var'],
                                     outputs=out3)

    # -- compile the gradient of f(x) = out3, with respect to y
    fn_grad = tracer.compile_gradient(inputs=data,
                                      outputs=out3,
                                      wrt=y,
                                      reduction=theano.tensor.sum)
    assert fn_grad  # to stop flake error

    # The compiled pipeline must match plain composition of the three functions.
    self.assertTrue(np.allclose(new_fn(data, y, np.ones(10)),
                                shift_square(times_y(add_two(data)))))
def test_symbolic(self):
    """Trace a three-stage pipeline, then check compiled function and gradient."""
    def add_one(x):
        return x + 1.0

    def double(x):
        return x * 2.0

    def square(x):
        return x ** 2

    tracer = Symbolic()
    data = np.random.random((3, 4))
    stage1 = tracer.trace(add_one, data)
    stage2 = tracer.trace(double, stage1)
    stage3 = tracer.trace(square, stage2)

    # test function: compiled pipeline equals plain composition
    compiled = tracer.compile_function(data, stage3)
    self.assertTrue(np.allclose(compiled(data), square(double(add_one(data)))))

    # test gradient: d/dx sum((2*(x+1))**2) = 8*(x+1)
    total = tracer.trace(lambda x: x.sum(), stage3)
    grad_fn = tracer.compile_gradient(data, total, wrt=data)
    self.assertTrue(np.allclose(grad_fn(data), 8 * (data + 1)))