def testGradientFunc(self):
  """Tests calling _symbolic_gradient on a user-defined function.

  Defines XSquarePlusOne and a hand-written gradient for it, then checks
  f(2) == 5 and the symbolic gradient at x=2 with dy=0.1 (2*x*dy == 0.4).
  """

  def XSquarePlusOne(x):
    return x * x + 1.0

  def XSquarePlusOneGrad(x, dy):
    dx = functional_ops._symbolic_gradient(
        input=[x, dy], Tout=[tf.float32], f="XSquarePlusOne", name="dx")
    return dx

  g = tf.Graph()
  with g.as_default():
    f = function.define_function(XSquarePlusOne, {"x": tf.float32})
    # BUG FIX: the original rebound `g` (the Graph above) to the gradient
    # FunctionDef, shadowing the graph variable. Use a distinct name.
    grad_func = function.define_function(XSquarePlusOneGrad,
                                         {"x": tf.float32,
                                          "dy": tf.float32})
    epsilon = tf.constant([0.1])
    two = tf.constant([2.0])
    call_f = function.call_function(f, two)
    call_g = function.call_function(grad_func, two, epsilon)
    with tf.Session() as sess:
      self.assertAllClose([5.0], sess.run(call_f))
      self.assertAllClose([0.4], sess.run(call_g))
def testStrippedOpListNestedFunctions(self):
  """Tests stripped_op_list_for_graph with nested defined functions.

  Defining functions alone must contribute no ops; only calling one pulls
  the transitively used ops (Const from the feed, Square from f0) into the
  stripped op list.
  """
  with self.test_session():
    # Square two levels deep: f1 calls f0, f0 squares its input.
    def f0(x):
      return tf.square(x)
    f0 = function.define_function(f0, {"x": tf.int32})

    def f1(x):
      return function.call_function(f0, x)
    f1 = function.define_function(f1, {"x": tf.int32})

    # At this point we've defined two functions but haven't called them, so
    # there should be no used ops.
    op_list = tf.contrib.util.stripped_op_list_for_graph(
        tf.get_default_graph().as_graph_def())
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(op_list.op), 0)

    # If we call the function on a constant, there should be two ops.
    function.call_function(f1, tf.constant(7))
    op_list = tf.contrib.util.stripped_op_list_for_graph(
        tf.get_default_graph().as_graph_def())
    self.assertEqual(["Const", "Square"], [op.name for op in op_list.op])
def testGradientFunc(self):
  """Checks a defined function and its symbolic gradient.

  NOTE(review): this method duplicates an earlier `testGradientFunc`
  definition in this file; within one class the later definition wins.
  Consider removing one copy.
  """

  def XSquarePlusOne(x):
    return x * x + 1.0

  def XSquarePlusOneGrad(x, dy):
    # Route the gradient through the symbolic-gradient op, resolving the
    # forward function by name.
    return functional_ops._symbolic_gradient(
        input=[x, dy], Tout=[tf.float32], f="XSquarePlusOne", name="dx")

  g = tf.Graph()
  with g.as_default():
    f = function.define_function(XSquarePlusOne, {"x": tf.float32})
    g = function.define_function(
        XSquarePlusOneGrad, {"x": tf.float32, "dy": tf.float32})
    epsilon = tf.constant([0.1])
    two = tf.constant([2.0])
    call_f = function.call_function(f, two)
    call_g = function.call_function(g, two, epsilon)
    with tf.Session() as sess:
      # f(2) = 2*2 + 1 = 5; df/dx * dy = 2*2 * 0.1 = 0.4.
      self.assertAllClose([5.0], sess.run(call_f))
      self.assertAllClose([0.4], sess.run(call_g))
def testDefineFunctionNames(self):
  """Tests op naming for repeated/explicit/scoped function calls.

  Repeated anonymous calls get uniquified names (Foo, Foo_1); an explicit
  `name=` is honored, and is prefixed by the active name scope.
  """

  def Foo(a):
    return a + 1

  with tf.Graph().as_default():
    f_def = function.define_function(Foo, {"a": tf.float32})
    one = tf.constant([1.0])
    # FIX: assertEquals is a deprecated alias of assertEqual.
    call1 = function.call_function(f_def, one)
    self.assertEqual("Foo", call1.op.name)
    call2 = function.call_function(f_def, one)
    self.assertEqual("Foo_1", call2.op.name)
    call3 = function.call_function(f_def, one, name="mine")
    self.assertEqual("mine", call3.op.name)
    with tf.name_scope("my"):
      call4 = function.call_function(f_def, one, name="precious")
      self.assertEqual("my/precious", call4.op.name)
def testDefineFunctionNoArgs(self):
  """Tests defining and calling a zero-argument function.

  The call op is named after the Python function, and evaluating it
  returns the constant the function produces.
  """

  def AConstant():
    return tf.constant([42])

  with tf.Graph().as_default():
    f_def = function.define_function(AConstant, {})
    call = function.call_function(f_def)
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual("AConstant", call.op.name)
    with tf.Session() as sess:
      self.assertAllEqual([42], sess.run(call))
def testDefineFunction2Args(self):
  """Tests a two-argument defined function: APlus2B(1, 2) == 1 + 2*2 == 5."""

  def APlus2B(a, b):
    return a + b * 2

  with tf.Graph().as_default():
    f_def = function.define_function(APlus2B,
                                     {"a": tf.float32, "b": tf.float32})
    one = tf.constant([1.0])
    two = tf.constant([2.0])
    call = function.call_function(f_def, one, two)
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual("APlus2B", call.op.name)
    with tf.Session() as sess:
      self.assertAllEqual([5.0], sess.run(call))
def testCallErrors(self):
  """Tests argument-count validation in function.call_function.

  For functions of arity 0, 1 and 2, calling with the wrong number of
  positional args must raise ValueError mentioning the expected count,
  while exact-arity calls succeed. Also checks that `name=` is accepted
  and any other keyword argument is rejected.
  """

  # Three functions of arity 0, 1 and 2.
  def Const():
    return tf.constant(1)

  def PlusOne(a):
    return a + 1

  def PlusMinus(a, b):
    return a + b, b - a

  with tf.Graph().as_default():
    one = tf.constant([1])
    two = tf.constant([2])
    const = function.define_function(Const, {})
    plus_one = function.define_function(PlusOne, {"a": tf.int32})
    plus_minus = function.define_function(PlusMinus, {
        "a": tf.int32,
        "b": tf.int32
    })

    # Arity 0: only the no-arg call is valid.
    function.call_function(const)
    with self.assertRaisesRegexp(ValueError, "arguments: 0"):
      function.call_function(const, one)
    with self.assertRaisesRegexp(ValueError, "arguments: 0"):
      function.call_function(const, one, two)

    # Arity 1: exactly one argument.
    with self.assertRaisesRegexp(ValueError, "arguments: 1"):
      function.call_function(plus_one)
    function.call_function(plus_one, one)
    with self.assertRaisesRegexp(ValueError, "arguments: 1"):
      function.call_function(plus_one, one, two)

    # Arity 2: exactly two arguments.
    with self.assertRaisesRegexp(ValueError, "arguments: 2"):
      function.call_function(plus_minus)
    with self.assertRaisesRegexp(ValueError, "arguments: 2"):
      function.call_function(plus_minus, one)
    function.call_function(plus_minus, one, two)

    # `name` is the only supported keyword argument.
    function.call_function(plus_one, one, name="p1")
    with self.assertRaisesRegexp(ValueError, "Unknown keyword arguments"):
      function.call_function(plus_one, one, device="/gpu:0")
def testCallErrors(self):
  """Verifies call_function arity checks and keyword handling.

  NOTE(review): this method duplicates an earlier `testCallErrors` in this
  file; within one class the later definition wins. Consider removing one.
  """

  def Const():
    return tf.constant(1)

  def PlusOne(a):
    return a + 1

  def PlusMinus(a, b):
    return a + b, b - a

  with tf.Graph().as_default():
    one = tf.constant([1])
    two = tf.constant([2])
    const = function.define_function(Const, {})
    plus_one = function.define_function(PlusOne, {"a": tf.int32})
    plus_minus = function.define_function(
        PlusMinus, {"a": tf.int32, "b": tf.int32})

    def expect_arity_error(count, *call_args):
      # Calls with the wrong argument count must raise a ValueError that
      # names the expected arity.
      func, args = call_args[0], call_args[1:]
      with self.assertRaisesRegexp(ValueError, "arguments: %d" % count):
        function.call_function(func, *args)

    # Correct-arity calls succeed.
    function.call_function(const)
    function.call_function(plus_one, one)
    function.call_function(plus_minus, one, two)

    # Under- and over-supplying arguments fails with the right message.
    expect_arity_error(0, const, one)
    expect_arity_error(0, const, one, two)
    expect_arity_error(1, plus_one)
    expect_arity_error(1, plus_one, one, two)
    expect_arity_error(2, plus_minus)
    expect_arity_error(2, plus_minus, one)

    # `name` is accepted; any other keyword argument is rejected.
    function.call_function(plus_one, one, name="p1")
    with self.assertRaisesRegexp(ValueError, "Unknown keyword arguments"):
      function.call_function(plus_one, one, device="/gpu:0")
def f1(x):
  """Forwards `x` to a call of the previously defined function `f0`."""
  result = function.call_function(f0, x)
  return result
"""Demo script: defines XSquarePlusOne and its symbolic gradient,
writes the graph to /tmp/tfb/simple.pbtxt, and prints f(2) and the
gradient at x=2 with dy=1.0.
"""
import tensorflow as tf
from tensorflow.python.framework import function
from tensorflow.python.ops import functional_ops

graph = tf.Graph()
with graph.as_default():
  # NOTE(review): `tt` is unused by the computation but adds a Const node
  # to the serialized graph; kept to preserve the written pbtxt.
  tt = tf.constant([4.2])

  def XSquarePlusOne(x):
    # BUG FIX: removed an unused `tf.placeholder` the original created
    # here; it was never fed or read, and a placeholder inside a defined
    # function cannot be fed at run time anyway.
    return x * x + 1.0

  def XSquarePlusOneGrad(x, dy):
    dx = functional_ops._symbolic_gradient(
        input=[x, dy], Tout=[tf.float32], f="XSquarePlusOne", name="dx")
    return dx

  f = function.define_function(XSquarePlusOne, {"x": tf.float32})
  g = function.define_function(XSquarePlusOneGrad,
                               {"x": tf.float32, "dy": tf.float32})
  epsilon = tf.constant([1.0])
  two = tf.constant([2.0])
  call_f = function.call_function(f, two)
  call_g = function.call_function(g, two, epsilon)
  tf.train.write_graph(graph.as_graph_def(), '/tmp/tfb', 'simple.pbtxt',
                       as_text=True)
  with tf.Session() as sess:
    # FIX: parenthesized print works identically for a single argument in
    # Python 2 and is valid Python 3.
    print(sess.run(call_f))
    print(sess.run(call_g))