# NOTE: these imports assume the TensorFlow source-tree layout; in older
# releases the jit module lived under tensorflow.contrib.compiler.
from tensorflow.python.compiler.xla import jit
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test


class JITTest(test.TestCase):

  def testJITXlaScope(self):
    with self.test_session(graph=ops.Graph()):
      with jit.experimental_jit_scope(True):
        # XlaScope 0
        a1 = constant_op.constant(1)
      with jit.experimental_jit_scope(True):
        # XlaScope 1
        a2 = constant_op.constant(1)
        with jit.experimental_jit_scope(True):
          # XlaScope still 1, depth 1
          a3 = constant_op.constant(1)
          with jit.experimental_jit_scope(True):
            # XlaScope still 1, depth 2
            a4 = constant_op.constant(1)
          # XlaScope still 1, depth 1
          a5 = constant_op.constant(1)
      with jit.experimental_jit_scope(True):
        # XlaScope now 2, depth 0
        a6 = constant_op.constant(1)

      self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a3.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a4.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a5.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_2", a6.op.get_attr("_XlaScope"))

  def compute(self, use_jit, compute_fn):
    """Builds compute_fn() under a jit scope iff use_jit; returns (r, value)."""
    random_seed.set_random_seed(1234)
    with self.test_session(graph=ops.Graph()) as sess:
      with jit.experimental_jit_scope(use_jit):
        r = compute_fn()
      # Initialize any variables created by compute_fn before evaluating.
      sess.run(variables.global_variables_initializer())
      return (r, sess.run(r))

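  # Illustrative usage of the compute() helper above (a sketch added to this
  # excerpt, not part of the original file): the same computation built with
  # and without a jit scope should produce the same value.
  def testComputeHelperJitMatchesNoJit(self):
    _, v_nojit = self.compute(False, lambda: math_ops.add(1.0, 2.0))
    _, v_jit = self.compute(True, lambda: math_ops.add(1.0, 2.0))
    self.assertAllClose(v_nojit, v_jit)
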
  def testPlaysNicelyWithDefunSeparateGradientScope(self):
    with self.test_session(graph=ops.Graph()) as sess:
      with jit.experimental_jit_scope(True):

        @function.Defun(
            compiled=True, noinline=True, separate_compiled_gradients=True)
        def mulop(x1, x2):
          return x1 * x2

        x = constant_op.constant(1.0)
        r = mulop(x, x)
        g_r = gradients.gradients(r, x, name="GA")[0]

      # Ensure the forward function is compiled.
      graph_def = r.graph.as_graph_def()
      func_attrs = graph_def.library.function[0].attr
      self.assertTrue(func_attrs["_XlaCompile"].b)
      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)

      # Ensure the gradient (SymbolicGradient) is compiled, with a different
      # _XlaScope from the function itself.
      grad_op = g_r.op.inputs[0].op
      self.assertTrue(grad_op.get_attr("_XlaCompile"))
      self.assertEqual(b"jit_scope_0_grad_GA", grad_op.get_attr("_XlaScope"))

      # Ensure the ops run: grad(x1*x1) = 2*x1
      self.assertAllClose([1.0, 1.0, 2.0], sess.run([x, r, g_r]))

  def testCompilationGradientScopeNames(self):
    with self.test_session(graph=ops.Graph()):
      # Use float matmuls (not integer adds) so the gradient pass actually
      # creates ops carrying the copied _Xla* attrs: MatMul's gradient emits
      # new MatMul ops, while Add's gradient merely forwards its input tensor.
      with jit.experimental_jit_scope():
        # XlaScope 0
        a1 = constant_op.constant([[1.]])
        a1t = math_ops.matmul(a1, a1)
      with jit.experimental_jit_scope():
        # XlaScope 1
        a2 = constant_op.constant([[1.]])
        a2t = math_ops.matmul(a2, a2)

      self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))

      grad_a1 = gradients.gradients(a1t, a1, name="GA")[0]
      grad_a2 = gradients.gradients(a2t, a2, name="GB")[0]
      grad_a1 = grad_a1.op.inputs[0]
      grad_a2 = grad_a2.op.inputs[0]
      self.assertTrue(grad_a1.op.get_attr("_XlaCompile"))
      self.assertTrue(grad_a2.op.get_attr("_XlaCompile"))
      self.assertEqual(b"jit_scope_0", grad_a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", grad_a2.op.get_attr("_XlaScope"))

  def testCompilationSeparateGradientScopeNames(self):
    with self.test_session(graph=ops.Graph()):
      with jit.experimental_jit_scope(True, separate_compiled_gradients=True):
        # XlaScope 0
        a1 = constant_op.constant([[1.]])
        a1t = math_ops.matmul(a1, a1)
      with jit.experimental_jit_scope(True, separate_compiled_gradients=True):
        # XlaScope 1
        a2 = constant_op.constant([[1.]])
        a2t = math_ops.matmul(a2, a2)

      self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))

      grad_a1 = gradients.gradients(a1t, a1, name="GA")[0]
      grad_a2 = gradients.gradients(a2t, a2, name="GB")[0]
      grad_a1 = grad_a1.op.inputs[0]
      grad_a2 = grad_a2.op.inputs[0]
      self.assertTrue(grad_a1.op.get_attr("_XlaCompile"))
      self.assertTrue(grad_a2.op.get_attr("_XlaCompile"))
      self.assertEqual(b"jit_scope_0_grad_GA", grad_a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1_grad_GB", grad_a2.op.get_attr("_XlaScope"))

  def testDefunInheritsJitScope(self):
    with self.test_session(graph=ops.Graph()):
      with jit.experimental_jit_scope(True):

        @function.Defun(compiled=True, noinline=True)
        def mulop(x1, x2):
          return x1 * x2

        x = constant_op.constant(1.0)
        r = mulop(x, x)

      # Ensure the forward function is compiled.
      graph_def = r.graph.as_graph_def()
      func_attrs = graph_def.library.function[0].attr
      self.assertTrue(func_attrs["_XlaCompile"].b)
      # Ensure _XlaScope is inherited from enclosing context.
      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)

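  # Companion check (a sketch added to this excerpt, not in the original):
  # with no enclosing jit scope, a compiled Defun sets its own _XlaScope. The
  # b"function_mulop" value assumes the "function_<name>" naming convention
  # that the scope names above also follow.
  def testDefunNoJitScope(self):
    with self.test_session(graph=ops.Graph()):

      @function.Defun(compiled=True, noinline=True)
      def mulop(x1, x2):
        return x1 * x2

      # Instantiate the function so it appears in the graph's function library.
      x = constant_op.constant(1.0)
      r = mulop(x, x)

      # Ensure the forward function is compiled.
      graph_def = r.graph.as_graph_def()
      func_attrs = graph_def.library.function[0].attr
      self.assertTrue(func_attrs["_XlaCompile"].b)
      # No enclosing jit scope, so the function picks its own _XlaScope.
      self.assertEqual(b"function_mulop", func_attrs["_XlaScope"].s)
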
  def testCompilationInGradient(self):
    with self.test_session():
      # Float matmuls so the gradient pass creates compiled ops to inspect;
      # with a plain add, the gradient forwards tensors without creating any
      # op under the "gradients/compiled" name scope.
      x = constant_op.constant([[3.]])
      y_nc = math_ops.matmul(x, x, name="not_compiled")
      with jit.experimental_jit_scope():
        y_c = math_ops.matmul(y_nc, y_nc, name="compiled")
      x_grads = gradients.gradients([y_c], [x])[0]
      operations = x_grads.graph.get_operations()
      c_grad_ops = [
          op for op in operations if "gradients/compiled" in op.name]
      nc_grad_ops = [
          op for op in operations if "gradients/not_compiled" in op.name]
      self.assertGreater(len(c_grad_ops), 0)
      self.assertGreater(len(nc_grad_ops), 0)
      for cg in c_grad_ops:
        self.assertTrue(cg.get_attr("_XlaCompile"))
      for ncg in nc_grad_ops:
        with self.assertRaisesRegexp(ValueError, "No attr named"):
          ncg.get_attr("_XlaCompile")

      # d/dx (x ** 4) = 4 * x ** 3 = 108 at x = 3.
      self.assertAllClose([[108.]], x_grads.eval())

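
# Standard TensorFlow test entry point, so the excerpt runs as a standalone
# file (assumes the `test` module imported above).
if __name__ == "__main__":
  test.main()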