Example #1
    def testJITXlaScope(self):
        with self.session(graph=ops.Graph()):
            with jit.experimental_jit_scope(True):
                # XlaScope 0
                a1 = constant_op.constant(1)
            with jit.experimental_jit_scope(True):
                # XlaScope 1
                a2 = constant_op.constant(1)
                with jit.experimental_jit_scope(True):
                    # XlaScope still 1, depth 1
                    a3 = constant_op.constant(1)
                    with jit.experimental_jit_scope(True):
                        # XlaScope still 1, depth 2
                        a4 = constant_op.constant(1)
                    # XlaScope still 1, depth 1
                    a5 = constant_op.constant(1)
            with jit.experimental_jit_scope(True):
                # XlaScope now 2, depth 0
                a6 = constant_op.constant(1)

        self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
        self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))
        self.assertEqual(b"jit_scope_1", a3.op.get_attr("_XlaScope"))
        self.assertEqual(b"jit_scope_1", a4.op.get_attr("_XlaScope"))
        self.assertEqual(b"jit_scope_1", a5.op.get_attr("_XlaScope"))
        self.assertEqual(b"jit_scope_2", a6.op.get_attr("_XlaScope"))
Example #2
    def testCompilationSeparateGradientScopeNames(self):
        with self.session(graph=ops.Graph()):
            with jit.experimental_jit_scope(True,
                                            separate_compiled_gradients=True):
                # XlaScope 0
                a1 = constant_op.constant([[1.]])
                a1t = math_ops.matmul(a1, a1)
            with jit.experimental_jit_scope(True,
                                            separate_compiled_gradients=True):
                # XlaScope 1
                a2 = constant_op.constant([[1.]])
                a2t = math_ops.matmul(a2, a2)

            self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
            self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))
            grad_a1 = gradients.gradients(a1t, a1, name="GA")[0]
            grad_a2 = gradients.gradients(a2t, a2, name="GB")[0]
            grad_a1 = grad_a1.op.inputs[0]
            grad_a2 = grad_a2.op.inputs[0]
            self.assertTrue(grad_a1.op.get_attr("_XlaCompile"))
            self.assertTrue(grad_a2.op.get_attr("_XlaCompile"))
            self.assertEqual(b"jit_scope_0_grad_GA",
                             grad_a1.op.get_attr("_XlaScope"))
            self.assertEqual(b"jit_scope_1_grad_GB",
                             grad_a2.op.get_attr("_XlaScope"))
Example #3
  def compute(self, use_jit, compute_fn):
    random_seed.set_random_seed(1234)
    with self.session(graph=ops.Graph()) as sess:
      with jit.experimental_jit_scope(use_jit):
        r = compute_fn()
      sess.run(variables.global_variables_initializer())
      return (r, sess.run(r))
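A hypothetical way to exercise this helper (the test name, the lambda, and the assertion below are assumptions, not part of the original suite): build the same computation with and without the JIT scope and check that the two paths agree numerically.

  def testJitAndNoJitAgree(self):
    # Hypothetical test built on the compute() helper above; the matmul
    # is an arbitrary stand-in for a real computation.
    fn = lambda: math_ops.matmul(constant_op.constant([[2.]]),
                                 constant_op.constant([[3.]]))
    _, v_no_jit = self.compute(False, fn)
    _, v_jit = self.compute(True, fn)
    self.assertAllClose(v_no_jit, v_jit)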
Example #4
    def testPlaysNicelyWithDefunSeparateGradientScope(self):
        with self.session(graph=ops.Graph()) as sess:
            with jit.experimental_jit_scope(True):

                @function.Defun(compiled=True,
                                noinline=True,
                                separate_compiled_gradients=True)
                def mulop(x1, x2):
                    return x1 * x2

                x = constant_op.constant(1.0)
                r = mulop(x, x)
                g_r = gradients.gradients(r, x, name="GA")[0]

            # Ensure the forward function is compiled.
            graph_def = r.graph.as_graph_def()
            func_attrs = graph_def.library.function[0].attr
            self.assertTrue(func_attrs["_XlaCompile"].b)
            self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)

            # Ensure the gradient (SymbolicGradient) is compiled, with a different
            # _XlaScope from the function itself.
            grad_op = g_r.op.inputs[0].op
            self.assertTrue(grad_op.get_attr("_XlaCompile"))
            self.assertEqual(b"jit_scope_0_grad_GA",
                             grad_op.get_attr("_XlaScope"))

            # Ensure the ops run: grad(x1*x1) = 2*x1
            self.assertAllClose([1.0, 1.0, 2.0], sess.run([x, r, g_r]))
Example #5
  def testJITInEager(self):

    with self.assertRaisesRegexp(
        RuntimeError, "xla.experimental.jit_scope is not supported when eager "
        "execution is enabled. Try use it inside tf.function."):
      with jit.experimental_jit_scope(True):
        constant_op.constant(1)
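As the error message suggests, the scope is usable inside a tf.function, where the code is traced as a graph. A minimal sketch, assuming a TF 2.x-style `import tensorflow as tf` and the public `tf.xla.experimental.jit_scope` alias for the same scope:

  import tensorflow as tf

  @tf.function
  def double(x):
    # Inside tf.function the body is traced as a graph, so the jit
    # scope applies instead of raising the RuntimeError above.
    with tf.xla.experimental.jit_scope():
      return x * 2.0

  double(tf.constant(1.0))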
Example #6
  def testCompilationGradientScopeNames(self):
    with self.session(graph=ops.Graph()):
      with jit.experimental_jit_scope():
        # XlaScope 0
        a1 = constant_op.constant([[1.]])
        a1t = math_ops.matmul(a1, a1)
      with jit.experimental_jit_scope():
        # XlaScope 1
        a2 = constant_op.constant([[1.]])
        a2t = math_ops.matmul(a2, a2)

      self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", a2.op.get_attr("_XlaScope"))
      grad_a1 = gradients.gradients(a1t, a1, name="GA")[0]
      grad_a2 = gradients.gradients(a2t, a2, name="GB")[0]
      grad_a1 = grad_a1.op.inputs[0]
      grad_a2 = grad_a2.op.inputs[0]
      self.assertTrue(grad_a1.op.get_attr("_XlaCompile"))
      self.assertTrue(grad_a2.op.get_attr("_XlaCompile"))
      self.assertEqual(b"jit_scope_0", grad_a1.op.get_attr("_XlaScope"))
      self.assertEqual(b"jit_scope_1", grad_a2.op.get_attr("_XlaScope"))
Example #7
  def testDefunInheritsJitScope(self):
    with self.session(graph=ops.Graph()):
      with jit.experimental_jit_scope(True):
        @function.Defun(compiled=True, noinline=True)
        def mulop(x1, x2):
          return x1 * x2
        x = constant_op.constant(1.0)
        r = mulop(x, x)

      # Ensure the forward function is compiled.
      graph_def = r.graph.as_graph_def()
      func_attrs = graph_def.library.function[0].attr
      self.assertTrue(func_attrs["_XlaCompile"].b)
      # Ensure _XlaScope is inherited from enclosing context.
      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)
Example #8
  def testCompilationInGradient(self):
    with self.cached_session():
      x = constant_op.constant([[3.]])
      y_nc = math_ops.matmul(x, x, name="not_compiled")
      with jit.experimental_jit_scope():
        y_c = math_ops.matmul(y_nc, y_nc, name="compiled")
      x_grads = gradients.gradients([y_c], [x])[0]
      operations = x.graph.get_operations()
      c_grad_ops = [
          op for op in operations if "gradients/compiled" in op.name]
      nc_grad_ops = [
          op for op in operations if "gradients/not_compiled" in op.name]
      self.assertGreater(len(c_grad_ops), 0)
      self.assertGreater(len(nc_grad_ops), 0)
      for cg in c_grad_ops:
        self.assertTrue(cg.get_attr("_XlaCompile"))
      for ncg in nc_grad_ops:
        with self.assertRaisesRegexp(ValueError, "[Nn]o attr named"):
          ncg.get_attr("_XlaCompile")

      # d/dx (x ** 4) = 4 * (x ** 3); at x = 3, that is 4 * 27 = 108.
      self.assertAllClose([[108]], x_grads.eval())
Example #9
  def jit_f():
    # Run f() under a JIT scope so the ops it creates are annotated
    # for XLA compilation.
    with jit.experimental_jit_scope():
      return f()
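A self-contained sketch of how this wrapper might be defined and checked; the body of f() and the attribute check below are assumptions, mirroring the assertions in the earlier examples:

  def f():
    # Hypothetical computation standing in for the real f().
    a = constant_op.constant([[1., 2.], [3., 4.]])
    return math_ops.matmul(a, a)

  with ops.Graph().as_default():
    r = jit_f()
    # Ops created under the scope carry the _XlaCompile attribute.
    assert r.op.get_attr("_XlaCompile")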