Example #1
    def testMemoryLeakAnonymousVariable(self):
        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        def f():
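            # Create a variable and delete it immediately; repeated calls should not grow memory.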
            inputs = Variable(array_ops.zeros([32, 100], dtypes.float32))
            del inputs

        memory_test_util.assert_no_leak(f, num_iters=10000)
Example #2
    def testMemoryLeakInSimpleModelForwardOnly(self):
        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        inputs = array_ops.zeros([32, 100], dtypes.float32)
        net = SingleLayerNet()

        def f():
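            # Forward pass only: the tape records operations but no gradient is ever requested.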
            with backprop.GradientTape():
                net(inputs)

        memory_test_util.assert_no_leak(f)
Example #3
    def testMemoryLeakInFunction(self):
        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        def f():
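            # A new tf.function is defined and traced on every call; tracing should not leak.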
            @def_function.function
            def graph(x):
                return x * x + x

            graph(constant_op.constant(42))

        memory_test_util.assert_no_leak(f,
                                        num_iters=1000,
                                        increase_threshold_absolute_mb=30)
Example #4
    def testMemoryLeakInSimpleModelForwardAndBackward(self):
        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        inputs = array_ops.zeros([32, 100], dtypes.float32)
        net = SingleLayerNet()

        def f():
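            # Forward pass under the tape, then compute gradients w.r.t. the model variables.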
            with backprop.GradientTape() as tape:
                result = net(inputs)

            tape.gradient(result, net.variables)

            del tape

        memory_test_util.assert_no_leak(f)
Example #5
    def testMemoryLeakInLocalCopy(self):
        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        remote.connect_to_remote_host(self._cached_server_target)

        # Run a function locally with the input on a remote worker and ensure we
        # do not leak a reference to the remote tensor.

        @def_function.function
        def local_func(i):
            return i

        def func():
            with ops.device("job:worker/replica:0/task:0/device:CPU:0"):
                x = array_ops.zeros([1000, 1000], dtypes.int32)

            local_func(x)

        memory_test_util.assert_no_leak(func,
                                        num_iters=100,
                                        increase_threshold_absolute_mb=50)
Example #6
    def testMemoryLeakInGlobalGradientRegistry(self):
        # Past leak: b/139819011

        if not memory_test_util.memory_profiler_is_available():
            self.skipTest("memory_profiler required to run this test")

        def f():
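            # Nested tf.functions with a second-order gradient; this previously leaked
            # through the global gradient registry (b/139819011).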
            @def_function.function(autograph=False)
            def graph(x):
                @def_function.function(autograph=False)
                def cubed(a):
                    return a * a * a

                y = cubed(x)
                # Delete the inner function to verify that doing so does not
                # affect the gradient computation below.
                del cubed
                return gradient_ops.gradients(gradient_ops.gradients(y, x), x)

            return graph(constant_op.constant(1.5))[0].numpy()

        memory_test_util.assert_no_leak(f,
                                        num_iters=300,
                                        increase_threshold_absolute_mb=50)