Example #1
0
 def _testDilationGradDeterminismError(self, use_gpu):
     """Checks determinism behavior of the Dilation2D gradient.

     With deterministic ops enabled, computing the Dilation2D gradient on a
     CUDA GPU is expected to raise UnimplementedError; on CPU (or when no GPU
     is available) the gradient construction should succeed.

     Args:
       use_gpu: Whether to request GPU execution for the gradient computation.
     """
     # Determinism is toggled globally, so enable it once and always restore
     # it in `finally`; this hoists the try/finally that was previously
     # duplicated in both branches.
     try:
         config.enable_deterministic_ops(True)
         if use_gpu and test.is_gpu_available(cuda_only=True):
             # assertRaisesRegex is the non-deprecated spelling of
             # assertRaisesRegexp (deprecated since Python 3.2).
             with self.assertRaisesRegex(
                     errors_impl.UnimplementedError,
                     "Determinism is not yet supported "
                     "for Dilation2DBackpropInput."):
                 self._ConstructAndTestGradient(image_shape=[1, 3, 3, 1],
                                                kernel_shape=[1, 1, 1],
                                                strides=[1, 1],
                                                rates=[1, 1],
                                                padding="VALID",
                                                use_gpu=use_gpu)
         else:
             self._ConstructAndTestGradient(image_shape=[1, 3, 3, 1],
                                            kernel_shape=[1, 1, 1],
                                            strides=[1, 1],
                                            rates=[1, 1],
                                            padding="VALID",
                                            use_gpu=use_gpu)
     finally:
         config.enable_deterministic_ops(False)
Example #2
0
 def test_bincount_determinism_error(self):
   """Bincount on a CUDA GPU must raise UnimplementedError under determinism.

   When no CUDA GPU is available the body is a no-op (nothing is asserted),
   which matches the op only lacking a deterministic GPU kernel.
   """
   num_samples = 10000
   # Fixed seed so the generated input is reproducible across runs.
   np.random.seed(42)
   arr = np.random.randint(0, 1000, num_samples)
   try:
     config.enable_deterministic_ops(True)
     with test_util.use_gpu():
       if test_util.is_gpu_available(cuda_only=True):
         # assertRaisesRegex is the non-deprecated spelling of
         # assertRaisesRegexp (deprecated since Python 3.2).
         with self.assertRaisesRegex(
             errors_impl.UnimplementedError, "Determinism is not yet "
             "supported for Bincount."):
           self.evaluate(bincount_ops.bincount(arr, None))
   finally:
     # Determinism is a global setting; restore it for later tests.
     config.enable_deterministic_ops(False)
 def testDeterministicOpsErrors(self):
     """Verifies global-generator APIs that must fail while determinism is on."""
     try:
         config.enable_deterministic_ops(True)
         # With no global generator set, reading it is forbidden in
         # deterministic mode.
         random.set_global_generator(None)
         message = ('"get_global_generator" cannot be called if determinism '
                    'is enabled')
         with self.assertRaisesWithPredicateMatch(RuntimeError, message):
             random.get_global_generator()
         # Once a generator has been explicitly seeded, reads succeed again.
         random.set_global_generator(random.Generator.from_seed(50))
         random.get_global_generator()
         # Seeding from non-deterministic state is always rejected.
         message = ('"from_non_deterministic_state" cannot be called when '
                    'determinism is enabled.')
         with self.assertRaisesWithPredicateMatch(RuntimeError, message):
             random.Generator.from_non_deterministic_state()
     finally:
         # Determinism is a global setting; restore it for later tests.
         config.enable_deterministic_ops(False)
Example #4
0
  def test_no_determinism(self):
    """With determinism disabled, the ParallelInterleave rewrite still applies.

    The interleave function is deliberately stateful (it assigns a variable),
    and assert_next checks that the optimization is applied anyway.
    """
    config.enable_deterministic_ops(False)
    side_effect_var = variables.Variable(0.)

    def make_child_dataset(unused_element):
      # Assign to the captured variable so the function is stateful.
      del unused_element
      side_effect_var.assign(1.)
      return dataset_ops.Dataset.range(2)

    dataset = dataset_ops.Dataset.range(5)
    dataset = dataset.apply(testing.assert_next(["ParallelInterleave"]))
    dataset = dataset.interleave(
        make_child_dataset, cycle_length=5, num_parallel_calls=3)
    options = options_lib.Options()
    options.experimental_optimization.apply_default_optimizations = False
    dataset = dataset.with_options(options)
    self.evaluate(variables.global_variables_initializer())
    # Five interleaved range(2) children each yield 0 first, then 1.
    expected_output = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    self.assertDatasetProduces(
        dataset, expected_output=expected_output, requires_initialization=True)
Example #5
0
 def testDeterminism(self):
     """Checks that the determinism flag can be toggled and read back.

     This does not test any ops are deterministic, because that is tested by
     many kernel tests.
     """
     try:
         # Each toggle should be reflected immediately by the query API.
         config.enable_deterministic_ops(False)
         self.assertFalse(config.deterministic_ops_enabled())
         config.enable_deterministic_ops(True)
         self.assertTrue(config.deterministic_ops_enabled())
     finally:
         # Leave the global flag in its default (disabled) state.
         config.enable_deterministic_ops(False)
Example #6
0
 def tearDown(self):
   """Disables deterministic ops so the setting does not leak between tests."""
   super().tearDown()
   config.enable_deterministic_ops(False)
Example #7
0
 def setUp(self):
   """Clears the graph-level random seed and enables deterministic ops."""
   super().setUp()
   # Clear any previously set graph-level seed before each test.
   random_seed.set_random_seed(None)
   config.enable_deterministic_ops(True)
Example #8
0
    pixel, can contribute to nondeterminism in the gradient associated with that
    input pixel location.

    Note that the number of boxes can be less than, equal to, or greater than
    the batch size. Wth non-reproducible ordering of reduction operations, three
    or more crops overlapping on the same input image pixel can independently
    contribute to nondeterminism in the image gradient associated with that
    input pixel location. This is independent of contributions caused by the
    upsampling of any given crop.
    """
    self._testReproducibleBackprop(test_image_not_boxes=True)

  @test_util.run_in_graph_and_eager_modes
  def testReproducibleBackpropToBoxes(self):
    """Test that backprop to boxes is reproducible.

    If the input and output dimensions are the same, then the boxes gradients
    will be deterministically zero. Otherwise, in the presence of
    non-reproducible ordering of reduction operations, nondeterminism can be
    introduced, whether there is upsampling or downsampling and whether or not
    there are overlapping crops.
    """
    # test_image_not_boxes=False selects the boxes-gradient path of the helper
    # (the image-gradient path is covered by the sibling test above).
    self._testReproducibleBackprop(test_image_not_boxes=False)


if __name__ == '__main__':
  # TODO(reedwm): Merge this file with image_grad_test.py and
  # image_grad_test_base.py
  # Run the whole suite with deterministic ops enabled so reproducibility
  # regressions surface as test failures.
  config.enable_deterministic_ops(True)
  test.main()