def testMirroredStratParaAsync(self):
    """Tests RNG/MirrorStrategy interaction #3.

    The user can create n independent RNGs outside strategy.scope(), where n
    is the number of replicas, and give one to each replica. The replicas can
    thus get different random-number streams.
    """
    shape = [3, 4]
    dtype = dtypes.int32
    gens = random.get_global_generator().split(count=2)
    devices = ["/cpu:0", test_util.gpu_device_name()]
    strat = MirroredStrategy(devices=devices)
    # Use `PerReplica` to specify which `gen` is sent to which replica
    gens = dist_values.PerReplica(
        device_map=dist_values.ReplicaDeviceMap(devices),
        values=[[g] for g in gens])
    with strat.scope():
      def f(gen):
        t1 = gen.uniform_full_int(shape=shape, dtype=dtype)
        t2 = gen.uniform_full_int(shape=shape, dtype=dtype)
        t = array_ops.stack([t1, t2])
        return t
      results = strat.extended.call_for_each_replica(
          fn=f, args=gens)
      values = results.values
      self.assertAllEqual(2, len(values))
      self.assertAllDifferent(values)
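
# A minimal standalone sketch of the docstring's claim above, using the public
# tf.random API (an assumption: TF 2.x is available; no distribution strategy
# involved): one generator can be split into independent generators whose
# random-number streams differ.
import tensorflow as tf

g = tf.random.Generator.from_seed(1234)
g1, g2 = g.split(count=2)
a = g1.uniform_full_int(shape=[3, 4], dtype=tf.int32)
b = g2.uniform_full_int(shape=[3, 4], dtype=tf.int32)
# The two streams are independent, so a and b are expected to differ.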
  def testSameAsOldRandomOps(self):
    """Tests that the generated numbers are the same as the old random_ops.py .
    """
    seed1, seed2 = 50, 60
    # note how the two seeds for the old op correspond to the seed for the new
    # op
    random.get_global_generator().reset([0, seed2, seed1])
    shape = constant_op.constant([2, 3])
    dtype = dtypes.float32
    # create a graph for the old op in order to call it many times
    @def_function.function
    def old():
      return gen_random_ops.random_standard_normal(
          shape, dtype=dtype, seed=seed1, seed2=seed2)

    def new():
      return random.get_global_generator().standard_normal(shape, dtype=dtype)

    for _ in range(100):
      self.assertAllEqual(old(), new())
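
# For reference, the seed correspondence exercised above, expressed with the
# public tf.random API. The layout is an inference from the test, not a
# documented contract: the Philox state is three int64 words
# [counter_low, counter_high, key], so reset([0, seed2, seed1]) puts the old
# op's `seed` argument into the key word and its `seed2` argument into the
# high counter word.
import tensorflow as tf

seed1, seed2 = 50, 60
g = tf.random.get_global_generator()
g.reset([0, seed2, seed1])  # assumed layout: [counter_low, counter_high, key]
sample = g.normal([2, 3], dtype=tf.float32)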
  def compare(fst_includes_print, snd_includes_print):
    random.get_global_generator().reset(50)
    fst = f(fst_includes_print)
    random.get_global_generator().reset(50)
    snd = f(snd_includes_print)
    self.assertAllEqual(fst, snd)
    # Now do the above again using accelerated (defunned) 'f'.
    # Calling 'f' with two different Boolean arguments causes two different
    # graphs to be generated, which demonstrates that op-seed selection is
    # insensitive to graph changes.
    f_acc = def_function.function(f)
    random.get_global_generator().reset(50)
    fst = f_acc(fst_includes_print)
    random.get_global_generator().reset(50)
    snd = f_acc(snd_includes_print)
    self.assertAllEqual(fst, snd)
  def testOpSeedSelectionAfterSetSeed(self):
    """Tests that op-seed selection is reset after reseting global generator.

    Fixing GitHub issue 9171:
    https://github.com/tensorflow/tensorflow/issues/9171
    """
    shape = (3,)
    random.get_global_generator().reset(1)
    a = random.get_global_generator().normal(shape)
    random.get_global_generator().reset(1)
    b = random.get_global_generator().normal(shape)
    self.assertAllEqual(a, b)

    # Now do the above again using accelerated ('defun'ed) computation
    @def_function.function
    def f():
      return random.get_global_generator().normal(shape)

    random.get_global_generator().reset(1)
    c = f()
    random.get_global_generator().reset(1)
    d = f()
    self.assertAllEqual(c, d)
    self.assertAllEqual(a, c)
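
# A minimal public-API sketch of the same reproducibility property (assumes
# TF 2.x; mirrors the test above but outside a test class): resetting the
# global generator from a seed makes subsequent draws repeat, both eagerly
# and inside a tf.function.
import tensorflow as tf

g = tf.random.get_global_generator()

@tf.function
def draw():
  return tf.random.get_global_generator().normal((3,))

g.reset_from_seed(1)
a = draw()
g.reset_from_seed(1)
b = draw()
# a and b should be identical.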
  def verify(counter1, counter2, key1, key2, expect1, expect2):
    counter = uint32s_to_uint64(counter1, counter2)
    key = uint32s_to_uint64(key1, key2)
    random.get_global_generator().reset([counter, key])
    got = random.get_global_generator().uniform_full_int(
        shape=(2,), dtype=dtypes.uint32)
    expect = [expect1, expect2]
    self.assertAllEqual(expect, got)
    random.get_global_generator().reset([counter, key])
    got = random.get_global_generator().uniform_full_int(
        shape=(), dtype=dtypes.uint64)
    self.assertAllEqual(uint32s_to_uint64(*expect), got)
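
# The helper `uint32s_to_uint64` is referenced above but not shown in this
# snippet. A plausible definition (an assumption about the word order: the
# first argument is the low 32 bits, the second is the high 32 bits):
def uint32s_to_uint64(lo, hi):
  return (hi << 32) | lo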
  def f():
    return random.get_global_generator().normal(shape)

  def f(include_print):
    shape = constant_op.constant([5])
    if include_print:
      shape = logging_ops.Print(shape, [shape])
    return random.get_global_generator().normal(shape)

  def testEagerAndDefun(self):
    """A simple test to make sure the op works in eager and defunned mode."""
    random.get_global_generator().normal((3,))
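
# The docstring above mentions both eager and defunned execution, but the body
# only shows the eager call. A hedged sketch of the defunned half with the
# public API (assumes TF 2.x):
import tensorflow as tf

tf.random.get_global_generator().normal((3,))  # eager call

@tf.function
def defunned_sample():
  return tf.random.get_global_generator().normal((3,))

defunned_sample()  # same op, traced into a graph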
  def make_seed():
    generator = random.get_global_generator()
    state = array_ops.identity(generator.state, name="state")
    return generator.uniform_full_int((2,), dtypes.int32, name="seed"), state
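
# Hedged usage sketch for a seed produced this way (assumes the TF 2.x public
# API): a (2,)-shaped integer tensor drawn from the global generator can be
# fed to a stateless sampling op.
import tensorflow as tf

seed = tf.random.get_global_generator().uniform_full_int((2,), tf.int32)
sample = tf.random.stateless_normal(shape=(3,), seed=seed)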
  def new():
    with ops.device(test_util.gpu_device_name()):
      return random.get_global_generator().standard_normal(
          shape, dtype=dtype)

  def new():
    with ops.device("/device:CPU:0"):
      return random.get_global_generator().standard_normal(
          shape, dtype=dtype)
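
# Sketch of the device-pinning pattern in the two variants above, with the
# public API. "/device:GPU:0" is an assumption for a machine with at least one
# GPU; swap in whatever device string your setup reports.
import tensorflow as tf

g = tf.random.Generator.from_seed(7)
with tf.device("/device:CPU:0"):
  cpu_draw = g.normal([2, 3], dtype=tf.float32)
with tf.device("/device:GPU:0"):
  gpu_draw = g.normal([2, 3], dtype=tf.float32)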