Example #1
 def __init__(self, methodName='runTest'):  # pylint: disable=invalid-name
     super().__init__(methodName)
     if npe.tpu_devices():
         # Initialize TPU for TF
         resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
             tpu='local')
         tf.tpu.experimental.initialize_tpu_system(resolver)
Example #2
 def __init__(self, methodName="runTest"):  # pylint: disable=invalid-name
   super().__init__(methodName)
   # Split the single physical CPU into two logical CPU devices so that
   # multi-device tests can run on machines with no GPU or TPU.
   physical_devices = tf.config.experimental.list_physical_devices("CPU")
   tf.config.experimental.set_virtual_device_configuration(
       physical_devices[0], [
           tf.config.experimental.VirtualDeviceConfiguration(),
           tf.config.experimental.VirtualDeviceConfiguration()
       ])
   if extensions.tpu_devices():
     # Initialize the TPU system before any TPU computation runs.
     resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu="local")
     tf.tpu.experimental.initialize_tpu_system(resolver)
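The two empty VirtualDeviceConfiguration() entries above split the single physical CPU into two logical devices (CPU:0 and CPU:1), which is what allows the two-device tests below to run on a machine without GPUs or TPUs. A minimal standalone sketch of the same idea, written here as an illustration rather than taken from the original test file:

import tensorflow as tf

# Split the one physical CPU into two logical CPU devices. This must happen
# before TensorFlow initializes its devices.
physical_cpus = tf.config.experimental.list_physical_devices("CPU")
tf.config.experimental.set_virtual_device_configuration(
    physical_cpus[0],
    [tf.config.experimental.VirtualDeviceConfiguration(),
     tf.config.experimental.VirtualDeviceConfiguration()])

# Two logical CPU devices should now be visible.
logical_cpus = tf.config.experimental.list_logical_devices("CPU")
print([d.name for d in logical_cpus])  # e.g. ['/device:CPU:0', '/device:CPU:1']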
Example #3
  def testPmean(self):
    if extensions.tpu_devices():
      self.skipTest("pmean for TPU is not supported yet")
    devices = self._get_two_devices(require_same_type=True)

    def reduce_mean(f):
      return extensions.pmean(f)

    data = tf_np.asarray(tf.convert_to_tensor(value=[1, 3]))
    pmapped = extensions.pmap(reduce_mean, devices=devices)
    result = pmapped(data)

    self.assertAllClose(result[0], 2)
    self.assertAllClose(result[1], 2)
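For reference, pmap shards data along its leading axis, so replica 0 receives 1 and replica 1 receives 3; pmean then averages across replicas, which is why both assertions expect 2. A plain-NumPy sketch of that arithmetic (illustration only; no devices or TF involved):

import numpy as np

per_replica_inputs = np.array([1, 3])           # one element per device/replica
cross_replica_mean = per_replica_inputs.mean()  # pmean averages across replicas -> 2.0
result = np.full(per_replica_inputs.shape, cross_replica_mean)  # every replica sees 2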
Example #4
 def _get_two_devices(self, require_same_type=False):
   """Returns a pair of device names for tests that need two replicas."""
   tpus = extensions.tpu_devices()
   if FLAGS.requires_tpu:
     if len(tpus) == 2:
       res = tpus
     else:
       raise ValueError("This test requires 2 TPU cores but %s were found" %
                        len(tpus))
   else:
     if len(tpus) == 2:
       res = tpus
     elif self._hasGPU() and not require_same_type:
       res = ("CPU:0", "GPU:0")
     else:
       # Two logical CPU devices exist because of the virtual-device setup
       # in the test constructor (see Example #2).
       res = ("CPU:0", "CPU:1")
   return res
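Example #4 references two names that none of the fragments above define: the requires_tpu absl flag and a _hasGPU helper. A minimal sketch of plausible definitions follows; the flag wording and the helper body are assumptions made for illustration, not copied from the original test module:

from absl import flags
import tensorflow as tf

FLAGS = flags.FLAGS
# Assumed flag definition; the original module defines something equivalent.
flags.DEFINE_bool("requires_tpu", False, "Require TPU cores for this test run.")


class DeviceHelpersMixin:  # illustrative name, not from the original file

  def _hasGPU(self):
    # Plausible implementation: report whether any physical GPU is visible.
    return bool(tf.config.experimental.list_physical_devices("GPU"))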