def testMultipleInsertLookupGrad(self):
   """Gradients flow independently through three insert/lookup pairs."""
   with backprop.GradientTape(persistent=True) as tape:
     m = map_ops.empty_tensor_map()
     keys = [constant_op.constant(1.0),
             constant_op.constant(2.0),
             constant_op.constant(3.0)]
     vals = [constant_op.constant(11.0),
             constant_op.constant(12.0),
             constant_op.constant(13.0)]
     for val in vals:
       tape.watch(val)
     for key, val in zip(keys, vals):
       m = map_ops.tensor_map_insert(m, key, val)
     lookups = [map_ops.tensor_map_lookup(m, key, val.dtype)
                for key, val in zip(keys, vals)]
     # d(scale * lookup)/d(val) == scale for each pair.
     for lookup, val, scale in zip(lookups, vals, (5, 6, 7)):
       self.assertAllEqual(tape.gradient(lookup * scale, val), scale)
   del tape
Example #2
0
    def testStringKeyGrad(self):
        """Gradients flow through lookup and erase on a string-keyed map."""
        with backprop.GradientTape(persistent=True) as tape:
            m = map_ops.empty_tensor_map()
            k = constant_op.constant("key")
            k2 = constant_op.constant("key2")
            v = constant_op.constant(2.0)
            v2 = constant_op.constant(22.0)
            # Only v2 is watched; every gradient below is taken w.r.t. it.
            tape.watch(v2)
            m = map_ops.tensor_map_insert(m, k2, v2)
            m = map_ops.tensor_map_insert(m, k, v)
            s = map_ops.tensor_map_size(m)
            self.assertAllEqual(s, 2)
            l = map_ops.tensor_map_lookup(m, k, v.dtype)
            self.assertAllClose(l, v)
            # Overwrite k with v2 so a fresh lookup depends on v2.
            m = map_ops.tensor_map_insert(m, k, v2)
            l2 = map_ops.tensor_map_lookup(m, k, v2.dtype)
            self.assertAllClose(l2, v2)
            g = tape.gradient(l2 * 5, v2)
            self.assertAllEqual(g, 5)

            # Erase returns the updated map and the removed value; the
            # removed value still carries gradient back to v2.
            m, e = map_ops.tensor_map_erase(m, k, v2.dtype)
            s = map_ops.tensor_map_size(m)
            self.assertAllEqual(s, 1)
            self.assertAllClose(e, v2)
            g2 = tape.gradient(e * 6, v2)
            self.assertAllEqual(g2, 6)
        del tape
Example #3
0
 def testEraseFirstGrad(self):
     """Erasing the first key preserves gradients of earlier lookups."""
     with backprop.GradientTape(persistent=True) as tape:
         m = map_ops.empty_tensor_map()
         k = constant_op.constant(1.0)
         k2 = constant_op.constant(2.0)
         v = constant_op.constant(11.0)
         v2 = constant_op.constant(22.0)
         tape.watch(v)
         tape.watch(v2)
         m = map_ops.tensor_map_insert(m, k, v)
         # Look up k BEFORE it is erased; its gradient must survive the erase.
         l = map_ops.tensor_map_lookup(m, k, v.dtype)
         m = map_ops.tensor_map_insert(m, k2, v2)
         # Erase returns the updated map and the removed value.
         m, e = map_ops.tensor_map_erase(m, k, v.dtype)
         l2 = map_ops.tensor_map_lookup(m, k2, v2.dtype)
         self.assertAllClose(l2, v2)
         self.assertAllClose(e, v)
         g = tape.gradient(l * 5, v)
         self.assertAllEqual(g, 5)
         g2 = tape.gradient(l2 * 6, v2)
         self.assertAllEqual(g2, 6)
         # The erased value e is itself differentiable w.r.t. v.
         g3 = tape.gradient(e * 7, v)
         self.assertAllEqual(g3, 7)
         m, e2 = map_ops.tensor_map_erase(m, k2, v2.dtype)
         g4 = tape.gradient(e2 * 8, v2)
         self.assertAllEqual(g4, 8)
     del tape
 def testStringKeyGrad(self):
   """String-keyed map: lookup, replace, and erase all propagate gradients."""
   with backprop.GradientTape(persistent=True) as tape:
     m = map_ops.empty_tensor_map()
     key_a = constant_op.constant("key")
     key_b = constant_op.constant("key2")
     val_a = constant_op.constant(2.0)
     val_b = constant_op.constant(22.0)
     tape.watch(val_a)
     tape.watch(val_b)
     m = map_ops.tensor_map_insert(m, key_a, val_a)
     m = map_ops.tensor_map_insert(m, key_b, val_b)
     self.assertAllEqual(map_ops.tensor_map_size(m), 2)
     # Lookup and its gradient.
     found = map_ops.tensor_map_lookup(m, key_a, val_a.dtype)
     self.assertAllClose(found, val_a)
     self.assertAllClose(tape.gradient(found * 5, val_a), 5)
     # Overwrite key_a; the gradient now targets the replacement value.
     m = map_ops.tensor_map_insert(m, key_a, val_b)
     replaced = map_ops.tensor_map_lookup(m, key_a, val_b.dtype)
     self.assertAllClose(replaced, val_b)
     self.assertAllEqual(tape.gradient(replaced * 6, val_b), 6)
     # Erase key_a (single-return form here); only key_b remains.
     m = map_ops.tensor_map_erase(m, key_a, val_b.dtype)
     self.assertAllEqual(map_ops.tensor_map_size(m), 1)
     self.assertAllEqual(map_ops.tensor_map_has_key(m, key_a), False)
     found = map_ops.tensor_map_lookup(m, key_b, val_b.dtype)
     self.assertAllEqual(tape.gradient(found * 6, val_b), 6)
   del tape
 def testInsertLookupComposeGrad(self):
   """Gradient flows through a lookup whose result was re-inserted."""
   with backprop.GradientTape() as tape:
     m = map_ops.empty_tensor_map()
     first_key = constant_op.constant(1.0)
     second_key = constant_op.constant(2.0)
     value = constant_op.constant(11.0)
     tape.watch(value)
     # Insert under first_key, look it up, re-insert the looked-up tensor
     # under second_key, then look THAT up.
     m = map_ops.tensor_map_insert(m, first_key, value)
     found = map_ops.tensor_map_lookup(m, first_key, value.dtype)
     m = map_ops.tensor_map_insert(m, second_key, found)
     chained = map_ops.tensor_map_lookup(m, second_key, found.dtype)
     self.assertAllEqual(tape.gradient(chained * 5, value), 5)
Example #6
0
 def testStringValue(self):
     """A map holds a string value alongside a float entry."""
     m = map_ops.empty_tensor_map()
     str_key = constant_op.constant("key")
     str_val = constant_op.constant("value")
     num_key = constant_op.constant(1.0)
     num_val = constant_op.constant(2.0)
     m = map_ops.tensor_map_insert(m, str_key, str_val)
     m = map_ops.tensor_map_insert(m, num_key, num_val)
     self.assertAllEqual(
         map_ops.tensor_map_lookup(m, str_key, str_val.dtype), str_val)
     self.assertAllClose(
         map_ops.tensor_map_lookup(m, num_key, num_val.dtype), num_val)
     # Erase hands back the removed string value.
     m, erased = map_ops.tensor_map_erase(m, str_key, str_val.dtype)
     self.assertAllEqual(erased, str_val)
 def testTensorMapLookup(self):
   """Looking up an inserted key returns its value."""
   tensor_map = map_ops.empty_tensor_map()
   key = constant_op.constant(1.0)
   value = constant_op.constant(2.0)
   tensor_map = map_ops.tensor_map_insert(tensor_map, key, value)
   result = map_ops.tensor_map_lookup(tensor_map, key, dtypes.float32)
   self.assertAllClose(result, value)
 def testTensorMapLookupFromEmptyMapFails(self):
   """Lookup on an empty map raises InvalidArgumentError."""
   empty = map_ops.empty_tensor_map()
   key = constant_op.constant(1.0)
   with self.assertRaisesRegex(errors.InvalidArgumentError,
                               "Trying to lookup non-existent key."):
     self.evaluate(map_ops.tensor_map_lookup(empty, key, dtypes.float32))
 def testEraseInsertComposedGrad(self):
   """Gradient survives erase followed by re-insert of the looked-up value."""
   with backprop.GradientTape(persistent=True) as tape:
     m = map_ops.empty_tensor_map()
     key_one = constant_op.constant(1.0)
     key_two = constant_op.constant(2.0)
     val_one = constant_op.constant(11.0)
     val_two = constant_op.constant(22.0)
     tape.watch(val_one)
     tape.watch(val_two)
     m = map_ops.tensor_map_insert(m, key_one, val_one)
     found = map_ops.tensor_map_lookup(m, key_one, val_one.dtype)
     # NOTE(review): erase is used with a single return value here, unlike
     # the tuple-unpacking form elsewhere in this file — confirm which
     # tensor_map_erase signature this snippet targets.
     m = map_ops.tensor_map_erase(m, key_one, val_one.dtype)
     m = map_ops.tensor_map_insert(m, key_two, found)
     relooked = map_ops.tensor_map_lookup(m, key_two, found.dtype)
     self.assertAllEqual(tape.gradient(relooked * 5, val_one), 5)
   del tape
Example #10
0
  def testIfHasKeyLookup(self):
    """cond() guards lookups: a present key returns its value, a missing
    key falls through to default_value.

    Fix: the second cond tested `tensor_map_has_key(m, k2)` but its true
    branch looked up `k` — a copy-paste slip. The branch is never taken in
    this test (k2 is absent), but it now looks up the key actually tested.
    """
    m = map_ops.empty_tensor_map()
    k = constant_op.constant(1.0)
    k2 = constant_op.constant(2.0)
    v = constant_op.constant(2.0)
    m = map_ops.tensor_map_insert(m, k, v)

    default_value = array_ops.zeros_like(v)
    # k is present: the true branch runs and returns v.
    l = control_flow_ops.cond(
        map_ops.tensor_map_has_key(m, k),
        lambda: map_ops.tensor_map_lookup(m, k, dtypes.float32),
        lambda: default_value)
    # k2 is absent: the false branch runs and returns default_value.
    l2 = control_flow_ops.cond(
        map_ops.tensor_map_has_key(m, k2),
        lambda: map_ops.tensor_map_lookup(m, k2, dtypes.float32),
        lambda: default_value)
    self.assertAllClose(l, v)
    self.assertAllClose(l2, default_value)
Example #11
0
 def testLookupMultiplyGrad(self):
   """Product of two lookups differentiates like a product of the values."""
   with backprop.GradientTape(persistent=True) as tape:
     key_a = constant_op.constant(1.0)
     key_b = constant_op.constant(2.0)
     val_a = constant_op.constant(11.0)
     val_b = constant_op.constant(22.0)
     tape.watch(val_a)
     tape.watch(val_b)
     m = map_ops.empty_tensor_map()
     m = map_ops.tensor_map_insert(m, key_a, val_a)
     m = map_ops.tensor_map_insert(m, key_b, val_b)
     lookup_a = map_ops.tensor_map_lookup(m, key_a, val_a.dtype)
     lookup_b = map_ops.tensor_map_lookup(m, key_b, val_b.dtype)
     # d(a*b)/da == b and d(a*b)/db == a.
     self.assertAllClose(
         tape.gradient(lookup_a * lookup_b, [val_a, val_b]), [val_b, val_a])
     # d(a*a)/da == 2a.
     self.assertAllClose(tape.gradient(lookup_a * lookup_a, val_a), 2 * val_a)
   del tape
Example #12
0
 def testTensorMapLookupMissingKeyFails(self):
   """Looking up a key that was never inserted raises InvalidArgumentError."""
   tensor_map = map_ops.empty_tensor_map()
   present_key = constant_op.constant(1.0)
   missing_key = constant_op.constant(2.0)
   value = constant_op.constant(11.0)
   tensor_map = map_ops.tensor_map_insert(tensor_map, present_key, value)
   with self.assertRaisesRegex(errors.InvalidArgumentError,
                               "Trying to lookup non-existent key."):
     self.evaluate(
         map_ops.tensor_map_lookup(tensor_map, missing_key, dtypes.float32))
Example #13
0
 def testStringKeyValue(self):
   """String and float key-value pairs coexist in one map."""
   m = map_ops.empty_tensor_map()
   str_key = constant_op.constant("key")
   str_val = constant_op.constant("value")
   num_key = constant_op.constant(1.0)
   num_val = constant_op.constant(2.0)
   # Insert both pairs, then read each back.
   m = map_ops.tensor_map_insert(m, str_key, str_val)
   m = map_ops.tensor_map_insert(m, num_key, num_val)
   self.assertAllEqual(
       map_ops.tensor_map_lookup(m, str_key, str_val.dtype), str_val)
   self.assertAllClose(
       map_ops.tensor_map_lookup(m, num_key, num_val.dtype), num_val)
   # Erase the string entry; the float entry is untouched.
   self.assertAllEqual(map_ops.tensor_map_has_key(m, str_key), True)
   m = map_ops.tensor_map_erase(m, str_key, str_val.dtype)
   self.assertAllEqual(map_ops.tensor_map_has_key(m, str_key), False)
   self.assertAllEqual(map_ops.tensor_map_has_key(m, num_key), True)
Example #14
0
 def testDiffKeySameValueGrad(self):
   """Two keys mapping to one tensor accumulate its gradient."""
   with backprop.GradientTape(persistent=True) as tape:
     m = map_ops.empty_tensor_map()
     key_a = constant_op.constant(1.0)
     key_b = constant_op.constant(11.0)
     shared = constant_op.constant(2.0)
     other = constant_op.constant(22.0)
     tape.watch(shared)
     tape.watch(other)
     # Both keys initially hold the same tensor.
     m = map_ops.tensor_map_insert(m, key_a, shared)
     m = map_ops.tensor_map_insert(m, key_b, shared)
     lookup_a = map_ops.tensor_map_lookup(m, key_a, shared.dtype)
     lookup_b = map_ops.tensor_map_lookup(m, key_b, shared.dtype)
     # Gradient sums over both lookups of the shared value.
     self.assertAllEqual(tape.gradient(lookup_a + lookup_b, shared), 2)
     # After overwriting key_b, only one term depends on `other`.
     m = map_ops.tensor_map_insert(m, key_b, other)
     lookup_b = map_ops.tensor_map_lookup(m, key_b, other.dtype)
     self.assertAllEqual(tape.gradient(lookup_a + lookup_b, other), 1)
   del tape
Example #15
0
 def testSameKeyAlternatingInsertLookupGrad(self):
     """Alternating insert/lookup on one key routes gradient to the value
     current at lookup time."""
     with backprop.GradientTape(persistent=True) as tape:
         m = map_ops.empty_tensor_map()
         k = constant_op.constant(1.0)
         v = constant_op.constant(2.0)
         v2 = constant_op.constant(22.0)
         tape.watch(v)
         tape.watch(v2)
         m = map_ops.tensor_map_insert(m, k, v)
         l = map_ops.tensor_map_lookup(m, k, v.dtype)
         self.assertAllClose(l, v)
         g = tape.gradient(l * 5, v)
         self.assertAllClose(g, 5)
         # Overwrite k with v2; a fresh lookup depends on v2 only, not v.
         m = map_ops.tensor_map_insert(m, k, v2)
         l2 = map_ops.tensor_map_lookup(m, k, v2.dtype)
         self.assertAllClose(l2, v2)
         g2 = tape.gradient(l2 * 6, v)
         g3 = tape.gradient(l2 * 7, v2)
         # v receives zero gradient from the post-overwrite lookup.
         self.assertAllClose(g2, array_ops.zeros_like(v))
         self.assertAllClose(g3, 7)
Example #16
0
 def testInsertLookupGrad(self):
   """Basic insert-then-lookup gradient: d(5 * lookup)/d(value) == 5."""
   with backprop.GradientTape() as tape:
     m = map_ops.empty_tensor_map()
     key = constant_op.constant(1.0)
     value = constant_op.constant(11.0)
     tape.watch(value)
     m = map_ops.tensor_map_insert(m, key, value)
     scaled = map_ops.tensor_map_lookup(m, key, dtypes.float32) * 5
     self.assertAllEqual(tape.gradient(scaled, value), 5)
Example #17
0
 def testLookupAddGrad(self):
   """Sum of two lookups has unit gradients w.r.t. each source value."""
   with backprop.GradientTape(persistent=True) as tape:
     key_a = constant_op.constant(1.0)
     key_b = constant_op.constant(2.0)
     val_a = constant_op.constant(11.0)
     val_b = constant_op.constant(22.0)
     tape.watch(val_a)
     tape.watch(val_b)
     m = map_ops.empty_tensor_map()
     m = map_ops.tensor_map_insert(m, key_a, val_a)
     m = map_ops.tensor_map_insert(m, key_b, val_b)
     lookup_a = map_ops.tensor_map_lookup(m, key_a, val_a.dtype)
     lookup_b = map_ops.tensor_map_lookup(m, key_b, val_b.dtype)
     # d(a+b)/da == d(a+b)/db == 1, taken at the lookups or at the sources.
     self.assertAllClose(
         tape.gradient(lookup_a + lookup_b, [lookup_a, lookup_b]), [1, 1])
     self.assertAllClose(
         tape.gradient(lookup_a + lookup_b, [val_a, val_b]), [1, 1])
     # The scale on lookup_b shows up in val_b's gradient.
     self.assertAllEqual(tape.gradient(lookup_a + lookup_b * 4, val_b), 4)
   del tape
Example #18
0
    def testVectorValue(self):
        """Vector-valued entries round-trip through insert, lookup, erase."""
        tensor_map = map_ops.empty_tensor_map()
        key = constant_op.constant([1.0, 2.0])
        value = constant_op.constant([11.0, 22.0])
        tensor_map = map_ops.tensor_map_insert(tensor_map, key, value)
        self.assertAllEqual(map_ops.tensor_map_size(tensor_map), 1)
        self.assertAllEqual(
            map_ops.tensor_map_lookup(tensor_map, key, value.dtype), value)

        # Erase empties the map and hands back the removed vector.
        tensor_map, erased = map_ops.tensor_map_erase(tensor_map, key,
                                                      value.dtype)
        self.assertAllEqual(map_ops.tensor_map_size(tensor_map), 0)
        self.assertAllClose(erased, value)
Example #19
0
 def testReplaceLookupGrad(self):
   """Overwriting a key redirects the gradient to the replacement value."""
   with backprop.GradientTape(persistent=True) as tape:
     m = map_ops.empty_tensor_map()
     key = constant_op.constant(1.0)
     old_val = constant_op.constant(11.0)
     new_val = constant_op.constant(22.0)
     tape.watch(old_val)
     tape.watch(new_val)
     m = map_ops.tensor_map_insert(m, key, old_val)
     before = map_ops.tensor_map_lookup(m, key, old_val.dtype)
     self.assertAllClose(before, old_val)
     self.assertAllEqual(tape.gradient(before * 5, old_val), 5)
     # Overwrite the same key with new_val and look it up again.
     m = map_ops.tensor_map_insert(m, key, new_val)
     after = map_ops.tensor_map_lookup(m, key, new_val.dtype)
     self.assertAllClose(after, new_val)
     # The replaced value no longer contributes any gradient...
     self.assertAllClose(tape.gradient(after * 6, old_val),
                         array_ops.zeros_like(old_val))
     # ...while the replacement receives it all.
     self.assertAllClose(tape.gradient(after * 7, new_val), 7)
   del tape
Example #20
0
 def testVectorValue(self):
   """Vector values: insert/lookup round-trip, then erase."""
   tensor_map = map_ops.empty_tensor_map()
   key = constant_op.constant([1.0, 2.0])
   value = constant_op.constant([11.0, 22.0])
   tensor_map = map_ops.tensor_map_insert(tensor_map, key, value)
   self.assertAllEqual(map_ops.tensor_map_size(tensor_map), 1)
   self.assertAllEqual(
       map_ops.tensor_map_lookup(tensor_map, key, value.dtype), value)
   # NOTE(review): erase is used here with a single return value, unlike the
   # tuple-unpacking form elsewhere in this file — confirm which
   # tensor_map_erase signature this snippet targets.
   tensor_map = map_ops.tensor_map_erase(tensor_map, key, value.dtype)
   self.assertAllEqual(map_ops.tensor_map_size(tensor_map), 0)
   self.assertAllEqual(map_ops.tensor_map_has_key(tensor_map, key), False)