    def testConcreteFunctionDictRetainsInsertedKeys(self):
        cache = function_cache.FunctionCache()

        key_1 = function_cache.make_cache_key(1)
        self.assertIsNone(cache.lookup(key_1, False))

        key_2 = function_cache.make_cache_key(2)
        key_3 = function_cache.make_cache_key(3)

        cache.add(key_1, "test_1")
        cache.add(key_2, "test_2")

        self.assertEqual(cache.lookup(key_1, False), "test_1")
        self.assertEqual(cache.lookup(key_2, False), "test_2")
        self.assertIsNone(cache.lookup(key_3, False))
    def benchmarkCacheHit50thKeyKnownSubtype(self):
        # The cache holds 50 keys and the lookup key has a subtype in the
        # cache that the cache has already observed (so the subtype relation
        # is memoized).

        cache = function_cache.FunctionCache()
        args_per_call = 5
        num_total_checks = 50

        keys = []
        for i in range(num_total_checks - 1):
            args = []
            for j in range(args_per_call):
                args.append(array_ops.zeros([i, j]))
            keys.append(function_cache.make_cache_key(args))

        for key in keys:
            cache.add(key, "testing")
        cache.add(MockSubtypeOf2(3), "testing")
        cache.lookup(MockSubtypeOf2(2), True)

        iterations = 10000
        subtyping_time = timeit.timeit(
            lambda: cache.lookup(MockSubtypeOf2(2), True), number=iterations)

        self.report_benchmark(name="cache_hit_50th_key_known_subtype",
                              iters=iterations,
                              wall_time=subtyping_time,
                              metrics=[{
                                  "name": "cache_hit_50th_key_known_subtype_avg_ms",
                                  "value": subtyping_time / iterations * 1000
                              }])
    def testClearRemovesAllConcreteFunctions(self):
        cache = function_cache.FunctionCache()

        key_1 = function_cache.make_cache_key(1)
        key_2 = function_cache.make_cache_key(2)
        key_3 = function_cache.make_cache_key(3)

        cache.add(key_1, "test_1")
        cache.add(key_2, "test_2")

        self.assertEqual(cache.lookup(key_1, False), "test_1")
        self.assertEqual(cache.lookup(key_2, False), "test_2")
        self.assertIsNone(cache.lookup(key_3, False))

        cache.clear()

        self.assertIsNone(cache.lookup(key_1, False))
        self.assertIsNone(cache.lookup(key_2, False))
        self.assertIsNone(cache.lookup(key_3, False))
    def testWeakRefDeletionAlsoDeletesConcreteFunction(self):
        dummy_object = DummyClass()
        key, deletion_observer = function_cache.make_cache_key(dummy_object)

        cache = function_cache.FunctionCache()
        cache.add(key, deletion_observer, "testing")
        self.assertEqual(cache.lookup(key, False), "testing")

        del dummy_object
        self.assertIsNone(cache.lookup(key, False))
    def testWeakRefDeletionAlsoDeletesConcreteFunction(self):
        if not function_cache.DELETE_WITH_WEAKREF:
            self.skipTest("Weakref-Based Deletion is disabled")

        dummy_object = DummyClass()
        key, deletion_observer = function_cache.make_cache_key(dummy_object)

        cache = function_cache.FunctionCache()
        cache.add(key, deletion_observer, "testing")
        self.assertEqual(cache.lookup(key, False), "testing")

        del dummy_object
        self.assertIsNone(cache.lookup(key, False))
    def testMultipleObjectsWeakRefDeletion(self):
        dummy_object_1 = DummyClass()
        dummy_object_2 = DummyClass()
        key, deletion_observer = function_cache.make_cache_key(
            (dummy_object_1, dummy_object_2))

        cache = function_cache.FunctionCache()
        cache.add(key, deletion_observer, "testing")
        self.assertEqual(cache.lookup(key, False), "testing")

        del dummy_object_1
        self.assertIsNone(cache.lookup(key, False))

        del dummy_object_2
        self.assertIsNone(cache.lookup(key, False))
    def testDeleteRemovesConcreteFunctions(self):
        cache = function_cache.FunctionCache()
        key_1 = function_cache.make_cache_key(1)
        cache.add(key_1, "test_1")
        self.assertEqual(cache.lookup(key_1, False), "test_1")
        cache.delete(key_1)
        self.assertIsNone(cache.lookup(key_1, False))

        key_2 = MockSubtypeOf2(3)
        cache.add(key_2, "test_2")
        self.assertEqual(cache.lookup(key_2, False), "test_2")

        key_3 = MockSubtypeOf2(2)
        self.assertEqual(cache.lookup(key_3, True), "test_2")

        cache.delete(key_2)
        self.assertIsNone(cache.lookup(key_2, False))
        self.assertIsNone(cache.lookup(key_3, True))
    def testMultipleObjectsWeakRefDeletion(self):
        if not function_cache.DELETE_WITH_WEAKREF:
            self.skipTest("Weakref-Based Deletion is disabled")

        dummy_object_1 = DummyClass()
        dummy_object_2 = DummyClass()
        key, deletion_observer = function_cache.make_cache_key(
            (dummy_object_1, dummy_object_2))

        cache = function_cache.FunctionCache()
        cache.add(key, deletion_observer, "testing")
        self.assertEqual(cache.lookup(key, False), "testing")

        del dummy_object_1
        self.assertIsNone(cache.lookup(key, False))

        del dummy_object_2
        self.assertIsNone(cache.lookup(key, False))
    def testDeleteRemovesConcreteFunctions(self):
        cache = function_cache.FunctionCache()
        key_1, deletion_observer_1 = function_cache.make_cache_key(1)
        cache.add(key_1, deletion_observer_1, "test_1")
        self.assertEqual(cache.lookup(key_1, False), "test_1")
        cache.delete(key_1)
        self.assertIsNone(cache.lookup(key_1, False))

        key_2 = function_cache.FunctionCacheKey(MockSubtypeOf2(2), None)
        cache.add(key_2, trace_type.WeakrefDeletionObserver(), "test_2")
        self.assertEqual(cache.lookup(key_2, False), "test_2")

        key_3 = function_cache.FunctionCacheKey(MockSubtypeOf2(3), None)
        self.assertEqual(cache.lookup(key_3, True), "test_2")

        cache.delete(key_2)
        self.assertIsNone(cache.lookup(key_2, False))
        self.assertIsNone(cache.lookup(key_3, True))
    def benchmarkCacheHit50thKeyUnknownSubtype(self):
        # The cache holds 50 keys and the lookup key has a subtype in the
        # cache, but the cache has never observed the lookup key before (so
        # there is no memoized subtype relation).

        cache = function_cache.FunctionCache()
        args_per_call = 5
        num_total_checks = 50

        keys = []
        for i in range(num_total_checks - 1):
            args = []
            for j in range(args_per_call):
                args.append(array_ops.zeros([i, j]))
            keys.append(function_cache.make_cache_key(args))

        def setup():
            cache.clear()
            for key in keys:
                cache.add(*key, "testing")
            cache.add(
                function_cache.FunctionCacheKey(MockSubtypeOf2(3), None),
                function_trace_type.WeakrefDeletionObserver(), "testing")

        iterations = 10000
        lookup_key = function_cache.FunctionCacheKey(MockSubtypeOf2(2), None)
        subtyping_time = sum(
            timeit.repeat(
                stmt=lambda: cache.lookup(lookup_key, True),
                setup=setup,
                repeat=iterations,
                number=1))

        self.report_benchmark(
            name="cache_hit_50th_key_unknown_subtype",
            iters=iterations,
            wall_time=subtyping_time,
            metrics=[{
                "name": "cache_hit_50th_key_unknown_subtype_avg_ms",
                "value": subtyping_time / iterations * 1000
            }])
    def benchmarkCacheHit50thKeyEqual(self):
        # The cache holds 50 keys and the lookup key is a new object that is
        # equal to one of the cached keys.

        cache = function_cache.FunctionCache()
        args_per_call = 5
        num_total_checks = 50

        keys = []
        for i in range(num_total_checks):
            args = []
            for j in range(args_per_call):
                args.append(array_ops.zeros([i, j]))
            keys.append(function_cache.make_cache_key(args))

        for key in keys:
            cache.add(key, "testing")

        iterations = 10000
        subtyping_time = timeit.timeit(lambda: cache.lookup(keys[-1], True),
                                       number=iterations)
        equality_time = timeit.timeit(lambda: cache.lookup(keys[-1], False),
                                      number=iterations)

        self.report_benchmark(
            name="cache_hit_50th_key_equal",
            iters=iterations,
            wall_time=subtyping_time + equality_time,
            metrics=[{
                "name": "cache_hit_50th_key_equal_subtype_avg_ms",
                "value": subtyping_time / iterations * 1000
            }, {
                "name": "cache_hit_50th_key_equal_equality_avg_ms",
                "value": equality_time / iterations * 1000
            }, {
                "name": "cache_hit_50th_key_subtype_over_equality_ratio",
                "value": subtyping_time / equality_time
            }])
    def benchmarkCacheHit50thKey(self):
        # FunctionCache uses an OrderedDict, so keys are checked in insertion
        # order and the 50th (last-inserted) key is checked last.

        cache = function_cache.FunctionCache()
        args_per_call = 5
        num_total_checks = 50

        keys = []
        for i in range(num_total_checks):
            args = []
            for j in range(args_per_call):
                args.append(array_ops.zeros([i, j]))
            keys.append(function_cache.make_cache_key(args))

        for key in keys:
            cache.add(key, "testing")

        iterations = 10000
        subtyping_time = timeit.timeit(lambda: cache.lookup(keys[-1], True),
                                       number=iterations)
        equality_time = timeit.timeit(lambda: cache.lookup(keys[-1], False),
                                      number=iterations)

        self.report_benchmark(
            name="cache_hit_50th_key_subtype",
            iters=iterations,
            wall_time=subtyping_time + equality_time,
            metrics=[{
                "name": "cache_hit_50th_key_subtype_avg_ms",
                "value": subtyping_time / iterations * 1000
            }, {
                "name": "cache_hit_50th_key_equality_avg_ms",
                "value": equality_time / iterations * 1000
            }, {
                "name": "cache_hit_50th_key_subtype_over_equality_ratio",
                "value": subtyping_time / equality_time
            }])
def second(o):
    # Builds a cache key for the given object via make_cache_key.
    return function_cache.make_cache_key(o)
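# The snippets above come from TensorFlow's function_cache tests and
# benchmarks and mix two versions of the API: in the older one,
# make_cache_key returns a single key and cache.add takes (key, value); in
# the newer one, make_cache_key returns (key, deletion_observer) and
# cache.add takes (key, deletion_observer, value). They also rely on helpers
# that are not shown (DummyClass, MockSubtypeOf2) plus imports of
# function_cache, array_ops, trace_type/function_trace_type, and timeit.
# The sketch below is a plain-Python illustration of what those helpers look
# like; in TensorFlow itself MockSubtypeOf2 subclasses
# tf.types.experimental.TraceType, whose exact abstract methods differ across
# versions, so treat the names here as assumptions rather than the canonical
# definitions.


class DummyClass:
    """Trivial object used to exercise weakref-based cache deletion."""


class MockSubtypeOf2:
    """Mock cache key whose subtype rule is keyed on the value 2 (assumed)."""

    def __init__(self, obj):
        self._object = obj

    def is_subtype_of(self, other):
        # Assumed rule: an instance counts as a subtype of any key wrapping 2,
        # which is what lets lookup(..., True) hit entries added under a
        # different wrapped value in the tests above.
        return other._object == 2

    def most_specific_common_supertype(self, others):
        return None

    def __eq__(self, other):
        return isinstance(other, MockSubtypeOf2) and self._object == other._object

    def __hash__(self):
        return hash(self._object)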