def test_cache_compute_dict_speed(self):
    """First cachedcompute call pays the slow-function cost; the second
    hits the in-memory cache and returns quickly."""
    node = fungraph.fun(_slow_func, 1, 2)
    cache = MemoryCache()
    run = lambda: node.cachedcompute(cache=cache)
    first = timeonce(run)
    second = timeonce(run)
    # _slow_func takes >0.5s uncached; a cache hit should be near-instant.
    self.assertGreater(first, 0.5)
    self.assertLess(second, 0.5)
def test_cache(self):
    """A filesystem cache makes the second computation fast.

    Uses TemporaryDirectory instead of mkdtemp so the cache directory is
    removed when the test finishes (mkdtemp left it behind on disk).
    """
    with tempfile.TemporaryDirectory() as cachedir:
        node = fungraph.fun(_slow_identity, 5, waitseconds=1)
        f = lambda: node.cachedcompute(cache=cachedir)
        t1 = timeonce(f)
        t2 = timeonce(f)
        # Uncached run waits ~1s; cached run should be well under 0.5s.
        self.assertGreater(t1, 0.5)
        self.assertLess(t2, 0.5)
def test_cache_new_object(self):
    """A freshly constructed but identical node reuses the on-disk cache.

    Uses TemporaryDirectory instead of mkdtemp so the cache directory is
    removed when the test finishes (mkdtemp left it behind on disk).
    """
    with tempfile.TemporaryDirectory() as cachedir:
        node1 = fungraph.fun(_slow_identity, 5, waitseconds=1)
        f1 = lambda: node1.cachedcompute(cache=cachedir)
        t1 = timeonce(f1)
        # A second, separately constructed node with the same function and
        # arguments must hit the cache entry written by node1.
        node2 = fungraph.fun(_slow_identity, 5, waitseconds=1)
        f2 = lambda: node2.cachedcompute(cache=cachedir)
        t2 = timeonce(f2)
        self.assertGreater(t1, 0.5)
        self.assertLess(t2, 0.5)
def test_paralell(self):
    """N one-second tasks spread over 8 workers finish much faster than
    N serial runs.

    Context-manages the LocalCluster as well as the Client so worker
    processes are shut down even if an assertion fails (the original
    only closed the Client, leaking the cluster's 8 worker processes).
    """
    with dask.distributed.LocalCluster(
        n_workers=8, processes=True, threads_per_worker=1, memory_limit="auto"
    ) as cluster, dask.distributed.Client(address=cluster):

        def slowfunc(loc):
            time.sleep(1)
            return random.gauss(loc, 1.0)

        N = 8
        args = [fungraph.fun(slowfunc, random.uniform(0.0, 1.0)) for _ in range(N)]
        jobs = fungraph.fun(lambda *args: sum(args), *args)
        # Baseline: one serial execution of the slow function.
        t1 = timeonce(lambda: slowfunc(1.0))
        tn = timeonce(jobs.cachedcompute)
        # Parallel run of N tasks must beat half the serial estimate.
        self.assertLess(tn, (t1 * N) / 2.0)
def test_clone_reuses_cache(self):
    """A clone of a node shares the cache entries written by the original.

    Uses TemporaryDirectory instead of mkdtemp so the cache directory is
    removed when the test finishes (mkdtemp left it behind on disk).
    """
    with tempfile.TemporaryDirectory() as cachedir:
        node = fungraph.fun(
            operator.add,
            fungraph.fun(_slow_identity, 2, waitseconds=1),
            fungraph.fun(_slow_identity, 3, waitseconds=1),
        )
        clone = node.clone()
        nodefun = lambda: node.cachedcompute(cache=cachedir)
        clonefun = lambda: clone.cachedcompute(cache=cachedir)
        tn1 = timeonce(nodefun)
        tn2 = timeonce(nodefun)
        tc1 = timeonce(clonefun)
        # First run is slow; both the repeat and the clone's first run
        # should be cache hits.
        self.assertGreater(tn1, 0.5)
        self.assertLess(tn2, 0.5)
        self.assertLess(tc1, 0.5)
def test_paralell_uses_cache(self):
    """Distributed computation still populates and reuses the cache.

    Context-manages the LocalCluster as well as the Client so worker
    processes are shut down even if an assertion fails (the original
    only closed the Client, leaking the cluster's 8 worker processes).
    """
    with dask.distributed.LocalCluster(
        n_workers=8, processes=True, threads_per_worker=1, memory_limit="auto"
    ) as cluster, dask.distributed.Client(address=cluster):

        def slowfunc(loc):
            time.sleep(1)
            return random.gauss(loc, 1.0)

        N = 8
        args = [fungraph.fun(slowfunc, random.uniform(0.0, 1.0)) for _ in range(N)]
        jobs = fungraph.fun(lambda *args: sum(args), *args)
        with TemporaryDirectory() as d:
            with fungraph.cachecontext(d):
                t1 = timeonce(jobs.compute)
                t2 = timeonce(jobs.compute)
                # First run does real work (>1s); second is a cache hit.
                self.assertGreater(t1, 1.0)
                self.assertLess(t2, 0.5)
def _timenodeonce(node, cachedir):
    """Time a single cached computation of *node* against *cachedir*."""
    def run():
        return node.cachedcompute(cache=cachedir)
    return timeonce(run)
def test_nocache_compute_none(self):
    """Without a cache, repeated compute() calls take about the same time."""
    node = fungraph.fun(_slow_func, 1, 2)
    # Pass the bound method directly; no wrapping lambda is needed.
    first = timeonce(node.compute)
    second = timeonce(node.compute)
    # Neither run is cached, so the two timings should be close.
    self.assertAlmostEqual(first, second, delta=0.1)