def test_spawned_children_exception(self):
    vids = list(range(2))
    rt = Runtime(get_spawned_process_pool_factory())
    _, vids_with_err = rt.run(
        TestRuntime.query_that_throws_at_0, vids, print_error=False)
    self.assertEqual([0], vids_with_err)
def test_iterator_error(self):
    vids = list(range(2))
    rt = Runtime(get_spawned_process_pool_factory())
    gen = rt.get_result_iterator(
        TestRuntime.query_that_throws_at_0, vids, print_error=False)
    result = next(gen)
    self.assertIntervalSetEq(result, TestRuntime.query_that_throws_at_0([1]))
    with self.assertRaises(RekallRuntimeException):
        next(gen)
def get_runtime_for_script(num_workers=mp.cpu_count()):
    """Returns a parallel runtime that is safe for multithreaded programs.

    Notes:
        This uses SpawnedProcessPool; it does not work in a Jupyter Notebook
        and will hang there.
    """
    return Runtime(get_worker_pool_factory_for_script(num_workers))
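# Usage sketch for the script runtime (a non-authoritative example; `my_query`
# and `video_ids` are hypothetical stand-ins for a user-defined per-video query
# function and its list of video ids, following the rt.run pattern in the tests):
#
#     rt = get_runtime_for_script(num_workers=4)
#     results, failed_vids = rt.run(my_query, video_ids)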
def get_runtime_for_jupyter(num_workers=mp.cpu_count()):
    """Returns a parallel runtime that works in a Jupyter Notebook.

    Notes:
        This uses ForkedProcessPool, so the usual caveats of forking a
        multithreaded program apply.
    """
    return Runtime(get_worker_pool_factory_for_jupyter(num_workers))
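# Usage sketch for the Jupyter runtime (an assumption-laden example; `my_query`
# and `video_ids` are hypothetical, and consuming results incrementally via
# get_result_iterator mirrors how the iterator tests below use it):
#
#     rt = get_runtime_for_jupyter()
#     for result in rt.get_result_iterator(my_query, video_ids, randomize=True):
#         print(result)  # or render each partial result as it arrives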
def test_exception_inline(self):
    vids = list(range(2))
    rt = Runtime.inline()
    _, vids_with_err = rt.run(
        TestRuntime.query_that_throws_at_0, vids, print_error=False)
    self.assertEqual([0], vids_with_err)
def get_runtime_for_ipython_cluster(client):
    """Returns a runtime using an IPython cluster as worker processes.

    This is the best option in a Jupyter Notebook.
    """
    return Runtime(get_worker_pool_factory_for_ipython_cluster(client))
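# Usage sketch for the IPython-cluster runtime (a sketch under assumptions: an
# ipyparallel cluster is already running, e.g. started with `ipcluster start`,
# and `my_query` / `video_ids` are hypothetical placeholders):
#
#     from ipyparallel import Client
#     rt = get_runtime_for_ipython_cluster(Client())
#     results, failed_vids = rt.run(my_query, video_ids)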
def test_returning_intervalset(self):
    vids = list(range(1, 101))
    rt = Runtime(get_spawned_process_pool_factory())
    answer, _ = rt.run(TestRuntime.query_that_throws_at_0, vids)
    self.assertIntervalSetEq(answer, TestRuntime.query_that_throws_at_0(vids))
def test_spawned_children(self):
    vids = list(range(10))
    rt = Runtime(get_spawned_process_pool_factory())
    self.assertCollectionEq(
        rt.run(TestRuntime.query, vids, chunksize=3)[0],
        TestRuntime.query(vids))
def test_single_process_runtime(self):
    vids = list(range(1000))
    rt = Runtime.inline()
    self.assertCollectionEq(
        rt.run(TestRuntime.query, vids)[0],
        TestRuntime.query(vids))
def test_all_tasks_fail(self):
    vids = list(range(1))
    rt = Runtime.inline()
    with self.assertRaises(RekallRuntimeException):
        rt.run(TestRuntime.query_that_throws_at_0, vids, print_error=False)
def test_inline_iterator(self):
    vids = list(range(1000))
    rt = Runtime.inline()
    gen = rt.get_result_iterator(TestRuntime.query, vids, randomize=True)
    for result in gen:
        self.assertCollectionEq(result, TestRuntime.query(result.keys()))
def test_iterator(self):
    vids = list(range(1000))
    rt = Runtime(get_forked_process_pool_factory(5))
    gen = rt.get_result_iterator(TestRuntime.query, vids, randomize=False)
    for vid, result in zip(vids, gen):
        self.assertCollectionEq(result, TestRuntime.query([vid]))