def test_backend_context_manager():
    all_test_backends = ['test_backend_%d' % i for i in range(3)]
    for test_backend in all_test_backends:
        register_parallel_backend(test_backend, FakeParallelBackend)
    all_backends = ['multiprocessing', 'threading'] + all_test_backends

    try:
        assert _active_backend_type() == MultiprocessingBackend
        # check that it is possible to switch parallel backends sequentially
        for test_backend in all_backends:
            # TODO: parametrize this block later
            # yield check_backend_context_manager, test_backend
            check_backend_context_manager(test_backend)

        # The default backend is restored
        assert _active_backend_type() == MultiprocessingBackend

        # Check that context manager switching is thread safe:
        Parallel(n_jobs=2, backend='threading')(
            delayed(check_backend_context_manager)(b)
            for b in all_backends if not b)

        # The default backend is again restored
        assert _active_backend_type() == MultiprocessingBackend
    finally:
        for backend_name in list(BACKENDS.keys()):
            if backend_name.startswith('test_'):
                del BACKENDS[backend_name]
def test_backend_context_manager(): all_test_backends = ["test_backend_%d" % i for i in range(3)] for test_backend in all_test_backends: register_parallel_backend(test_backend, FakeParallelBackend) all_backends = ["multiprocessing", "threading"] + all_test_backends try: assert_equal(_active_backend_type(), MultiprocessingBackend) # check that this possible to switch parallel backends sequentially for test_backend in all_backends: yield check_backend_context_manager, test_backend # The default backend is retored assert_equal(_active_backend_type(), MultiprocessingBackend) # Check that context manager switching is thread safe: Parallel(n_jobs=2, backend="threading")( delayed(check_backend_context_manager)(b) for b in all_backends if not b ) # The default backend is again retored assert_equal(_active_backend_type(), MultiprocessingBackend) finally: for backend_name in list(BACKENDS.keys()): if backend_name.startswith("test_"): del BACKENDS[backend_name]
def test_backend_context_manager():
    all_test_backends = ['test_backend_%d' % i for i in range(5)]
    for test_backend in all_test_backends:
        register_parallel_backend(test_backend, MyParallelBackend)

    try:
        assert_equal(parallel.get_default_backend(), 'multiprocessing')
        # check that it is possible to switch parallel backends sequentially
        for test_backend in all_test_backends:
            check_backend_context_manager(test_backend)

        # The default backend is restored
        assert_equal(parallel.get_default_backend(), 'multiprocessing')

        # Check that context manager switching is thread safe:
        Parallel(n_jobs=2, backend='threading')(
            delayed(check_backend_context_manager)(b)
            for b in all_test_backends)

        # The default backend is again restored
        assert_equal(parallel.get_default_backend(), 'multiprocessing')
    finally:
        for backend_name in list(BACKENDS.keys()):
            if backend_name.startswith('test_'):
                del BACKENDS[backend_name]
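# The test variants above rely on two helpers, `FakeParallelBackend` and
# `check_backend_context_manager`, that are not shown in this section.
# Below is a minimal, hypothetical sketch of what they could look like,
# assuming the joblib names imported further down (SequentialBackend,
# ThreadingBackend, MultiprocessingBackend, parallel_backend, cpu_count);
# the attribute names and assertions are illustrative, not the originals.
class FakeParallelBackend(SequentialBackend):
    """Pretend to run in parallel while actually running sequentially."""

    def configure(self, n_jobs=1, parallel=None, **backend_args):
        # Record the requested number of jobs without spawning any workers.
        self.n_jobs = self.effective_n_jobs(n_jobs)
        self.parallel = parallel
        return self.n_jobs

    def effective_n_jobs(self, n_jobs=1):
        # Follow the usual joblib convention: negative values mean all CPUs.
        if n_jobs < 0:
            n_jobs = cpu_count()
        return n_jobs


def check_backend_context_manager(backend_name):
    # Activate the requested backend inside the context manager and check
    # that a freshly created Parallel instance picks it up.
    with parallel_backend(backend_name, n_jobs=3):
        p = Parallel()
        assert p.n_jobs == 3
        if backend_name == 'threading':
            assert type(p._backend) is ThreadingBackend
        elif backend_name == 'multiprocessing':
            assert type(p._backend) is MultiprocessingBackend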
from joblib._parallel_backends import SequentialBackend
from joblib._parallel_backends import ThreadingBackend
from joblib._parallel_backends import MultiprocessingBackend
from joblib._parallel_backends import ParallelBackendBase
from joblib._parallel_backends import LokyBackend
from joblib._parallel_backends import SafeFunction
from joblib._parallel_backends import WorkerInterrupt

from joblib.parallel import Parallel, delayed
from joblib.parallel import register_parallel_backend, parallel_backend
from joblib.parallel import effective_n_jobs, cpu_count
from joblib.parallel import mp, BACKENDS, DEFAULT_BACKEND, EXTERNAL_BACKENDS

from joblib.my_exceptions import JoblibException

ALL_VALID_BACKENDS = [None] + sorted(BACKENDS.keys())
# Add instances of backend classes deriving from ParallelBackendBase
ALL_VALID_BACKENDS += [BACKENDS[backend_str]() for backend_str in BACKENDS]
PROCESS_BACKENDS = ['multiprocessing', 'loky']
PARALLEL_BACKENDS = PROCESS_BACKENDS + ['threading']

if hasattr(mp, 'get_context'):
    # Custom multiprocessing context in Python 3.4+
    ALL_VALID_BACKENDS.append(mp.get_context('spawn'))

DefaultBackend = BACKENDS[DEFAULT_BACKEND]


def get_workers(backend):
    return getattr(backend, '_pool', getattr(backend, '_workers', None))
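# The test variants above also call `_active_backend_type()`, which is
# neither imported nor defined in this section.  A plausible one-line
# helper is sketched below, assuming joblib.parallel.get_active_backend(),
# which returns a (backend_instance, n_jobs) pair; treat it as an
# illustration rather than the original definition.
from joblib.parallel import get_active_backend


def _active_backend_type():
    # Return the class of the currently active backend instance.
    return type(get_active_backend()[0])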
from joblib._parallel_backends import ThreadingBackend
from joblib._parallel_backends import MultiprocessingBackend
from joblib._parallel_backends import SafeFunction
from joblib._parallel_backends import WorkerInterrupt

from joblib.parallel import Parallel, delayed
from joblib.parallel import register_parallel_backend, parallel_backend
from joblib.parallel import mp, cpu_count, BACKENDS, effective_n_jobs

from joblib.my_exceptions import JoblibException

import nose
from nose.tools import assert_equal, assert_true, assert_false, assert_raises

ALL_VALID_BACKENDS = [None] + sorted(BACKENDS.keys())

if hasattr(mp, "get_context"):
    # Custom multiprocessing context in Python 3.4+
    ALL_VALID_BACKENDS.append(mp.get_context("spawn"))


def division(x, y):
    return x / y


def square(x):
    return x ** 2


class MyExceptionWithFinickyInit(Exception):