def test_no_blas_crash_or_freeze_with_multiprocessing():
    if sys.version_info < (3, 4):
        raise nose.SkipTest("multiprocessing can cause BLAS freeze on"
                            " old Python")

    # Use the spawn backend that is both robust and available on all platforms
    spawn_backend = mp.get_context("spawn")

    # Check that on recent Python versions, the 'spawn' start method makes it
    # possible to use multiprocessing in conjunction with any BLAS
    # implementation that happens to be used by numpy, without causing a
    # freeze or a crash
    rng = np.random.RandomState(42)

    # Call BLAS DGEMM to force the initialization of the internal thread-pool
    # in the main process
    a = rng.randn(1000, 1000)
    np.dot(a, a.T)

    # Check that the internal BLAS thread-pool is not in an inconsistent state
    # in the worker processes managed by multiprocessing
    Parallel(n_jobs=2, backend=spawn_backend)(
        delayed(np.dot)(a, a.T) for i in range(2))
def test_no_blas_crash_or_freeze_with_multiprocessing():
    if sys.version_info < (3, 4):
        raise SkipTest('multiprocessing can cause BLAS freeze on old Python')

    # Use the spawn backend that is both robust and available on all platforms
    spawn_backend = mp.get_context('spawn')

    # Check that on recent Python versions, the 'spawn' start method makes it
    # possible to use multiprocessing in conjunction with any BLAS
    # implementation that happens to be used by numpy, without causing a
    # freeze or a crash
    rng = np.random.RandomState(42)

    # Call BLAS DGEMM to force the initialization of the internal thread-pool
    # in the main process
    a = rng.randn(1000, 1000)
    np.dot(a, a.T)

    # Check that the internal BLAS thread-pool is not in an inconsistent state
    # in the worker processes managed by multiprocessing
    Parallel(n_jobs=2, backend=spawn_backend)(
        delayed(np.dot)(a, a.T) for i in range(2))
from joblib.parallel import Parallel, delayed
from joblib.parallel import register_parallel_backend, parallel_backend
from joblib.parallel import effective_n_jobs, cpu_count
from joblib.parallel import mp, BACKENDS, DEFAULT_BACKEND, EXTERNAL_BACKENDS
from joblib.my_exceptions import JoblibException

ALL_VALID_BACKENDS = [None] + sorted(BACKENDS.keys())
# Add instances of backend classes deriving from ParallelBackendBase
ALL_VALID_BACKENDS += [BACKENDS[backend_str]() for backend_str in BACKENDS]
PROCESS_BACKENDS = ['multiprocessing', 'loky']
PARALLEL_BACKENDS = PROCESS_BACKENDS + ['threading']

if hasattr(mp, 'get_context'):
    # Custom multiprocessing context in Python 3.4+
    ALL_VALID_BACKENDS.append(mp.get_context('spawn'))

DefaultBackend = BACKENDS[DEFAULT_BACKEND]


def get_workers(backend):
    return getattr(backend, '_pool', getattr(backend, '_workers', None))


def division(x, y):
    return x / y


def square(x):
    return x**2
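# --- Illustrative sketch (not part of the original test module) ---
# A minimal, hedged example of how the helpers imported above are typically
# exercised: `parallel_backend` selects the backend used by the enclosed
# Parallel call, while `effective_n_jobs` and `cpu_count` report worker
# counts. The function name below is hypothetical; worker counts are machine
# dependent, so only the computed squares are checked exactly.
def _example_threading_backend_usage():
    with parallel_backend('threading', n_jobs=2):
        results = Parallel()(delayed(square)(i) for i in range(4))
    assert results == [0, 1, 4, 9]
    # Machine dependent values: only check that they are positive integers
    assert effective_n_jobs(-1) >= 1
    assert cpu_count() >= 1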
from Queue import Queue

from joblib.parallel import Parallel, delayed, SafeFunction, WorkerInterrupt
from joblib.parallel import mp, cpu_count, VALID_BACKENDS
from joblib.my_exceptions import JoblibException

import nose
from nose.tools import assert_equal, assert_true, assert_false, assert_raises

ALL_VALID_BACKENDS = [None] + VALID_BACKENDS

if hasattr(mp, "get_context"):
    # Custom multiprocessing context in Python 3.4+
    ALL_VALID_BACKENDS.append(mp.get_context("spawn"))


def division(x, y):
    return x / y


def square(x):
    return x ** 2


def exception_raiser(x):
    if x == 7:
        raise ValueError
    return x
# Backward compat
from Queue import Queue

from joblib.parallel import Parallel, delayed, SafeFunction, WorkerInterrupt, \
    mp, cpu_count, VALID_BACKENDS
from joblib.my_exceptions import JoblibException
import nose

ALL_VALID_BACKENDS = [None] + VALID_BACKENDS

if hasattr(mp, 'get_context'):
    # Custom multiprocessing context in Python 3.4+
    ALL_VALID_BACKENDS.append(mp.get_context('spawn'))


###############################################################################

def division(x, y):
    return x / y


def square(x):
    return x ** 2


def exception_raiser(x):
    if x == 7:
        raise ValueError
    return x
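# --- Illustrative sketch (not part of the original test module) ---
# `exception_raiser` above is typically driven through Parallel so that an
# error raised in a worker propagates back to the caller. How joblib wraps
# the ValueError differs between versions and backends, so this hypothetical
# helper only checks that some exception surfaces.
def _example_worker_exception_propagation():
    try:
        Parallel(n_jobs=2)(delayed(exception_raiser)(i) for i in range(10))
    except Exception as exc:  # may be ValueError or a Joblib wrapper class
        print('worker error propagated: %s' % type(exc).__name__)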