Code Example #1
def test_nested_exception_dispatch():
    # Ensure that TransportableException objects raised in nested joblib
    # calls get propagated.
    assert_raises(JoblibException,
                  Parallel(n_jobs=2, pre_dispatch=16, verbose=0),
                  (delayed(SafeFunction(exception_raiser))(i)
                   for i in range(30)))
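
The exception_raiser helper used here is defined elsewhere in joblib's test module. A minimal sketch of the shape these dispatch tests assume (the trigger value and the custom-exception constructor arguments are illustrative assumptions, not the actual implementation):

def exception_raiser(i, custom_exception=False):
    # Fail for one input so that plenty of tasks are already
    # pre-dispatched when the error surfaces; the trigger is arbitrary.
    if i == 7:
        raise (MyExceptionWithFinickyInit('a', 'b', 'c', 'd')
               if custom_exception else ValueError('Raised for testing'))
    return i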
Code Example #2
File: test_disk.py (project: allankevinrichie/joblib)
def test_mkdirp(tmpdir):
    mkdirp(os.path.join(tmpdir.strpath, 'ham'))
    mkdirp(os.path.join(tmpdir.strpath, 'ham'))
    mkdirp(os.path.join(tmpdir.strpath, 'spam', 'spam'))

    # Not all OSErrors are ignored
    assert_raises(OSError, mkdirp, '')
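
joblib's mkdirp mimics "mkdir -p". A minimal sketch of the semantics this test relies on, assuming only the standard library: create recursively, stay silent if the directory already exists, and propagate any other OSError (such as the one raised for an empty path):

import errno
import os

def mkdirp(d):
    try:
        os.makedirs(d)
    except OSError as e:
        # os.makedirs('') raises ENOENT, which must not be swallowed.
        if e.errno != errno.EEXIST:
            raise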
Code Example #3
def test_multiple_spawning():
    # Test that attempting to launch a new Python process after
    # subprocesses have been spawned raises an error, to avoid infinite
    # loops on systems that do not support fork
    if not int(os.environ.get('JOBLIB_MULTIPROCESSING', 1)):
        raise SkipTest()
    assert_raises(ImportError, Parallel(n_jobs=2, pre_dispatch='all'),
                  [delayed(_reload_joblib)() for i in range(10)])
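
_reload_joblib forces a fresh import of joblib inside the worker processes, which is what should trigger the guarding ImportError. A plausible sketch (the helper's actual body is an assumption):

import importlib

import joblib

def _reload_joblib():
    # Re-importing joblib from a spawned subprocess must fail rather
    # than spawn another interpreter recursively.
    importlib.reload(joblib)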
Code Example #4
def test_filter_args_2():
    assert (filter_args(j, [], (1, 2), dict(ee=2)) ==
            {'x': 1, 'y': 2, '**': {'ee': 2}})

    assert_raises(ValueError, filter_args, f, 'a', (None, ))
    # Check that we capture an undefined argument
    assert_raises(ValueError, filter_args, f, ['a'], (None, ))
    ff = functools.partial(f, 1)
    # filter_args has to special-case partial
    assert filter_args(ff, [], (1, )) == {'*': [1], '**': {}}
    assert filter_args(ff, ['y'], (1, )) == {'*': [1], '**': {}}
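
The expected outputs above only make sense for specific helper signatures. A sketch consistent with the assertions (these definitions are assumptions inferred from the test):

def f(x, y=0):
    # 'a' is not an argument of f, hence the ValueError checks above.
    pass

def j(x, y, **kwargs):
    # The '**' entry of the filtered dict collects keyword arguments.
    pass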
Code Example #5
def test_pool_with_memmap(tmpdir_path):
    """Check that subprocess can access and update shared memory memmap"""
    assert_array_equal = np.testing.assert_array_equal

    # Fork the subprocess before allocating the objects to be passed
    pool_temp_folder = os.path.join(tmpdir_path, 'pool')
    os.makedirs(pool_temp_folder)
    p = MemmapingPool(10, max_nbytes=2, temp_folder=pool_temp_folder)
    try:
        filename = os.path.join(tmpdir_path, 'test.mmap')
        a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+')
        a.fill(1.0)

        p.map(inplace_double, [(a, (i, j), 1.0) for i in range(a.shape[0])
                               for j in range(a.shape[1])])

        assert_array_equal(a, 2 * np.ones(a.shape))

        # Open a copy-on-write view on the previous data
        b = np.memmap(filename, dtype=np.float32, shape=(5, 3), mode='c')

        p.map(inplace_double, [(b, (i, j), 2.0) for i in range(b.shape[0])
                               for j in range(b.shape[1])])

        # Passing memmap instances to the pool should not trigger the creation
        # of new files on the FS
        assert os.listdir(pool_temp_folder) == []

        # the original data is untouched
        assert_array_equal(a, 2 * np.ones(a.shape))
        assert_array_equal(b, 2 * np.ones(b.shape))

        # readonly maps can be read but not updated
        c = np.memmap(filename,
                      dtype=np.float32,
                      shape=(10, ),
                      mode='r',
                      offset=5 * 4)

        assert_raises(AssertionError, p.map, check_array,
                      [(c, i, 3.0) for i in range(c.shape[0])])

        # depending on the version of numpy one can either get a RuntimeError
        # or a ValueError
        assert_raises((RuntimeError, ValueError), p.map, inplace_double,
                      [(c, i, 2.0) for i in range(c.shape[0])])
    finally:
        # Clean all filehandlers held by the pool
        p.terminate()
        del p
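
The worker helpers are defined at module level so they can be pickled and sent to the pool. A sketch of the behavior the assertions rely on (the exact bodies are assumptions):

def inplace_double(args):
    # Check one cell of a (possibly memmapped) array, then double it
    # in place; fails on read-only memmaps, as the test expects.
    data, position, expected = args
    assert data[position] == expected
    data[position] *= 2

def check_array(args):
    # Read-only check: raises AssertionError on mismatch, which p.map
    # is expected to propagate back to the parent process.
    data, position, expected = args
    assert data[position] == expected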
Code Example #6
def test_parallel_pickling():
    """ Check that pmap captures the errors when it is passed an object
        that cannot be pickled.
    """
    def g(x):
        return x ** 2

    try:
        # pickling a local function always fails, but the exception
        # raised is a PickleError for python <= 3.4 and AttributeError
        # for python >= 3.5
        pickle.dumps(g)
    except Exception as exc:
        exception_class = exc.__class__

    assert_raises(exception_class, Parallel(),
                  (delayed(g)(x) for x in range(10)))
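
By contrast, a function defined at module level is pickled by qualified name, so the same pipeline would succeed with it. A quick illustration of the difference:

import pickle

def square(x):
    # Module-level functions round-trip through pickle by reference.
    return x ** 2

assert pickle.loads(pickle.dumps(square))(3) == 9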
Code Example #7
def test_call_and_shelve():
    """Test MemorizedFunc outputting a reference to cache.
    """

    for func, Result in zip((
            MemorizedFunc(f, env['dir']),
            NotMemorizedFunc(f),
            Memory(cachedir=env['dir']).cache(f),
            Memory(cachedir=None).cache(f),
    ), (MemorizedResult, NotMemorizedResult, MemorizedResult,
            NotMemorizedResult)):
        assert func(2) == 5
        result = func.call_and_shelve(2)
        assert isinstance(result, Result)
        assert result.get() == 5

        result.clear()
        assert_raises(KeyError, result.get)
        result.clear()  # Do nothing if there is no cache.
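
Outside the test, the shelving pattern looks as follows; a minimal usage sketch with a hypothetical cached function and cache directory:

from joblib import Memory

memory = Memory(cachedir='/tmp/joblib_cache', verbose=0)

@memory.cache
def add_three(x):
    return x + 3

ref = add_three.call_and_shelve(2)  # compute (or reuse) and shelve
print(ref.get())                    # 5, loaded back from the cache
ref.clear()                         # drop this entry from the cache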
Code Example #8
def _check_pickle(filename, expected_list):
    """Helper function to test joblib pickle content.

    Note: currently only pickles containing an iterable are supported
    by this function.
    """
    if (not PY3_OR_LATER and (filename.endswith('.xz') or
                              filename.endswith('.lzma'))):
        # lzma is not supported for python versions < 3.3
        assert_raises(NotImplementedError, numpy_pickle.load, filename)
        return

    version_match = re.match(r'.+py(\d)(\d).+', filename)
    py_version_used_for_writing = int(version_match.group(1))
    py_version_used_for_reading = sys.version_info[0]

    py_version_to_default_pickle_protocol = {2: 2, 3: 3}
    pickle_reading_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_reading, 4)
    pickle_writing_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_writing, 4)
    if pickle_reading_protocol >= pickle_writing_protocol:
        try:
            with warnings.catch_warnings(record=True) as caught_warnings:
                warnings.simplefilter('always')
                warnings.filterwarnings(
                    'ignore', module='numpy',
                    message='The compiler package is deprecated')
                result_list = numpy_pickle.load(filename)
                expected_nb_warnings = 1 if ("0.9" in filename or
                                             "0.8.4" in filename) else 0
                assert len(caught_warnings) == expected_nb_warnings
            for warn in caught_warnings:
                assert warn.category == DeprecationWarning
                assert (warn.message.args[0] ==
                        "The file '{0}' has been generated with a joblib "
                        "version less than 0.10. Please regenerate this "
                        "pickle file.".format(filename))
            for result, expected in zip(result_list, expected_list):
                if isinstance(expected, np.ndarray):
                    assert result.dtype == expected.dtype
                    np.testing.assert_equal(result, expected)
                else:
                    assert result == expected
        except Exception as exc:
            # When trying to read with python 3 a pickle generated
            # with python 2 we expect a user-friendly error
            if (py_version_used_for_reading == 3 and
                    py_version_used_for_writing == 2):
                assert isinstance(exc, ValueError)
                message = ('You may be trying to read with '
                           'python 3 a joblib pickle generated with python 2.')
                assert message in str(exc)
            else:
                raise
    else:
        # Pickle protocol used for writing is too high. We expect a
        # "unsupported pickle protocol" error message
        try:
            numpy_pickle.load(filename)
            raise AssertionError('Numpy pickle loading should '
                                 'have raised a ValueError exception')
        except ValueError as e:
            message = 'unsupported pickle protocol: {0}'.format(
                pickle_writing_protocol)
            assert message in str(e.args)
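
The version regex assumes fixture filenames that embed the writing interpreter version, such as 'py27' or 'py35'. An illustrative match on a hypothetical fixture name following that convention:

import re

fname = 'joblib_0.10.0_compressed_pickle_py35_np19.gz'
m = re.match(r'.+py(\d)(\d).+', fname)
assert (int(m.group(1)), int(m.group(2))) == (3, 5)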
Code Example #9
def test_value_error():
    # Test inverting the input arguments to dump
    assert_raises(ValueError, numpy_pickle.dump, 'foo', dict())
Code Example #10
def test_check_subprocess_call_wrong_command():
    wrong_command = '_a_command_that_does_not_exist_'
    assert_raises(OSError,
                  check_subprocess_call,
                  [wrong_command])
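
check_subprocess_call comes from joblib's test utilities. A simplified sketch of the behavior exercised here (the real helper does more, e.g. output checking; this reduced version is an assumption):

import subprocess

def check_subprocess_call_sketch(cmd):
    # Popen raises OSError when the executable cannot be found, which
    # is exactly what the test above expects.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    if proc.returncode != 0:
        raise ValueError('Non-zero return code: %d' % proc.returncode)
    return out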
Code Example #11
def test_exception_dispatch():
    "Make sure that exception raised during dispatch are indeed captured"
    assert_raises(ValueError, Parallel(n_jobs=2, pre_dispatch=16, verbose=0),
                  (delayed(exception_raiser)(i) for i in range(30)))
Code Example #12
def test_parallel_timeout_fail():
    # Check that timeout properly fails when function is too slow
    for backend in ['multiprocessing', 'threading']:
        assert_raises(TimeoutError,
                      Parallel(n_jobs=2, backend=backend, timeout=0.01),
                      (delayed(sleep)(10) for x in range(10)))
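
For contrast, tasks that finish well under the limit complete normally with the same API; a minimal sketch assuming the same imports as the test:

results = Parallel(n_jobs=2, timeout=10)(
    delayed(sleep)(0.001) for _ in range(4))
assert results == [None] * 4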
Code Example #13
def test_error_capture():
    # Check that errors are captured and that the correct exceptions
    # are raised.
    if mp is not None:
        # A JoblibException will be raised only if there is indeed
        # multiprocessing
        assert_raises(JoblibException, Parallel(n_jobs=2),
                      [delayed(division)(x, y)
                       for x, y in zip((0, 1), (1, 0))])
        assert_raises(WorkerInterrupt, Parallel(n_jobs=2),
                      [delayed(interrupt_raiser)(x) for x in (1, 0)])

        # Try again with the context manager API
        with Parallel(n_jobs=2) as parallel:
            assert parallel._backend._pool is not None
            original_pool = parallel._backend._pool

            assert_raises(JoblibException, parallel,
                          [delayed(division)(x, y)
                           for x, y in zip((0, 1), (1, 0))])

            # The managed pool should still be available and be in a working
            # state despite the previously raised (and caught) exception
            assert parallel._backend._pool is not None

            # The pool should have been interrupted and restarted:
            assert parallel._backend._pool is not original_pool

            assert ([f(x, y=1) for x in range(10)] ==
                    parallel(delayed(f)(x, y=1) for x in range(10)))

            original_pool = parallel._backend._pool
            assert_raises(WorkerInterrupt, parallel,
                          [delayed(interrupt_raiser)(x) for x in (1, 0)])

            # The pool should still be available despite the exception
            assert parallel._backend._pool is not None

            # The pool should have been interrupted and restarted:
            assert parallel._backend._pool is not original_pool

            assert ([f(x, y=1) for x in range(10)] ==
                    parallel(delayed(f)(x, y=1) for x in range(10)))

        # Check that the inner pool has been terminated when exiting the
        # context manager
        assert parallel._backend._pool is None
    else:
        assert_raises(KeyboardInterrupt, Parallel(n_jobs=2),
                      [delayed(interrupt_raiser)(x) for x in (1, 0)])

    # wrapped exceptions should inherit from the class of the original
    # exception to make it easy to catch them
    assert_raises(ZeroDivisionError, Parallel(n_jobs=2),
                  [delayed(division)(x, y) for x, y in zip((0, 1), (1, 0))])

    assert_raises(
        MyExceptionWithFinickyInit,
        Parallel(n_jobs=2, verbose=0),
        (delayed(exception_raiser)(i, custom_exception=True)
         for i in range(30)))

    try:
        # JoblibException wrapping is disabled in sequential mode:
        ex = JoblibException()
        Parallel(n_jobs=1)(
            delayed(division)(x, y) for x, y in zip((0, 1), (1, 0)))
    except Exception as ex:
        assert not isinstance(ex, JoblibException)
Code Example #14
def test_binary_zlibfile():
    filename = env['filename'] + str(random.randint(0, 1000))

    # Test bad compression levels
    for bad_value in (-1, 10, 15, 'a', (), {}):
        assert_raises(ValueError, BinaryZlibFile, filename, 'wb',
                      compresslevel=bad_value)

    # Test invalid modes
    for bad_mode in ('a', 'x', 'r', 'w', 1, 2):
        assert_raises(ValueError, BinaryZlibFile, filename, bad_mode)

    # Test wrong filename type (not a string or a file)
    for bad_file in (1, (), {}):
        assert_raises(TypeError, BinaryZlibFile, bad_file, 'rb')

    for d in (b'a little data as bytes.',
              # More bytes
              10000 * "{}".format(
                  random.randint(0, 1000) * 1000).encode('latin-1')):
        # Regular cases
        for compress_level in (1, 3, 9):
            with open(filename, 'wb') as f:
                with BinaryZlibFile(f, 'wb',
                                    compresslevel=compress_level) as fz:
                    assert fz.writable()
                    fz.write(d)
                    assert fz.fileno() == f.fileno()
                    assert_raises(io.UnsupportedOperation, fz._check_can_read)
                    assert_raises(io.UnsupportedOperation, fz._check_can_seek)
                assert fz.closed
                assert_raises(ValueError, fz._check_not_closed)

            with open(filename, 'rb') as f:
                with BinaryZlibFile(f) as fz:
                    assert fz.readable()
                    if PY3_OR_LATER:
                        assert fz.seekable()
                    assert fz.fileno() == f.fileno()
                    assert fz.read() == d
                    assert_raises(io.UnsupportedOperation,
                                  fz._check_can_write)
                    if PY3_OR_LATER:
                        # io.BufferedIOBase doesn't have seekable() method in
                        # python 2
                        assert fz.seekable()
                        fz.seek(0)
                        assert fz.tell() == 0
                assert fz.closed

            os.remove(filename)

            # Test with a filename as input
            with BinaryZlibFile(filename, 'wb',
                                compresslevel=compress_level) as fz:
                assert fz.writable()
                fz.write(d)

            with BinaryZlibFile(filename, 'rb') as fz:
                assert fz.read() == d
                assert fz.seekable()

            # Test without context manager
            fz = BinaryZlibFile(filename, 'wb', compresslevel=compress_level)
            assert fz.writable()
            fz.write(d)
            fz.close()

            fz = BinaryZlibFile(filename, 'rb')
            assert fz.read() == d
            fz.close()
Code Example #15
def test_filter_args_error_msg():
    """ Make sure that filter_args returns decent error messages, for the
        sake of the user.
    """
    assert_raises(ValueError, filter_args, f, [])
Code Example #16
def test_safe_function():
    safe_division = SafeFunction(division)
    assert_raises(JoblibException, safe_division, 1, 0)
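
division is the module-level helper shared by several tests here; a sketch of the assumed definition:

def division(x, y):
    # division(1, 0) raises ZeroDivisionError; SafeFunction wraps it
    # in a JoblibException, as the test asserts.
    return x / y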
Code Example #17
def test_invalid_backend():
    assert_raises(ValueError, Parallel, backend='unit-testing')
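
Only names of registered backends are accepted. The built-in ones referenced elsewhere in this suite construct without error:

Parallel(n_jobs=2, backend='multiprocessing')
Parallel(n_jobs=2, backend='threading')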
Code Example #18
def _check_pickle(filename, expected_list):
    """Helper function to test joblib pickle content.

    Note: currently only pickles containing an iterable are supported
    by this function.
    """
    if (not PY3_OR_LATER
            and (filename.endswith('.xz') or filename.endswith('.lzma'))):
        # lzma is not supported for python versions < 3.3
        assert_raises(NotImplementedError, numpy_pickle.load, filename)
        return

    version_match = re.match(r'.+py(\d)(\d).+', filename)
    py_version_used_for_writing = int(version_match.group(1))
    py_version_used_for_reading = sys.version_info[0]

    py_version_to_default_pickle_protocol = {2: 2, 3: 3}
    pickle_reading_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_reading, 4)
    pickle_writing_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_writing, 4)
    if pickle_reading_protocol >= pickle_writing_protocol:
        try:
            with warnings.catch_warnings(record=True) as caught_warnings:
                warnings.simplefilter('always')
                warnings.filterwarnings(
                    'ignore',
                    module='numpy',
                    message='The compiler package is deprecated')
                result_list = numpy_pickle.load(filename)
                filename_base = os.path.basename(filename)
                expected_nb_warnings = 1 if ("_0.9" in filename_base or
                                             "_0.8.4" in filename_base) else 0
                assert len(caught_warnings) == expected_nb_warnings
            for warn in caught_warnings:
                assert warn.category == DeprecationWarning
                assert (warn.message.args[0] ==
                        "The file '{0}' has been generated with a joblib "
                        "version less than 0.10. Please regenerate this "
                        "pickle file.".format(filename))
            for result, expected in zip(result_list, expected_list):
                if isinstance(expected, np.ndarray):
                    assert result.dtype == expected.dtype
                    np.testing.assert_equal(result, expected)
                else:
                    assert result == expected
        except Exception as exc:
            # When trying to read with python 3 a pickle generated
            # with python 2 we expect a user-friendly error
            if (py_version_used_for_reading == 3
                    and py_version_used_for_writing == 2):
                assert isinstance(exc, ValueError)
                message = ('You may be trying to read with '
                           'python 3 a joblib pickle generated with python 2.')
                assert message in str(exc)
            else:
                raise
    else:
        # Pickle protocol used for writing is too high. We expect a
        # "unsupported pickle protocol" error message
        try:
            numpy_pickle.load(filename)
            raise AssertionError('Numpy pickle loading should '
                                 'have raised a ValueError exception')
        except ValueError as e:
            message = 'unsupported pickle protocol: {0}'.format(
                pickle_writing_protocol)
            assert message in str(e.args)
Code Example #19
def test_invalid_batch_size():
    assert_raises(ValueError, Parallel, batch_size=0)
    assert_raises(ValueError, Parallel, batch_size=-1)
    assert_raises(ValueError, Parallel, batch_size=1.42)
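
Valid values are positive integers or the string 'auto'. A quick positive counterpart, assuming the same imports as the test:

Parallel(batch_size=1)
Parallel(batch_size='auto')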