Code example #1
def test_memory_func_with_kwonly_args(tmpdir):
    mem = Memory(cachedir=tmpdir.strpath, verbose=0)
    func_cached = mem.cache(func_with_kwonly_args)

    assert func_cached(1, 2, kw1=3) == (1, 2, 3, 'kw2')

    # Making sure that providing a keyword-only argument by
    # position raises an exception
    with raises(ValueError) as excinfo:
        func_cached(1, 2, 3, kw2=4)
    excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
                  "parameter")

    # Populate the cache with a valid call, then check that passing the
    # keyword-only parameter by position still raises ValueError even
    # though a cached result exists
    func_cached(1, 2, kw1=3, kw2=4)

    with raises(ValueError) as excinfo:
        func_cached(1, 2, 3, kw2=4)
    excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
                  "parameter")

    # Test the 'ignore' parameter
    func_cached = mem.cache(func_with_kwonly_args, ignore=['kw2'])
    assert func_cached(1, 2, kw1=3, kw2=4) == (1, 2, 3, 4)
    assert func_cached(1, 2, kw1=3, kw2='ignored') == (1, 2, 3, 4)
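
Note: this example calls a `func_with_kwonly_args` helper defined elsewhere in joblib's test suite. A minimal sketch of what it is assumed to look like, inferred from the asserted return value `(1, 2, 3, 'kw2')`:

def func_with_kwonly_args(a, b, *, kw1='kw1', kw2='kw2'):
    # Hypothetical stand-in: echo the arguments back so the cached return
    # value can be checked against the inputs.
    return a, b, kw1, kw2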
Code example #2
def test_pool_with_memmap(tmpdir):
    """Check that subprocess can access and update shared memory memmap"""
    assert_array_equal = np.testing.assert_array_equal

    # Fork the subprocess before allocating the objects to be passed
    pool_temp_folder = tmpdir.mkdir('pool').strpath
    p = MemmapingPool(10, max_nbytes=2, temp_folder=pool_temp_folder)
    try:
        filename = tmpdir.join('test.mmap').strpath
        a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+')
        a.fill(1.0)

        p.map(inplace_double, [(a, (i, j), 1.0)
                               for i in range(a.shape[0])
                               for j in range(a.shape[1])])

        assert_array_equal(a, 2 * np.ones(a.shape))

        # Open a copy-on-write view on the previous data
        b = np.memmap(filename, dtype=np.float32, shape=(5, 3), mode='c')

        p.map(inplace_double, [(b, (i, j), 2.0)
                               for i in range(b.shape[0])
                               for j in range(b.shape[1])])

        # Passing memmap instances to the pool should not trigger the creation
        # of new files on the FS
        assert os.listdir(pool_temp_folder) == []

        # the original data is untouched
        assert_array_equal(a, 2 * np.ones(a.shape))
        assert_array_equal(b, 2 * np.ones(b.shape))

        # readonly maps can be read but not updated
        c = np.memmap(filename, dtype=np.float32, shape=(10,), mode='r',
                      offset=5 * 4)

        with raises(AssertionError):
            p.map(check_array, [(c, i, 3.0) for i in range(c.shape[0])])

        # depending on the version of numpy one can either get a RuntimeError
        # or a ValueError
        with raises((RuntimeError, ValueError)):
            p.map(inplace_double, [(c, i, 2.0) for i in range(c.shape[0])])
    finally:
        # Clean all filehandlers held by the pool
        p.terminate()
        del p
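
This test relies on two helpers, `inplace_double` and `check_array`, defined elsewhere in joblib's memmapping tests. A hedged sketch of what they are assumed to do, inferred from how they are called above:

import numpy as np


def inplace_double(args):
    # Check the current value at `position`, then double it in place,
    # mutating the (possibly memmapped) array received from the parent.
    data, position, expected = args
    assert data[position] == expected
    data[position] *= 2


def check_array(args):
    # Assert that the array holds `expected` at `position`; used above to
    # show that read-only memmaps can be read but not updated.
    data, position, expected = args
    np.testing.assert_array_equal(data[position], expected)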
Code example #3
def test_hashing_pickling_error():
    def non_picklable():
        return 42

    with raises(pickle.PicklingError) as excinfo:
        hash(non_picklable)
    excinfo.match('PicklingError while hashing')
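
For contrast with the failure above, hashing any picklable object works; a minimal sketch, assuming `hash` here is joblib's hashing helper (importable as `joblib.hash`):

from joblib import hash  # joblib's hashing helper, shadows the builtin

# Picklable objects produce a stable hex digest...
assert hash([1, 2, 3]) == hash([1, 2, 3])
# ...whereas the locally defined function in the test above cannot be
# pickled, hence the PicklingError.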
Code example #4
def test_filter_args_error_msg(exception, regex, func, args):
    """ Make sure that filter_args returns decent error messages, for the
        sake of the user.
    """
    with raises(exception) as excinfo:
        filter_args(func, *args)
    excinfo.match(regex)
Code example #5
def get_set_get_collect(m, i):
    a = np.ones(42)
    with raises(KeyError):
        m.get(a)
    m.set(a, i)
    assert m.get(a) == i
    return id(a)
Code example #6
def test_check_subprocess_call_non_matching_regex():
    code = '42'
    non_matching_pattern = '_no_way_this_matches_anything_'

    with raises(ValueError) as excinfo:
        check_subprocess_call([sys.executable, '-c', code],
                              stdout_regex=non_matching_pattern)
    excinfo.match('Unexpected stdout.+{}'.format(non_matching_pattern))
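
The passing case is not shown in these examples; a minimal sketch of what it presumably looks like, reusing only the parameters already seen here (`stdout_regex`, `timeout`) and assuming `check_subprocess_call` comes from `joblib.testing`:

import sys
from joblib.testing import check_subprocess_call  # assumed import path

# A pattern that does match the child's stdout should not raise.
check_subprocess_call([sys.executable, '-c', 'print("42")'],
                      stdout_regex='42', timeout=5)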
Code example #7
File: test_disk.py  Project: ELVIS-Project/music21
def test_mkdirp(tmpdir):
    mkdirp(os.path.join(tmpdir.strpath, 'ham'))
    mkdirp(os.path.join(tmpdir.strpath, 'ham'))
    mkdirp(os.path.join(tmpdir.strpath, 'spam', 'spam'))

    # Not all OSErrors are ignored
    with raises(OSError):
        mkdirp('')
Code example #8
File: test_memory.py  Project: joblib/joblib
def test_instanciate_incomplete_store_backend():
    # Verify that registering an external incomplete store backend raises an
    # exception when one tries to instantiate it.
    backend_name = "isb"
    register_store_backend(backend_name, IncompleteStoreBackend)
    assert (backend_name, IncompleteStoreBackend) in _STORE_BACKENDS.items()
    with raises(TypeError) as excinfo:
        _store_backend_factory(backend_name, "fake_location")
    excinfo.match(r"Can't instantiate abstract class "
                  "IncompleteStoreBackend with abstract methods*")
Code example #9
def test_weak_array_key_map():

    def assert_empty_after_gc_collect(container, retries=3):
        for i in range(retries):
            if len(container) == 0:
                return
            gc.collect()
        assert len(container) == 0

    a = np.ones(42)
    m = _WeakArrayKeyMap()
    m.set(a, 'a')
    assert m.get(a) == 'a'

    b = a
    assert m.get(b) == 'a'
    m.set(b, 'b')
    assert m.get(a) == 'b'

    del a
    gc.collect()
    assert len(m._data) == 1
    assert m.get(b) == 'b'

    del b
    assert_empty_after_gc_collect(m._data)

    c = np.ones(42)
    m.set(c, 'c')
    assert len(m._data) == 1
    assert m.get(c) == 'c'

    with raises(KeyError):
        m.get(np.ones(42))

    del c
    assert_empty_after_gc_collect(m._data)

    # Check that creating and dropping numpy arrays with potentially the same
    # object id will not cause the map to get confused.
    def get_set_get_collect(m, i):
        a = np.ones(42)
        with raises(KeyError):
            m.get(a)
        m.set(a, i)
        assert m.get(a) == i
        return id(a)

    unique_ids = set([get_set_get_collect(m, i) for i in range(1000)])
    if platform.python_implementation() == 'CPython':
        # On CPython (at least) the same id is often reused many times for the
        # temporary arrays created under the local scope of the
        # get_set_get_collect function without causing any spurious lookups /
        # insertions in the map.
        assert len(unique_ids) < 100
Code example #10
def test_backend_hinting_and_constraints_with_custom_backends(capsys):
    # Custom backends can declare that they use threads and have shared memory
    # semantics:
    class MyCustomThreadingBackend(ParallelBackendBase):
        supports_sharedmem = True
        use_threads = True

        def apply_async(self):
            pass

        def effective_n_jobs(self, n_jobs):
            return n_jobs

    with parallel_backend(MyCustomThreadingBackend()):
        p = Parallel(n_jobs=2, prefer='processes')  # ignored
        assert type(p._backend) == MyCustomThreadingBackend

        p = Parallel(n_jobs=2, require='sharedmem')
        assert type(p._backend) == MyCustomThreadingBackend

    class MyCustomProcessingBackend(ParallelBackendBase):
        supports_sharedmem = False
        use_threads = False

        def apply_async(self):
            pass

        def effective_n_jobs(self, n_jobs):
            return n_jobs

    with parallel_backend(MyCustomProcessingBackend()):
        p = Parallel(n_jobs=2, prefer='processes')
        assert type(p._backend) == MyCustomProcessingBackend

        out, err = capsys.readouterr()
        assert out == ""
        assert err == ""

        p = Parallel(n_jobs=2, require='sharedmem', verbose=10)
        assert type(p._backend) == ThreadingBackend

        out, err = capsys.readouterr()
        expected = ("Using ThreadingBackend as joblib.Parallel backend "
                    "instead of MyCustomProcessingBackend as the latter "
                    "does not provide shared memory semantics.")
        assert out.strip() == expected
        assert err == ""

    with raises(ValueError):
        Parallel(backend=MyCustomProcessingBackend(), require='sharedmem')
Code example #11
def test_check_subprocess_call_non_zero_return_code():
    code_with_non_zero_exit = '\n'.join([
        'import sys', 'print("writing on stdout")',
        'sys.stderr.write("writing on stderr")', 'sys.exit(123)'
    ])

    pattern = re.compile(
        'Non-zero return code: 123.+'
        'Stdout:\nwriting on stdout.+'
        'Stderr:\nwriting on stderr', re.DOTALL)

    with raises(ValueError) as excinfo:
        check_subprocess_call([sys.executable, '-c', code_with_non_zero_exit])
    excinfo.match(pattern)
Code example #12
File: test_parallel.py  Project: noamher/joblib
def test_abort_backend(n_jobs, backend):
    delays = ["a"] + [10] * 100

    if os.environ.get("TRAVIS_OS_NAME") is not None and n_jobs < 0:
        # Use at most 8 CPUs on Travis, as cpu_count returns 32 whereas we
        # only have access to 2 cores.
        n_jobs += 8

    with raises(TypeError):
        t_start = time.time()
        Parallel(n_jobs=n_jobs,
                 backend=backend)(delayed(time.sleep)(i) for i in delays)
    dt = time.time() - t_start
    assert dt < 5
Code example #13
def test_check_subprocess_call_non_zero_return_code():
    code_with_non_zero_exit = '\n'.join([
        'import sys',
        'print("writing on stdout")',
        'sys.stderr.write("writing on stderr")',
        'sys.exit(123)'])

    pattern = re.compile('Non-zero return code: 123.+'
                         'Stdout:\nwriting on stdout.+'
                         'Stderr:\nwriting on stderr', re.DOTALL)

    with raises(ValueError) as excinfo:
        check_subprocess_call([sys.executable, '-c', code_with_non_zero_exit])
    excinfo.match(pattern)
Code example #14
def test_register_compressor_invalid_fileobj():
    # Test that registering an invalid file object is not allowed.

    class InvalidFileObject():
        pass

    class InvalidFileObjectWrapper(CompressorWrapper):
        def __init__(self):
            CompressorWrapper.__init__(self, obj=InvalidFileObject,
                                       prefix=b'prefix')

    with raises(ValueError) as excinfo:
        register_compressor('invalid', InvalidFileObjectWrapper())

    excinfo.match("Compressor 'fileobj_factory' attribute should implement "
                  "the file object interface")
Code example #15
def test_check_subprocess_call_timeout():
    code_timing_out = '\n'.join([
        'import time', 'import sys', 'print("before sleep on stdout")',
        'sys.stdout.flush()', 'sys.stderr.write("before sleep on stderr")',
        'sys.stderr.flush()', 'time.sleep(1.1)',
        'print("process should have be killed before")', 'sys.stdout.flush()'
    ])

    pattern = re.compile(
        'Non-zero return code:.+'
        'Stdout:\nbefore sleep on stdout\s+'
        'Stderr:\nbefore sleep on stderr', re.DOTALL)

    with raises(ValueError) as excinfo:
        check_subprocess_call([sys.executable, '-c', code_timing_out],
                              timeout=1)
    excinfo.match(pattern)
Code example #16
File: test_func_inspect.py  Project: joblib/joblib
def test_filter_args_python_3():
    assert filter_args(func_with_kwonly_args, [], (1, 2), {"kw1": 3, "kw2": 4}) == {
        "a": 1,
        "b": 2,
        "kw1": 3,
        "kw2": 4,
    }

    # Passing the keyword-only parameter 'kw1' positionally (through *args)
    # is rejected with an explicit error message
    with raises(ValueError) as excinfo:
        filter_args(func_with_kwonly_args, [], (1, 2, 3), {"kw2": 2})
    excinfo.match("Keyword-only parameter 'kw1' was passed as positional " "parameter")

    assert filter_args(func_with_kwonly_args, ["b", "kw2"], (1, 2), {"kw1": 3, "kw2": 4}) == {"a": 1, "kw1": 3}

    assert filter_args(func_with_signature, ["b"], (1, 2)) == {"a": 1}
Code example #17
def test_parallel_pickling():
    """ Check that pmap captures the errors when it is passed an object
        that cannot be pickled.
    """
    def g(x):
        return x**2

    try:
        # pickling a local function always fails, but the exception raised
        # is a PickleError for Python <= 3.4 and an AttributeError for
        # Python >= 3.5
        pickle.dumps(g)
    except Exception as exc:
        exception_class = exc.__class__

    with raises(exception_class):
        Parallel()(delayed(g)(x) for x in range(10))
Code example #18
def test_call_and_shelve(tmpdir):
    # Test MemorizedFunc outputting a reference to cache.

    for func, Result in zip((
            MemorizedFunc(f, tmpdir.strpath),
            NotMemorizedFunc(f),
            Memory(location=tmpdir.strpath, verbose=0).cache(f),
            Memory(location=None).cache(f),
    ), (MemorizedResult, NotMemorizedResult, MemorizedResult,
            NotMemorizedResult)):
        assert func(2) == 5
        result = func.call_and_shelve(2)
        assert isinstance(result, Result)
        assert result.get() == 5

        result.clear()
        with raises(KeyError):
            result.get()
        result.clear()  # Do nothing if there is no cache.
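
The cached function `f` is defined elsewhere in the test module; given that `func(2) == 5`, it is presumably something along these lines (a guess, not necessarily the verbatim joblib definition):

def f(x, y=1):
    # Deterministic toy function to cache: f(2) == 2 ** 2 + 1 == 5.
    return x ** 2 + y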
Code example #19
File: test_func_inspect.py  Project: raghavrv/joblib
def test_filter_args_python_3():
    assert (
        filter_args(func_with_kwonly_args, [], (1, 2),
                    {'kw1': 3, 'kw2': 4}) ==
        {'a': 1, 'b': 2, 'kw1': 3, 'kw2': 4})

    # Passing the keyword-only parameter 'kw1' positionally (through *args)
    # is rejected with an explicit error message
    with raises(ValueError) as excinfo:
        filter_args(func_with_kwonly_args, [], (1, 2, 3), {'kw2': 2})
    excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
                  "parameter")

    assert (
        filter_args(func_with_kwonly_args, ['b', 'kw2'], (1, 2),
                    {'kw1': 3, 'kw2': 4}) ==
        {'a': 1, 'kw1': 3})

    assert (filter_args(func_with_signature, ['b'], (1, 2)) == {'a': 1})
Code example #20
def test_filter_args_python_3():
    assert (
        filter_args(func_with_kwonly_args, [], (1, 2),
                    {'kw1': 3, 'kw2': 4}) ==
        {'a': 1, 'b': 2, 'kw1': 3, 'kw2': 4})

    # Passing the keyword-only parameter 'kw1' positionally (through *args)
    # is rejected with an explicit error message
    with raises(ValueError) as excinfo:
        filter_args(func_with_kwonly_args, [], (1, 2, 3), {'kw2': 2})
    excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
                  "parameter")

    assert (
        filter_args(func_with_kwonly_args, ['b', 'kw2'], (1, 2),
                    {'kw1': 3, 'kw2': 4}) ==
        {'a': 1, 'kw1': 3})

    assert (filter_args(func_with_signature, ['b'], (1, 2)) == {'a': 1})
Code example #21
def test_weak_array_key_map():
    a = np.ones(42)
    m = _WeakArrayKeyMap()
    m.set(a, 42)
    assert m.get(a) == 42

    b = a
    assert m.get(b) == 42
    m.set(b, -42)
    assert m.get(a) == -42

    del a
    gc.collect()
    assert len(m._data) == 1
    assert m.get(b) == -42

    del b
    gc.collect()
    assert len(m._data) == 0

    m.set(np.ones(42), 42)
    with raises(KeyError):
        m.get(np.ones(42))
    gc.collect()
    assert len(m._data) == 0

    # Check that creating and dropping numpy arrays with potentially the same
    # object id will not cause the map to get confused.
    def get_set_get_collect(m, i):
        a = np.ones(42)
        with raises(KeyError):
            m.get(a)
        m.set(a, i)
        assert m.get(a) == i
        return id(a)

    unique_ids = set([get_set_get_collect(m, i) for i in range(1000)])
    if platform.python_implementation() == 'CPython':
        # On CPython (at least) the same id is often reused many times for the
        # temporary arrays created under the local scope of the
        # get_set_get_collect function without causing any spurious lookups /
        # insertions in the map.
        assert len(unique_ids) < 100
Code example #22
File: test_parallel.py  Project: pierreglaser/joblib
def test_nested_exception_dispatch(backend):
    """Ensure errors for nested joblib cases gets propagated

    For Python 2.7, the TransportableException wrapping and unwrapping should
    preserve the traceback information of the inner function calls.

    For Python 3, we rely on the built-in __cause__ system that already
    reports this kind of information to the user.
    """
    if PY27 and backend == 'multiprocessing':
        raise SkipTest("Nested parallel calls can deadlock with the python 2.7"
                       "multiprocessing backend.")

    def nested_function_inner(i):
        Parallel(n_jobs=2)(
            delayed(exception_raiser)(j) for j in range(30))

    def nested_function_outer(i):
        Parallel(n_jobs=2)(
            delayed(nested_function_inner)(j) for j in range(30))

    with raises(ValueError) as excinfo:
        Parallel(n_jobs=2, backend=backend)(
            delayed(nested_function_outer)(i) for i in range(30))

    # Check that important information such as function names are visible
    # in the final error message reported to the user
    report_lines = format_exception(excinfo.type, excinfo.value, excinfo.tb)
    report = "".join(report_lines)
    assert 'nested_function_outer' in report
    assert 'nested_function_inner' in report
    assert 'exception_raiser' in report

    if PY3_OR_LATER:
        # Under Python 3, there is no need for exception wrapping as the
        # exception raised in a worker process is transportable by default and
        # preserves the necessary information via the `__cause__` attribute.
        assert type(excinfo.value) is ValueError
    else:
        # The wrapping mechanism used to make exceptions from Python 2.7
        # transportable does not create a JoblibJoblibJoblibValueError
        # despite the 3 nested parallel calls.
        assert type(excinfo.value) is JoblibValueError
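
The `exception_raiser` helper is not shown; judging from the ValueError asserted above, it is assumed to be a small function that raises for some of its inputs, for example:

def exception_raiser(i):
    # Hypothetical stand-in: raise for one particular input so that the
    # nested Parallel calls propagate a ValueError up to the caller.
    if i == 7:
        raise ValueError('Expected failure from the innermost worker')
    return i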
Code example #23
def test_memory_exception(tmpdir):
    """ Smoketest the exception handling of Memory.
    """
    memory = Memory(cachedir=tmpdir.strpath, verbose=0)

    class MyException(Exception):
        pass

    @memory.cache
    def h(exc=0):
        if exc:
            raise MyException

    # Call once, to initialise the cache
    h()

    for _ in range(3):
        # Call 3 times, to be sure that the Exception is always raised
        with raises(MyException):
            h(1)
Code example #24
def test_call_and_shelve(tmpdir):
    """Test MemorizedFunc outputting a reference to cache.
    """

    for func, Result in zip((MemorizedFunc(f, tmpdir.strpath),
                             NotMemorizedFunc(f),
                             Memory(cachedir=tmpdir.strpath).cache(f),
                             Memory(cachedir=None).cache(f),
                             ),
                            (MemorizedResult, NotMemorizedResult,
                             MemorizedResult, NotMemorizedResult)):
        assert func(2) == 5
        result = func.call_and_shelve(2)
        assert isinstance(result, Result)
        assert result.get() == 5

        result.clear()
        with raises(KeyError):
            result.get()
        result.clear()  # Do nothing if there is no cache.
Code example #25
def test_memory_exception(tmpdir):
    """ Smoketest the exception handling of Memory.
    """
    memory = Memory(location=tmpdir.strpath, verbose=0)

    class MyException(Exception):
        pass

    @memory.cache
    def h(exc=0):
        if exc:
            raise MyException

    # Call once, to initialise the cache
    h()

    for _ in range(3):
        # Call 3 times, to be sure that the Exception is always raised
        with raises(MyException):
            h(1)
Code example #26
def test_check_subprocess_call_timeout():
    code_timing_out = '\n'.join([
        'import time',
        'import sys',
        'print("before sleep on stdout")',
        'sys.stdout.flush()',
        'sys.stderr.write("before sleep on stderr")',
        'sys.stderr.flush()',
        'time.sleep(1.1)',
        'print("process should have be killed before")',
        'sys.stdout.flush()'])

    pattern = re.compile('Non-zero return code:.+'
                         'Stdout:\nbefore sleep on stdout\s+'
                         'Stderr:\nbefore sleep on stderr',
                         re.DOTALL)

    with raises(ValueError) as excinfo:
        check_subprocess_call([sys.executable, '-c', code_timing_out],
                              timeout=1)
    excinfo.match(pattern)
Code example #27
File: test_memory.py  Project: joblib/joblib
def test_deprecated_cachedir_behaviour(tmpdir):
    # verify that the right deprecation warnings are raised when using the
    # cachedir option instead of the new location parameter.
    with warns(None) as w:
        memory = Memory(cachedir=tmpdir.strpath, verbose=0)
        assert memory.store_backend.location.startswith(tmpdir.strpath)

    assert len(w) == 1
    assert "The 'cachedir' parameter has been deprecated" in str(w[-1].message)

    with warns(None) as w:
        memory = Memory()
        assert memory.cachedir is None

    assert len(w) == 1
    assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message)

    error_regex = """You set both "location='.+ and "cachedir='.+"""
    with raises(ValueError, match=error_regex):
        memory = Memory(location=tmpdir.strpath, cachedir=tmpdir.strpath,
                        verbose=0)
Code example #28
File: test_memory.py  Project: nickmvincent/joblib
def test_deprecated_cachedir_behaviour(tmpdir):
    # verify that the right deprecation warnings are raised when using the
    # cachedir option instead of the new location parameter.
    with warns(None) as w:
        memory = Memory(cachedir=tmpdir.strpath, verbose=0)
        assert memory.store_backend.location.startswith(tmpdir.strpath)

    assert len(w) == 1
    assert "The 'cachedir' parameter has been deprecated" in str(w[-1].message)

    with warns(None) as w:
        memory = Memory()
        assert memory.cachedir is None

    assert len(w) == 1
    assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message)

    error_regex = """You set both "location='.+ and "cachedir='.+"""
    with raises(ValueError, match=error_regex):
        memory = Memory(location=tmpdir.strpath, cachedir=tmpdir.strpath,
                        verbose=0)
Code example #29
File: test_numpy_pickle.py  Project: 745698140/test_1
def test_compression_using_file_extension(tmpdir, extension, cmethod):
    # test that the compression method matches the given filename extension.
    filename = tmpdir.join('test.pkl').strpath
    obj = "object to dump"

    dump_fname = filename + extension
    if not PY3_OR_LATER and cmethod in ('xz', 'lzma'):
        # Lzma module only available for python >= 3.3
        msg = "{} compression is only available".format(cmethod)
        with raises(NotImplementedError) as excinfo:
            numpy_pickle.dump(obj, dump_fname)
        excinfo.match(msg)
    else:
        numpy_pickle.dump(obj, dump_fname)
        # Verify the file contains the right magic number
        with open(dump_fname, 'rb') as f:
            assert _detect_compressor(f) == cmethod
        # Verify the reloaded object is correct
        obj_reloaded = numpy_pickle.load(dump_fname)
        assert isinstance(obj_reloaded, type(obj))
        assert obj_reloaded == obj
Code example #30
def test_register_compressor_already_registered():
    # Test that re-registering an already registered compressor name raises
    # an error unless force=True is passed.
    compressor_name = 'test-name'

    # register a test compressor
    register_compressor(compressor_name, AnotherZlibCompressorWrapper())

    with raises(ValueError) as excinfo:
        register_compressor(compressor_name,
                            StandardLibGzipCompressorWrapper())
    excinfo.match("Compressor '{}' already registered."
                  .format(compressor_name))

    register_compressor(compressor_name, StandardLibGzipCompressorWrapper(),
                        force=True)

    assert compressor_name in _COMPRESSORS
    assert _COMPRESSORS[compressor_name].fileobj_factory == gzip.GzipFile

    # Remove the dummy compressor from the registry so that it does not
    # interfere with other tests
    _COMPRESSORS.pop(compressor_name)
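
`AnotherZlibCompressorWrapper` and `StandardLibGzipCompressorWrapper` are defined elsewhere in the test module; judging from the final assertion on `fileobj_factory`, they are assumed to look roughly like this (prefix bytes and import paths are assumptions):

import gzip

from joblib.compressor import BinaryZlibFile, CompressorWrapper


class AnotherZlibCompressorWrapper(CompressorWrapper):
    def __init__(self):
        CompressorWrapper.__init__(self, obj=BinaryZlibFile,
                                   prefix=b'prefix')


class StandardLibGzipCompressorWrapper(CompressorWrapper):
    def __init__(self):
        # The test above checks that fileobj_factory ends up being
        # gzip.GzipFile after re-registration with force=True.
        CompressorWrapper.__init__(self, obj=gzip.GzipFile,
                                   prefix=b'prefix')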
Code example #31
File: test_numpy_pickle.py  Project: 745698140/test_1
def test_joblib_compression_formats(tmpdir, compress, cmethod):
    filename = tmpdir.join('test.pkl').strpath
    objects = (np.ones(shape=(100, 100), dtype='f8'), range(10), {
        'a': 1,
        2: 'b'
    }, [], (), {}, 0, 1.0)

    dump_filename = filename + "." + cmethod
    for obj in objects:
        if not PY3_OR_LATER and cmethod in ('lzma', 'xz', 'lz4'):
            # Lzma module only available for python >= 3.3
            msg = "{} compression is only available".format(cmethod)
            error = NotImplementedError
            if cmethod == 'lz4':
                error = ValueError
            with raises(error) as excinfo:
                numpy_pickle.dump(obj,
                                  dump_filename,
                                  compress=(cmethod, compress))
            excinfo.match(msg)
        elif cmethod == 'lz4' and with_lz4.args[0]:
            # Skip the test if lz4 is not installed. Here we use the with_lz4
            # skipif fixture, whose argument is True when lz4 is not installed
            raise SkipTest("lz4 is not installed.")
        else:
            numpy_pickle.dump(obj, dump_filename, compress=(cmethod, compress))
            # Verify the file contains the right magic number
            with open(dump_filename, 'rb') as f:
                assert _detect_compressor(f) == cmethod
            # Verify the reloaded object is correct
            obj_reloaded = numpy_pickle.load(dump_filename)
            assert isinstance(obj_reloaded, type(obj))
            if isinstance(obj, np.ndarray):
                np.testing.assert_array_equal(obj_reloaded, obj)
            else:
                assert obj_reloaded == obj
Code example #32
def test_delayed_check_pickle_deprecated():
    class UnpicklableCallable(object):
        def __call__(self, *args, **kwargs):
            return 42

        def __reduce__(self):
            raise ValueError()

    with warns(DeprecationWarning):
        f, args, kwargs = delayed(lambda x: 42, check_pickle=False)('a')
    assert f('a') == 42
    assert args == ('a', )
    assert kwargs == dict()

    with warns(DeprecationWarning):
        f, args, kwargs = delayed(UnpicklableCallable(),
                                  check_pickle=False)('a', option='b')
        assert f('a', option='b') == 42
        assert args == ('a', )
        assert kwargs == dict(option='b')

    with warns(DeprecationWarning):
        with raises(ValueError):
            delayed(UnpicklableCallable(), check_pickle=True)
Code example #33
def test_memory_default_store_backend():
    # verify that an unknown backend raises a TypeError instead of silently
    # falling back to a FileSystemStoreBackend
    with raises(TypeError) as excinfo:
        Memory(location='/tmp/joblib', backend='unknown')
    excinfo.match(r"Unknown location*")
Code example #34
def test_register_invalid_store_backends_object():
    # verify the right exceptions are raised when passing a wrong backend
    # object.
    with raises(ValueError) as excinfo:
        register_store_backend("fs", None)
    excinfo.match(r'Store backend should inherit StoreBackendBase*')
Code example #35
def test_register_invalid_store_backends_key(invalid_prefix):
    # verify the right exceptions are raised when passing a wrong backend key.
    with raises(ValueError) as excinfo:
        register_store_backend(invalid_prefix, None)
    excinfo.match(r'Store backend name should be a string*')
Code example #36
def test_safe_function():
    safe_division = SafeFunction(division)
    with raises(JoblibException):
        safe_division(1, 0)
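
SafeFunction wraps a callable so that, as asserted above, an exception raised inside it surfaces as a JoblibException. The `division` helper is assumed to be the obvious one:

def division(x, y):
    # Hypothetical stand-in: dividing by zero raises ZeroDivisionError,
    # which SafeFunction is expected to re-raise as a JoblibException.
    return x / y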
Code example #37
File: test_numpy_pickle.py  Project: Aathi410/Pro123
def test_binary_zlibfile_invalid_filename_type(bad_file):
    with raises(TypeError) as excinfo:
        BinaryZlibFile(bad_file, 'rb')
    excinfo.match("filename must be a str or bytes object, or a file")
Code example #38
File: test_numpy_pickle.py  Project: Aathi410/Pro123
def test_compress_tuple_argument_exception(tmpdir, compress_tuple, message):
    filename = tmpdir.join('test.pkl').strpath
    # Verify setting a wrong compress tuple raises a ValueError.
    with raises(ValueError) as excinfo:
        numpy_pickle.dump('dummy', filename, compress=compress_tuple)
    excinfo.match(message)
Code example #39
def test_weak_array_key_map_no_pickling():
    m = _WeakArrayKeyMap()
    with raises(pickle.PicklingError):
        pickle.dumps(m)
Code example #40
File: test_numpy_pickle.py  Project: 745698140/test_1
def _check_pickle(filename, expected_list):
    """Helper function to test joblib pickle content.

    Note: currently only pickles containing an iterable are supported
    by this function.
    """
    if not PY3_OR_LATER:
        if filename.endswith('.xz') or filename.endswith('.lzma'):
            # lzma is not implemented in python versions < 3.3
            with raises(NotImplementedError):
                numpy_pickle.load(filename)
        elif filename.endswith('.lz4'):
            # lz4 is not supported for python versions < 3.3
            with raises(ValueError) as excinfo:
                numpy_pickle.load(filename)
            assert excinfo.match("lz4 compression is only available with "
                                 "python3+")
        return

    version_match = re.match(r'.+py(\d)(\d).+', filename)
    py_version_used_for_writing = int(version_match.group(1))
    py_version_used_for_reading = sys.version_info[0]

    py_version_to_default_pickle_protocol = {2: 2, 3: 3}
    pickle_reading_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_reading, 4)
    pickle_writing_protocol = py_version_to_default_pickle_protocol.get(
        py_version_used_for_writing, 4)
    if pickle_reading_protocol >= pickle_writing_protocol:
        try:
            with warns(None) as warninfo:
                warnings.simplefilter('always')
                warnings.filterwarnings(
                    'ignore',
                    module='numpy',
                    message='The compiler package is deprecated')
                result_list = numpy_pickle.load(filename)
            filename_base = os.path.basename(filename)
            expected_nb_warnings = 1 if ("_0.9" in filename_base
                                         or "_0.8.4" in filename_base) else 0
            assert len(warninfo) == expected_nb_warnings
            for w in warninfo:
                assert w.category == DeprecationWarning
                assert (str(w.message) ==
                        "The file '{0}' has been generated with a joblib "
                        "version less than 0.10. Please regenerate this "
                        "pickle file.".format(filename))
            for result, expected in zip(result_list, expected_list):
                if isinstance(expected, np.ndarray):
                    assert result.dtype == expected.dtype
                    np.testing.assert_equal(result, expected)
                else:
                    assert result == expected
        except Exception as exc:
            # When trying to read with python 3 a pickle generated
            # with python 2 we expect a user-friendly error
            if (py_version_used_for_reading == 3
                    and py_version_used_for_writing == 2):
                assert isinstance(exc, ValueError)
                message = ('You may be trying to read with '
                           'python 3 a joblib pickle generated with python 2.')
                assert message in str(exc)
            elif filename.endswith('.lz4') and with_lz4.args[0]:
                assert isinstance(exc, ValueError)
                assert LZ4_NOT_INSTALLED_ERROR in str(exc)
            else:
                raise
    else:
        # Pickle protocol used for writing is too high. We expect an
        # "unsupported pickle protocol" error message
        try:
            numpy_pickle.load(filename)
            raise AssertionError('Numpy pickle loading should '
                                 'have raised a ValueError exception')
        except ValueError as e:
            message = 'unsupported pickle protocol: {0}'.format(
                pickle_writing_protocol)
            assert message in str(e.args)
Code example #41
def test_wrong_hash_name():
    msg = "Valid options for 'hash_name' are"
    with raises(ValueError, match=msg):
        data = {'foo': 'bar'}
        hash(data, hash_name='invalid')
Code example #42
File: test_memmapping.py  Project: Aathi410/Pro123
def test_weak_array_key_map_no_pickling():
    m = _WeakArrayKeyMap()
    with raises(pickle.PicklingError):
        pickle.dumps(m)
Code example #43
def test_invalid_batch_size(batch_size):
    with raises(ValueError):
        Parallel(batch_size=batch_size)
Code example #44
File: test_memory.py  Project: joblib/joblib
def test_register_invalid_store_backends_object():
    # verify the right exceptions are raised when passing a wrong backend
    # object.
    with raises(ValueError) as excinfo:
        register_store_backend("fs", None)
    excinfo.match(r'Store backend should inherit StoreBackendBase*')
Code example #45
File: test_memory.py  Project: joblib/joblib
def test_register_invalid_store_backends_key(invalid_prefix):
    # verify the right exceptions are raised when passing a wrong backend key.
    with raises(ValueError) as excinfo:
        register_store_backend(invalid_prefix, None)
    excinfo.match(r'Store backend name should be a string*')
Code example #46
File: test_disk.py  Project: ELVIS-Project/music21
def test_memstr_to_bytes_exception(text, exception, regex):
    with raises(exception) as excinfo:
        memstr_to_bytes(text)
    assert excinfo.match(regex)
Code example #47
File: test_numpy_pickle.py  Project: Aathi410/Pro123
def test_value_error():
    # Test inverting the input arguments to dump
    with raises(ValueError):
        numpy_pickle.dump('foo', dict())
Code example #48
def test_check_subprocess_call_wrong_command():
    wrong_command = '_a_command_that_does_not_exist_'
    with raises(OSError):
        check_subprocess_call([wrong_command])
Code example #49
File: test_numpy_pickle.py  Project: Aathi410/Pro123
def test_binary_zlibfile_invalid_modes(tmpdir, bad_mode):
    filename = tmpdir.join('test.pkl').strpath
    with raises(ValueError) as excinfo:
        BinaryZlibFile(filename, bad_mode)
    excinfo.match("Invalid mode")
Code example #50
File: test_memory.py  Project: joblib/joblib
def test_memory_default_store_backend():
    # verify that an unknown backend raises a TypeError instead of silently
    # falling back to a FileSystemStoreBackend
    with raises(TypeError) as excinfo:
        Memory(location='/tmp/joblib', backend='unknown')
    excinfo.match(r"Unknown location*")
Code example #51
File: test_numpy_pickle.py  Project: Aathi410/Pro123
def test_register_compressor_invalid_name(invalid_name):
    # Test that registering an invalid compressor name is not allowed.
    with raises(ValueError) as excinfo:
        register_compressor(invalid_name, None)
    excinfo.match("Compressor name should be a string")
Code example #52
def test_invalid_njobs(backend):
    with raises(ValueError) as excinfo:
        Parallel(n_jobs=0, backend=backend)._initialize_backend()
    assert "n_jobs == 0 in Parallel has no meaning" in str(excinfo.value)