def test_do_tasks_after_close_raises_exc():
    """do_tasks() on an already-closed pool must raise MultiprocessClosed."""
    pool = Multiprocess()
    pool.add_tasks(lambda: 1, [()])
    pool.close()
    with pytest.raises(MultiprocessClosed):
        pool.do_tasks()
def test_empty_fn():
    """A task whose function body does nothing runs without error."""
    def noop():
        pass

    pool = Multiprocess()
    pool.add_tasks(noop, [()])
    pool.do_tasks()
    pool.close()
def test_lambda_pickling():
    """A lambda task survives transfer to the worker and pushes to the queue."""
    queue = Queue()
    pool = Multiprocess()
    pool.add_tasks(lambda q: q.push(1), [(queue,)])
    pool.do_tasks()
    pool.close()
    assert queue.qsize() == 1
    assert queue.pop() == 1
    assert queue.qsize() == 0
def test_fn_raises_exc_is_caught():
    """An exception raised inside a task surfaces as MultiprocessProcessException,
    carrying the original message."""
    pool = Multiprocess()

    def boom():
        raise ValueError('unique')

    pool.add_tasks(boom, [()])
    with pytest.raises(MultiprocessProcessException) as excinfo:
        pool.do_tasks()
    assert 'unique' in str(excinfo.value)
    pool.close()
def test_do_tasks_after_error_raises_exc():
    """Once a task has errored, any further do_tasks() raises MultiprocessClosed."""
    def failing():
        raise ValueError('error')

    pool = Multiprocess()
    pool.add_tasks(failing, [()])
    with pytest.raises(MultiprocessProcessException):
        pool.do_tasks()
    with pytest.raises(MultiprocessClosed):
        pool.do_tasks()
    pool.close()
def test_do_tasks_twice():
    """do_tasks() may be called again after adding a fresh batch of tasks.

    Each batch pushes one item, so the queue grows by one per round.
    """
    def push_one(q):
        q.push(1)

    queue = Queue()
    pool = Multiprocess()
    pool.add_tasks(push_one, [(queue,)])
    pool.do_tasks()
    assert queue.qsize() == 1
    pool.add_tasks(push_one, [(queue,)])
    pool.do_tasks()
    assert queue.qsize() == 2
    # Bug fix: the original test never closed the pool, leaking worker
    # resources; every sibling test releases them via close().
    pool.close()
def test_queue():
    """Items pushed by a worker are visible on the shared queue afterwards."""
    queue = Queue()
    pool = Multiprocess()

    def push_one(q):
        q.push(1)

    pool.add_tasks(push_one, [(queue,)])
    pool.do_tasks()
    pool.close()
    assert queue.qsize() == 1
    assert queue.pop() == 1
    assert queue.qsize() == 0
def test_stdout(capsys):
    """The loading bar is written to stderr only; stdout stays empty."""
    pool = Multiprocess()
    sys.stderr.write("\r\n")
    pool.add_tasks(lambda: 1, [()])
    pool.do_tasks()
    pool.close()
    out, err = capsys.readouterr()
    assert out == ''
    # Keep only the text after the last carriage return on each line —
    # i.e. the final state of each in-place-updated progress line.
    lines = [line.split('\r')[-1] for line in err.split('\n')]
    # Drop the leading blank entries produced by the initial "\r\n" write.
    while lines and lines[0] == '':
        lines = lines[1:]
    assert len(lines) == 2
    assert lines[0].startswith('100%')
    assert lines[1] == ''
def test_multiple_tasks():
    """Each of 100 distinct task arguments lands in the queue exactly once."""
    def push_num(q, num):
        q.push(num)

    queue = Queue()
    pool = Multiprocess()
    nums = range(100)
    pool.add_tasks(push_num, [(queue, n) for n in nums])
    pool.do_tasks()
    pool.close()
    assert queue.qsize() == len(nums)
    # Drain the queue; ordering across workers is not guaranteed,
    # so compare as sets.
    popped = [queue.pop() for _ in range(queue.qsize())]
    assert set(popped) == set(nums)
def multiprocess(fn, arr_of_args, **kwargs):
    """Execute several tasks in parallel.

    Requires a function `fn` and an array of argument tuples
    `arr_of_args`, each representing a call to the function.
    Additionally, you can provide arguments the same as you would
    with `Multiprocess`

    Example

    >>> # exec f(x) and f(y) in parallel
    >>> multiprocess(f, [(x,), (y,)])

    If you don't want a loading bar

    >>> multiprocess(f, [(x,), (y,)], show_loading_bar=False)
    """
    pool = Multiprocess(**kwargs)
    pool.add_tasks(fn, arr_of_args)
    pool.do_tasks()
    pool.close()