def check_dispatch_multiprocessing(backend):
    """Check that with pre_dispatch, Parallel does dispatch items lazily.

    A producer generator records a 'Produced %i' event each time a task is
    drawn from it; the consumer records a 'Consumed ...' event when the task
    runs.  With pre_dispatch=3 only three tasks may be queued before any
    result comes back, which constrains the ordering of the recorded events.
    """
    if mp is None:
        # multiprocessing is unavailable on this platform: skip the check.
        raise SkipTest()
    manager = mp.Manager()
    # Shared, process-safe list so workers and the producer log to one place.
    queue = manager.list()

    def producer():
        for i in range(6):
            queue.append('Produced %i' % i)
            yield i

    Parallel(n_jobs=2, batch_size=1, pre_dispatch=3, backend=backend)(
        delayed(consumer)(queue, 'any') for _ in producer())

    # Only 3 tasks are dispatched out of 6. The 4th task is dispatched only
    # after any of the first 3 jobs have completed.
    first_four = list(queue)[:4]
    # The first consumption event can sometimes happen before the end of
    # the dispatching, hence, pop it before introspecting the "Produced"
    # events.
    first_four.remove('Consumed any')
    assert_equal(first_four, ['Produced 0', 'Produced 1', 'Produced 2'])
    # 6 'Produced' + 6 'Consumed' events in total.
    assert len(queue) == 12
def check_dispatch_multiprocessing(backend):
    """ Check that using pre_dispatch Parallel does indeed dispatch items
    lazily.
    """
    if mp is None:
        raise SkipTest()
    mgr = mp.Manager()
    shared_log = mgr.list()

    def feed_tasks():
        # Log a 'Produced' event each time Parallel pulls a new task.
        for idx in range(6):
            shared_log.append('Produced %i' % idx)
            yield idx

    Parallel(n_jobs=2, batch_size=1, pre_dispatch=3, backend=backend)(
        delayed(consumer)(shared_log, 'any') for _ in feed_tasks())

    # pre_dispatch=3 means at most 3 of the 6 tasks are queued up front;
    # task number 4 is only dispatched after one of the first 3 completes.
    head = list(shared_log)[:4]
    # A consumption event may land before dispatching finishes, so drop it
    # before inspecting the order of the 'Produced' events.
    head.remove('Consumed any')
    assert_equal(head, ['Produced 0', 'Produced 1', 'Produced 2'])
    assert len(shared_log) == 12
def check_dispatch_one_job(backend):
    """Test that with only one job, Parallel does act as an iterator.

    With n_jobs=1 no worker pool is spawned: tasks run inline, so production
    and consumption events interleave deterministically.  batch_size controls
    how many tasks are drawn from the generator before being executed.
    """
    queue = list()

    def producer():
        for i in range(6):
            queue.append('Produced %i' % i)
            yield i

    # disable batching: each task runs right after it is produced, so the
    # events strictly alternate.
    Parallel(n_jobs=1, batch_size=1, backend=backend)(
        delayed(consumer)(queue, x) for x in producer())
    assert_equal(queue, [
        'Produced 0', 'Consumed 0',
        'Produced 1', 'Consumed 1',
        'Produced 2', 'Consumed 2',
        'Produced 3', 'Consumed 3',
        'Produced 4', 'Consumed 4',
        'Produced 5', 'Consumed 5',
    ])
    assert len(queue) == 12

    # empty the queue for the next check
    queue[:] = []

    # enable batching: 4 tasks are drawn, then executed, then the remaining 2.
    Parallel(n_jobs=1, batch_size=4, backend=backend)(
        delayed(consumer)(queue, x) for x in producer())
    assert_equal(queue, [
        # First batch
        'Produced 0', 'Produced 1', 'Produced 2', 'Produced 3',
        'Consumed 0', 'Consumed 1', 'Consumed 2', 'Consumed 3',

        # Second batch
        'Produced 4', 'Produced 5', 'Consumed 4', 'Consumed 5',
    ])
    assert len(queue) == 12
def check_dispatch_one_job(backend):
    """ Test that with only one job, Parallel does act as a iterator.
    """
    events = list()

    def emit():
        for idx in range(6):
            events.append('Produced %i' % idx)
            yield idx

    # batch_size=1 turns batching off: produce/consume strictly interleave.
    Parallel(n_jobs=1, batch_size=1, backend=backend)(
        delayed(consumer)(events, item) for item in emit())
    expected_interleaved = []
    for idx in range(6):
        expected_interleaved.append('Produced %i' % idx)
        expected_interleaved.append('Consumed %i' % idx)
    assert_equal(events, expected_interleaved)
    assert len(events) == 12

    # Clear the shared event log before exercising the batched case.
    del events[:]

    # batch_size=4 groups tasks: a whole batch is produced, then consumed.
    Parallel(n_jobs=1, batch_size=4, backend=backend)(
        delayed(consumer)(events, item) for item in emit())
    first_batch = (['Produced %i' % idx for idx in range(4)] +
                   ['Consumed %i' % idx for idx in range(4)])
    second_batch = ['Produced 4', 'Produced 5', 'Consumed 4', 'Consumed 5']
    assert_equal(events, first_batch + second_batch)
    assert len(events) == 12