Example #1
0
def test_pool_context_sync(start_method):
    """After ``pool.reset()``, all previously scheduled batches must be
    discarded (and workers synced so they cannot race on writing to the
    results buffer); only work scheduled after the reset is received.

    Fix: the per-iteration ``tasks`` list built inside the scheduling loop
    was dead code — overwritten every iteration and then reassigned after
    the reset before its first use — so it has been removed.
    """
    callbacks = [simple_callback, another_callback]
    groups = [
        MockGroup.from_callback(cb, prefetch_queue_depth=3) for cb in callbacks
    ]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=4,
                     start_method=start_method) as pool:
        capture_processes(pool)
        # Schedule 4 batches of growing size (10, 20, 30, 40 samples)
        # in both contexts; all of these should be thrown away by reset().
        for i in range(4):
            work_batch = TaskArgs.make_sample(
                SampleRange(0, 10 * (i + 1), 0, 0))
            pool.schedule_batch(context_i=0, work_batch=work_batch)
            pool.schedule_batch(context_i=1, work_batch=work_batch)
        assert_scheduled_num(pool.contexts[0], 4)
        assert_scheduled_num(pool.contexts[1], 4)
        # pool after a reset should discard all previously scheduled tasks
        # (and sync workers to avoid race on writing to results buffer)
        pool.reset()
        tasks = [(SampleInfo(1000 + j, j, 0, 1), ) for j in range(5)]
        work_batch = TaskArgs.make_sample(SampleRange(1000, 1005, 0, 1))
        pool.schedule_batch(context_i=0, work_batch=work_batch)
        pool.schedule_batch(context_i=1, work_batch=work_batch)
        # Only the single post-reset batch may remain scheduled per context.
        assert_scheduled_num(pool.contexts[0], 1)
        assert_scheduled_num(pool.contexts[1], 1)
        batch_0 = pool.receive_batch(context_i=0)
        batch_1 = pool.receive_batch(context_i=1)
        assert len(batch_0) == len(tasks)
        assert len(batch_1) == len(tasks)
        # With 4 workers the handling pid is unspecified, so compare the
        # answer with the pid entry (slot 0) stripped.
        for task, sample in zip(tasks, batch_0):
            np.testing.assert_array_equal(answer(-1, *task)[1:], sample[1:])
        for task, sample in zip(tasks, batch_1):
            np.testing.assert_array_equal(
                answer(-1, *task)[1:] + 100, sample[1:])
Example #2
0
def test_pool_no_overwrite_batch(start_method):
    """With ``keep_alive_queue_size=depth`` the pool must keep ``depth``
    completed batches alive simultaneously without overwriting earlier
    results, for several queue depths.

    Fix: dropped a pointless ``enumerate`` whose index was unused in the
    scheduling loop, and the throwaway ``tasks_batches`` intermediate.
    """
    groups = [MockGroup.from_callback(simple_callback, prefetch_queue_depth=0)]
    for depth in [1, 2, 4, 8]:
        with create_pool(groups,
                         keep_alive_queue_size=depth,
                         num_workers=1,
                         start_method=start_method) as pool:
            pids = get_pids(pool)
            pid = pids[0]
            # One single-sample batch per queue slot.
            work_batches = [
                TaskArgs.make_sample(SampleRange(i, i + 1, i, 0))
                for i in range(depth)
            ]
            task_list = [[(SampleInfo(i, 0, i, 0), )] for i in range(depth)]
            for work_batch in work_batches:
                pool.schedule_batch(context_i=0, work_batch=work_batch)
            assert_scheduled_num(pool.contexts[0], depth)
            # Receive all batches BEFORE verifying any, so that every result
            # buffer must stay valid until the end of the loop.
            batches = []
            for i in range(depth):
                batches.append(pool.receive_batch(context_i=0))
                assert_scheduled_num(pool.contexts[0], depth - 1 - i)
            for tasks, batch in zip(task_list, batches):
                for task, sample in zip(tasks, batch):
                    np.testing.assert_array_equal(answer(pid, *task), sample)
Example #3
0
def test_pool_invalid_return():
    """Schedule and receive a batch produced by ``invalid_callback``.

    NOTE(review): there is no assertion in this body — presumably an
    expected-error decorator outside this chunk checks that the worker
    failure propagates out of ``receive_batch``; confirm in the full file.
    """
    groups = [MockGroup.from_callback(invalid_callback)]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=1,
                     start_method="spawn") as pool:
        _ = get_pids(pool)
        batch = TaskArgs.make_sample(SampleRange(0, 1, 0, 0))
        pool.schedule_batch(context_i=0, work_batch=batch)
        pool.receive_batch(context_i=0)
Example #4
0
def test_pool_multi_task(start_method):
    """A single worker runs one 10-sample batch; every returned sample must
    equal the reference ``answer`` computed for that worker's pid."""
    groups = [MockGroup.from_callback(simple_callback)]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=1,
                     start_method=start_method) as pool:
        worker_pid = get_pids(pool)[0]
        expected_tasks = [(SampleInfo(i, i, 0, 0), ) for i in range(10)]
        pool.schedule_batch(context_i=0,
                            work_batch=TaskArgs.make_sample(
                                SampleRange(0, 10, 0, 0)))
        received = pool.receive_batch(context_i=0)
        for task, sample in zip(expected_tasks, received):
            np.testing.assert_array_equal(answer(worker_pid, *task), sample)
Example #5
0
def test_pool_work_split_multiple_tasks(start_method):
    """A 16-sample batch is split across two workers. Which worker handled
    a given sample is unspecified, so the pid entry (slot 0) is excluded
    from the comparison."""
    groups = [MockGroup.from_callback(simple_callback)]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=2,
                     start_method=start_method) as pool:
        num_tasks = 16
        assert len(get_pids(pool)) == 2
        expected_tasks = [(SampleInfo(i, i, 0, 0), ) for i in range(num_tasks)]
        pool.schedule_batch(context_i=0,
                            work_batch=TaskArgs.make_sample(
                                SampleRange(0, num_tasks, 0, 0)))
        received = pool.receive_batch(context_i=0)
        for task, sample in zip(expected_tasks, received):
            np.testing.assert_array_equal(answer(-1, *task)[1:], sample[1:])
Example #6
0
def test_pool_many_ctxs(start_method):
    """Two callback groups share one worker; each context's result must
    match its own callback's reference output (``another_callback`` adds
    100 to ``simple_callback``'s answer).

    Fix: removed the dead ``pid = pids[0]`` assignment — it was shadowed
    by the ``pid`` loop target in both ``zip`` loops before any use.
    """
    callbacks = [simple_callback, another_callback]
    groups = [MockGroup.from_callback(cb) for cb in callbacks]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=1,
                     start_method=start_method) as pool:
        pids = get_pids(pool)
        tasks = [(SampleInfo(0, 0, 0, 0), )]
        work_batch = TaskArgs.make_sample(SampleRange(0, 1, 0, 0))
        pool.schedule_batch(context_i=0, work_batch=work_batch)
        pool.schedule_batch(context_i=1, work_batch=work_batch)
        batch_0 = pool.receive_batch(context_i=0)
        batch_1 = pool.receive_batch(context_i=1)
        # Single task + single worker: zip pairs the one task with the one
        # worker pid, so the expected answer uses that worker's pid.
        for task, sample, pid in zip(tasks, batch_0, pids):
            np.testing.assert_array_equal(answer(pid, *task), sample)
        for task, sample, pid in zip(tasks, batch_1, pids):
            np.testing.assert_array_equal(answer(pid, *task) + 100, sample)
Example #7
0
def test_pool_iterator_dedicated_worker(start_method):
    """A batch-mode (iterator) callback group must be pinned to one dedicated
    worker, while a per-sample group has no dedicated worker; results from
    the iterator group must all carry the dedicated worker's pid.

    Schedules 4 rounds of growing-size batches (1, 2, 3, 4 samples) into
    both contexts, then receives and verifies them in order.
    """
    groups = [
        MockGroup.from_callback(simple_callback, prefetch_queue_depth=3),
        MockGroup.from_callback(IteratorCb(),
                                prefetch_queue_depth=3,
                                batch=True)
    ]
    with create_pool(groups,
                     keep_alive_queue_size=1,
                     num_workers=4,
                     start_method=start_method) as pool:
        pids = get_pids(pool)
        assert len(pids) == 4
        tasks_list = []
        # Running total of samples scheduled so far; used as the global
        # sample-index offset for each round's SampleRange.
        samples_count = 0
        for i in range(4):
            # Round i produces i + 1 samples.
            tasks = [(SampleInfo(samples_count + j, j, i, 0), )
                     for j in range(i + 1)]
            tasks_list.append(tasks)
            work_batch = TaskArgs.make_sample(
                SampleRange(samples_count, samples_count + i + 1, i, 0))
            samples_count += len(tasks)
            pool.schedule_batch(context_i=0, work_batch=work_batch)
            pool.schedule_batch(context_i=1,
                                work_batch=TaskArgs.make_batch((i, )))
        # Per-sample group: no dedicated worker; batch group: pinned worker.
        assert pool.contexts[0].dedicated_worker_id is None
        iter_worker_num = pool.contexts[1].dedicated_worker_id
        iter_worker_pid = pool.pool._processes[iter_worker_num].pid
        for i in range(4):
            batch_0 = pool.receive_batch(context_i=0)
            batch_1 = pool.receive_batch(context_i=1)
            tasks = tasks_list[i]
            assert len(batch_0) == len(tasks)
            assert len(batch_1) == len(tasks)
            # Per-sample work may land on any of the 4 workers, so compare
            # with the pid entry (slot 0) stripped.
            for task, sample in zip(tasks, batch_0):
                np.testing.assert_array_equal(
                    answer(-1, *task)[1:], sample[1:])
            # Iterator results must come from the dedicated worker's pid.
            for sample in batch_1:
                np.testing.assert_array_equal(
                    np.array([iter_worker_pid, i + 1]), sample)