Example #1
def asynchronous(func,
                 params=None,
                 workers=None,
                 log_contexts=None,
                 final_timeout=2.0,
                 **kw):
    """
    Map the list of tuple-parameters onto asynchronous calls to the specified function::

        with asynchronous(connect, [(host1,), (host2,), (host3,)]) as futures:
            ...

        connections = futures.results()

    :param func: The callable to invoke asynchronously.
    :param params: A list of tuples to map onto the function.
    :param workers: The number of workers to use. Defaults to the number of items in ``params``, capped at ``MAX_THREAD_POOL_SIZE``.
    :param log_contexts: An optional list of logging context objects, matching the items in ``params``.
    :param final_timeout: The amount of time to allow for the futures to complete after exiting the asynchronous context.
    """
    if params is None:
        params = [()]

    # don't use listify - we want to listify tuples too
    if not isinstance(params, list):
        params = [params]

    params = _to_args_list(params)
    log_contexts = _to_log_contexts(params, log_contexts)
    if workers is None:
        workers = min(MAX_THREAD_POOL_SIZE, len(params))

    try:
        signature = inspect.signature(func)
    except ValueError:
        # In Python 3.5+, inspect.signature raises ValueError for built-in types
        pass
    else:
        if '_sync' in signature.parameters and '_sync' not in kw:
            assert len(params) <= workers, (
                'SynchronizationCoordinator with %s tasks but only %s workers'
                % (len(params), workers))
            synchronization_coordinator = SynchronizationCoordinator(len(params))
            kw['_sync'] = synchronization_coordinator

            func = synchronization_coordinator._abandon_when_done(func)

    if not params:
        # noop
        yield Futures()
        return

    with Futures.executor(workers=workers) as futures:
        for args, ctx in zip(params, log_contexts):
            futures.submit(func, *args, log_ctx=ctx, **kw)

        yield futures
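
A minimal usage sketch for the context manager above (not from the source): ``connect`` and the host names are hypothetical placeholders, and ``futures.results()`` is used exactly as the docstring shows.

def _usage_sketch():
    hosts = ['db1', 'db2', 'db3']  # hypothetical host names

    # each tuple becomes one asynchronous call: connect('db1'), connect('db2'), ...
    with asynchronous(connect, [(h,) for h in hosts], workers=len(hosts)) as futures:
        ...  # other work can run here while the connections are being made

    # gather the results; exceptions raised by the workers bubble up here
    connections = futures.results()
    return connections
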
Example #2
def test_synchronization_coordinator_abandon():
    mo = MultiObject(range(3))

    sync = SynchronizationCoordinator(len(mo))
    executed = []

    def foo(i):
        def execute(caption):
            executed.append((i, caption))

        sync.wait_for_everyone()
        execute('after wait 1')

        if i == 2:
            sync.abandon()
            return
        # Only two waiters should reach here
        sync.wait_for_everyone()
        execute('after wait 2')

        # Even without explicit call to abandon, sync should only wait for two waiters
        sync.wait_for_everyone()
        execute('after wait 3')

    mo.call(foo)
    verify_concurrent_order(
        executed,
        {(i, 'after wait 1') for i in range(3)},
        {(i, 'after wait 2') for i in range(2)},
        {(i, 'after wait 3') for i in range(2)})
Example #3
def test_synchronization_coordinator_collect_and_call_once():
    mo = MultiObject(range(3))

    sync = SynchronizationCoordinator(len(mo))
    executed = []

    def foo(i):
        def execute(caption):
            executed.append((i, caption))

        sleep(i / 10)

        def func_to_call_once(param):
            executed.append('params = %s' % sorted(param))
            return sum(param)

        result = sync.collect_and_call_once(i + 1, func_to_call_once)
        execute('result is %s' % result)

        assert sync.collect_and_call_once(i, len) == 3, \
            'parameters remain from previous call'

    mo.call(foo)
    verify_concurrent_order(
        executed,
        {'params = [1, 2, 3]'},
        {(i, 'result is 6') for i in range(3)})
Example #4
def test_synchronization_coordinator_wait_for_everyone():
    mo = MultiObject(range(3))

    sync = SynchronizationCoordinator(len(mo))
    executed = []

    def foo(i):
        def execute(caption):
            executed.append((i, caption))

        sleep(i / 10)
        execute('after sleep')
        sync.wait_for_everyone()
        execute('after wait')
        sync.wait_for_everyone()

        sleep(i / 10)
        execute('after sleep 2')
        sync.wait_for_everyone()
        execute('after wait 2')

    mo.call(foo)
    verify_concurrent_order(
        executed,
        {(i, 'after sleep') for i in range(3)},
        {(i, 'after wait') for i in range(3)},
        {(i, 'after sleep 2') for i in range(3)},
        {(i, 'after wait 2') for i in range(3)})
Example #5
def test_synchronization_coordinator_exception_in_collect_and_call_once():
    mo = MultiObject(range(3))

    sync = SynchronizationCoordinator(len(mo))
    times_called = 0

    class MyException(Exception):
        pass

    def foo(i):
        def func_to_call_once(_):
            nonlocal times_called
            times_called += 1
            raise MyException

        with pytest.raises(MyException):
            sync.collect_and_call_once(i, func_to_call_once)

        assert sync.collect_and_call_once(i + 1, sum) == 6

    mo.call(foo)
    assert times_called == 1, 'collect_and_call_once with exception called the function more than once'
Example #6
def asynchronous(func,
                 params=None,
                 workers=None,
                 log_contexts=None,
                 final_timeout=2.0,
                 **kw):
    """
    Map the list of tuple-parameters onto asynchronous calls to the specified function::

        with asynchronous(connect, [(host1,), (host2,), (host3,)]) as futures:
            ...

        connections = futures.results()

    :param func: The callable to invoke asynchronously.
    :param params: A list of tuples to map onto the function.
    :param workers: The number of workers to use. Defaults to the number of items in ``params``, capped at ``MAX_THREAD_POOL_SIZE``.
    :param log_contexts: An optional list of logging context objects, matching the items in ``params``.
    :param final_timeout: The amount of time to allow for the futures to complete after exiting the asynchronous context.
    """
    if params is None:
        params = [()]

    # don't use listify - we want to listify tuples too
    if not isinstance(params, list):
        params = [params]

    params = _to_args_list(params)
    log_contexts = _to_log_contexts(params, log_contexts)

    workers = workers or min(MAX_THREAD_POOL_SIZE, len(params))
    executor = ThreadPoolExecutor(workers) if workers else None

    funcname = _get_func_name(func)

    try:
        signature = inspect.signature(func)
    except ValueError:
        # In Python 3.5+, inspect.signature raises ValueError for built-in types
        pass
    else:
        if '_sync' in signature.parameters and '_sync' not in kw:
            assert len(params) <= workers, (
                'SynchronizationCoordinator with %s tasks but only %s workers'
                % (len(params), workers))
            synchronization_coordinator = SynchronizationCoordinator(len(params))
            kw['_sync'] = synchronization_coordinator

            func = synchronization_coordinator._abandon_when_done(func)

    futures = Futures()
    for args, ctx in zip(params, log_contexts):
        future = _submit_execution(executor,
                                   func,
                                   args,
                                   kw,
                                   ctx=ctx,
                                   funcname=funcname)
        futures.append(future)

    def kill(wait=False):
        nonlocal killed
        futures.cancel()
        if executor:
            executor.shutdown(wait=wait)
        killed = True

    killed = False
    futures.kill = kill

    try:
        yield futures
    except:
        _logger.debug("shutting down ThreadPoolExecutor due to exception")
        kill(wait=False)
        raise
    else:
        if executor:
            executor.shutdown(wait=not killed)
        if not killed:
            # force exceptions to bubble up
            try:
                futures.result(timeout=final_timeout)
            except CancelledError:
                pass
    finally:
        # break the cycle so that the GC doesn't clean up the executor under a lock (https://bugs.python.org/issue21009)
        futures.kill = None
        futures = None
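
When the target function declares a ``_sync`` parameter, the signature inspection above injects a shared ``SynchronizationCoordinator`` as that keyword argument. Below is a hedged sketch of such a worker; the function name, ``connect`` and the barrier points are illustrative assumptions, while ``wait_for_everyone`` and ``collect_and_call_once`` are the coordinator methods exercised by the tests above.

def provision(host, _sync=None):
    # hypothetical worker; asynchronous() supplies the coordinator via the _sync keyword
    node = connect(host)       # 'connect' is assumed to exist, as in the docstring example
    _sync.wait_for_everyone()  # barrier: proceed only once every host is connected

    # one worker runs the callback with all collected values;
    # every worker receives the same return value
    all_hosts = _sync.collect_and_call_once(host, sorted)
    return node, all_hosts


# with asynchronous(provision, [(h,) for h in ('a', 'b', 'c')]) as futures:
#     ...
# nodes = futures.results()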