# Example 1
def parallel_execute_stream(objects, func, get_deps):
    """Run ``func`` over ``objects`` in parallel, honouring dependencies.

    A generator: every scheduling event produced while feeding the work
    queue is yielded, and every completion event ``(obj, result, exception)``
    is yielded as workers finish.  ``get_deps`` may be None, in which case
    objects are treated as having no dependencies.
    """
    if get_deps is None:
        get_deps = _no_deps

    completed = Queue()
    state = State(objects)

    while not state.is_done():
        # Schedule any objects whose dependencies are satisfied.
        for scheduled in feed_queue(objects, func, get_deps, completed, state):
            yield scheduled

        try:
            event = completed.get(timeout=0.1)
        except Empty:
            # Nothing finished yet; poll again so new work keeps flowing.
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        obj, _, exception = event
        if exception is not None:
            log.debug('Failed: {}'.format(obj))
            state.failed.add(obj)
        else:
            log.debug('Finished processing: {}'.format(obj))
            state.finished.add(obj)

        yield event
# Example 2
def parallel_execute_iter(objects, func, get_deps, limit):
    """Apply ``func`` to each object in parallel, never before all of the
    object's dependencies (as reported by ``get_deps``) have completed.

    ``limit`` caps the number of concurrent invocations; None means
    unbounded.  Yields one tuple per object:

        (object, result, None)           # func returned normally
        (object, None, exception)        # func raised
        (object, None, UpstreamError())  # a dependency of object failed
    """
    if get_deps is None:
        get_deps = _no_deps

    # NoLimit is a no-op stand-in for a semaphore when limit is unbounded.
    limiter = NoLimit() if limit is None else Semaphore(limit)

    completed = Queue()
    state = State(objects)

    while True:
        feed_queue(objects, func, get_deps, completed, state, limiter)

        try:
            event = completed.get(timeout=0.1)
        except Empty:
            # Nothing finished yet; loop so newly-unblocked work is fed.
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        # Sentinel pushed once every object is accounted for.
        if event is STOP:
            break

        obj, _, exception = event
        if exception is not None:
            log.debug('Failed: {}'.format(obj))
            state.failed.add(obj)
        else:
            log.debug('Finished processing: {}'.format(obj))
            state.finished.add(obj)

        yield event
# Example 3
def parallel_execute(objects, func, get_name, msg, get_deps=None):
    """Run ``func`` on each object in parallel, never before all of the
    object's dependencies have been processed.

    ``get_deps`` called on an object must return a collection of its
    dependencies; ``get_name`` must return the object's display name.
    Returns the per-object results in the same order as ``objects``;
    re-raises the first non-API exception after all work completes.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    writer = ParallelStreamWriter(stream, msg)
    for obj in objects:
        writer.initialize(get_name(obj))

    q = setup_queue(objects, func, get_deps, get_name)

    errors = {}
    error_to_reraise = None
    returned = [None] * len(objects)
    pending = len(objects)

    while pending > 0:
        try:
            obj, result, exception = q.get(timeout=1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        pending -= 1
        name = get_name(obj)
        if exception is None:
            writer.write(name, 'done')
            returned[objects.index(obj)] = result
        elif isinstance(exception, APIError):
            # API errors are reported but do not abort the run.
            errors[name] = exception.explanation
            writer.write(name, 'error')
        else:
            errors[name] = exception
            error_to_reraise = exception

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return returned
# Example 4
def consume_queue(queue, cascade_stop):
    """Endlessly drain ``queue``, yielding each entry's payload.

    Yields None whenever the queue is momentarily empty so callers can
    interleave other work.  Entries carrying an exception are re-raised;
    stop markers are swallowed unless ``cascade_stop`` is set, in which
    case their payload is yielded through.
    """
    while True:
        try:
            entry = queue.get(timeout=0.1)
        except Empty:
            # Let the caller regain control while we wait for data.
            yield None
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        if entry.exc:
            raise entry.exc

        # De Morgan of: skip when (is_stop and not cascade_stop).
        if not entry.is_stop or cascade_stop:
            yield entry.item
# Example 5
    def loop(self):
        """Multiplex items from the shared queue until all producers stop.

        Generator: yields each item read off ``self.queue``.  Entries are
        ``(item, exception)`` pairs; an exception entry is re-raised.  A
        STOP sentinel either ends iteration immediately (when
        ``self.cascade_stop`` is True) or just retires one producer.
        """
        self._init_readers()

        # _num_running tracks live producers; each STOP retires one.
        while self._num_running > 0:
            try:
                item, exception = self.queue.get(timeout=0.1)

                if exception:
                    raise exception

                if item is STOP:
                    if self.cascade_stop is True:
                        # One stop tears the whole loop down.
                        break
                    else:
                        self._num_running -= 1
                else:
                    yield item
            except Empty:
                # Timeout with no data: re-check _num_running and poll again.
                pass
            # See https://github.com/docker/compose/issues/189
            except thread.error:
                raise ShutdownException()
# Example 6
def parallel_execute(objects, func, index_func, msg):
    """For a given list of objects, call the callable passing in the first
    object we give it.  Progress is rendered through a ParallelStreamWriter
    keyed by ``index_func``; the first unexpected exception is re-raised
    after all work has been collected.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)
    writer = ParallelStreamWriter(stream, msg)
    q = setup_queue(writer, objects, func, index_func)

    errors = {}
    remaining = len(objects)

    while remaining:
        try:
            msg_index, result = q.get(timeout=1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        remaining -= 1
        if isinstance(result, APIError):
            writer.write(msg_index, 'error')
            errors[msg_index] = "error", result.explanation
        elif isinstance(result, Exception):
            errors[msg_index] = "unexpected_exception", result
        else:
            writer.write(msg_index, 'done')

    if not errors:
        return

    stream.write("\n")
    for msg_index, (kind, error) in errors.items():
        stream.write("ERROR: for {}  {} \n".format(msg_index, error))
        if kind == 'unexpected_exception':
            raise error