Example #1
def stream_output(output, stream):
    is_terminal = hasattr(stream, 'isatty') and stream.isatty()
    stream = utils.get_output_stream(stream)
    all_events = []
    lines = {}
    diff = 0

    for event in utils.json_stream(output):
        all_events.append(event)

        if 'progress' in event or 'progressDetail' in event:
            image_id = event.get('id')
            if not image_id:
                continue

            if image_id in lines:
                diff = len(lines) - lines[image_id]
            else:
                lines[image_id] = len(lines)
                stream.write("\n")
                diff = 0

            if is_terminal:
                # move cursor up `diff` rows
                stream.write("%c[%dA" % (27, diff))

        print_output_event(event, stream, is_terminal)

        if 'id' in event and is_terminal:
            # move cursor back down
            stream.write("%c[%dB" % (27, diff))

        stream.flush()

    return all_events
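
A minimal usage sketch for stream_output, assuming a docker-py APIClient whose low-level build() call yields the raw JSON progress stream; the client, build path, and tag below are illustrative placeholders, not from the source.

import sys

import docker

# Assumption: a low-level docker-py client; build() returns a generator of
# raw JSON chunks that utils.json_stream() can parse.
client = docker.APIClient()
build_stream = client.build(path='.', tag='example:latest')

# On a TTY, stream_output redraws each layer's progress line in place using
# ANSI cursor movement; otherwise it simply prints events as they arrive.
events = stream_output(build_stream, sys.stdout)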
Example #2
def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    if ParallelStreamWriter.instance:
        writer = ParallelStreamWriter.instance
    else:
        writer = ParallelStreamWriter(stream)

    for obj in objects:
        writer.add_object(msg, get_name(obj))
    for obj in objects:
        writer.write_initial(msg, get_name(obj))

    events = parallel_execute_iter(objects, func, get_deps, limit)

    errors = {}
    results = []
    error_to_reraise = parallel_execute_watch(events, writer, errors, results, msg, get_name)

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results, errors
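
A hedged usage sketch showing how the func, get_name, and get_deps hooks fit together; the containers list and its attributes are illustrative stand-ins, not from the source.

# Illustrative call: restart a set of container-like objects in dependency
# order, at most five at a time. `containers`, `c.restart`, `c.name`, and
# `c.depends_on` are assumptions made for this example.
results, errors = parallel_execute(
    containers,
    func=lambda c: c.restart(),
    get_name=lambda c: c.name,
    msg='Restarting',
    get_deps=lambda c: c.depends_on,
    limit=5,
)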
Example #3
def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, parent_objects=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    writer = ParallelStreamWriter(stream, msg)

    display_objects = list(parent_objects) if parent_objects else objects

    for obj in display_objects:
        writer.add_object(get_name(obj))

    # write data in a second loop to consider all objects for width alignment
    # and avoid duplicates when parent_objects exists
    for obj in objects:
        writer.write_initial(get_name(obj))

    events = parallel_execute_iter(objects, func, get_deps, limit)

    errors = {}
    results = []
    error_to_reraise = None

    for obj, result, exception in events:
        if exception is None:
            writer.write(get_name(obj), 'done', green)
            results.append(result)
        elif isinstance(exception, ImageNotFound):
            # This is to bubble up ImageNotFound exceptions to the client so we
            # can prompt the user if they want to rebuild.
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), 'error', red)
            error_to_reraise = exception
        elif isinstance(exception, APIError):
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), 'error', red)
        elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
            errors[get_name(obj)] = exception.msg
            writer.write(get_name(obj), 'error', red)
        elif isinstance(exception, UpstreamError):
            writer.write(get_name(obj), 'error', red)
        else:
            errors[get_name(obj)] = exception
            error_to_reraise = exception

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results, errors
Example #4
def __init__(self,
             containers,
             output=sys.stdout,
             monochrome=False,
             cascade_stop=False,
             log_args=None):
    log_args = log_args or {}
    self.containers = containers
    self.output = utils.get_output_stream(output)
    self.monochrome = monochrome
    self.cascade_stop = cascade_stop
    self.log_args = log_args
Example #5
def __init__(self,
             containers,
             presenters,
             event_stream,
             output=sys.stdout,
             cascade_stop=False,
             log_args=None):
    self.containers = containers
    self.presenters = presenters
    self.event_stream = event_stream
    self.output = utils.get_output_stream(output)
    self.cascade_stop = cascade_stop
    self.log_args = log_args or {}
Example #6
def parallel_execute(objects, func, get_name, msg, get_deps=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    writer = ParallelStreamWriter(stream, msg)
    for obj in objects:
        writer.initialize(get_name(obj))

    q = setup_queue(objects, func, get_deps, get_name)

    done = 0
    errors = {}
    error_to_reraise = None
    returned = [None] * len(objects)

    while done < len(objects):
        try:
            obj, result, exception = q.get(timeout=1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        if exception is None:
            writer.write(get_name(obj), 'done')
            returned[objects.index(obj)] = result
        elif isinstance(exception, APIError):
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), 'error')
        else:
            errors[get_name(obj)] = exception
            error_to_reraise = exception

        done += 1

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return returned
Example #7
File: parallel.py  Project: 2k0ri/compose
def parallel_execute(objects, func, index_func, msg):
    """For a given list of objects, call the callable passing in the first
    object we give it.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)
    writer = ParallelStreamWriter(stream, msg)

    for obj in objects:
        writer.initialize(index_func(obj))

    q = Queue()

    # TODO: limit the number of threads #1828
    for obj in objects:
        t = Thread(
            target=perform_operation,
            args=(func, obj, q.put, index_func(obj)))
        t.daemon = True
        t.start()

    done = 0
    errors = {}

    while done < len(objects):
        try:
            msg_index, result = q.get(timeout=1)
        except Empty:
            continue

        if isinstance(result, APIError):
            errors[msg_index] = "error", result.explanation
            writer.write(msg_index, 'error')
        elif isinstance(result, Exception):
            errors[msg_index] = "unexpected_exception", result
        else:
            writer.write(msg_index, 'done')
        done += 1

    if not errors:
        return

    stream.write("\n")
    for msg_index, (result, error) in errors.items():
        stream.write("ERROR: for {}  {} \n".format(msg_index, error))
        if result == 'unexpected_exception':
            raise error
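
Example #7 hands each thread a perform_operation helper that is not shown above. A minimal sketch of what such a helper could look like, matching the (func, obj, q.put, index_func(obj)) call site; this is an assumption, not the project's code.

def perform_operation(func, arg, callback, index):
    # Run the operation and report either its result or the raised exception
    # back through the queue, keyed by the display index.
    try:
        callback((index, func(arg)))
    except Exception as e:
        callback((index, e))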
Example #8
def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    writer = ParallelStreamWriter(stream, msg)
    for obj in objects:
        writer.add_object(get_name(obj))
    writer.write_initial()

    events = parallel_execute_iter(objects, func, get_deps, limit)

    errors = {}
    results = []
    error_to_reraise = None

    for obj, result, exception in events:
        if exception is None:
            writer.write(get_name(obj), green('done'))
            results.append(result)
        elif isinstance(exception, APIError):
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), red('error'))
        elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
            errors[get_name(obj)] = exception.msg
            writer.write(get_name(obj), red('error'))
        elif isinstance(exception, UpstreamError):
            writer.write(get_name(obj), red('error'))
        else:
            errors[get_name(obj)] = exception
            error_to_reraise = exception

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results, errors
Example #9
def stream_output(output, stream):
    is_terminal = hasattr(stream, "isatty") and stream.isatty()
    stream = utils.get_output_stream(stream)
    all_events = []
    lines = {}
    diff = 0

    for event in utils.json_stream(output):
        all_events.append(event)
        is_progress_event = "progress" in event or "progressDetail" in event

        if not is_progress_event:
            print_output_event(event, stream, is_terminal)
            stream.flush()
            continue

        if not is_terminal:
            continue

        # if it's a progress event and we have a terminal, then display the progress bars
        image_id = event.get("id")
        if not image_id:
            continue

        if image_id in lines:
            diff = len(lines) - lines[image_id]
        else:
            lines[image_id] = len(lines)
            stream.write("\n")
            diff = 0

        # move cursor up `diff` rows
        stream.write("%c[%dA" % (27, diff))

        print_output_event(event, stream, is_terminal)

        if "id" in event:
            # move cursor back down
            stream.write("%c[%dB" % (27, diff))

        stream.flush()

    return all_events
Example #10
def stream_output(output, stream):
    is_terminal = hasattr(stream, 'isatty') and stream.isatty()
    stream = utils.get_output_stream(stream)
    all_events = []
    lines = {}
    diff = 0

    for event in utils.json_stream(output):
        all_events.append(event)
        is_progress_event = 'progress' in event or 'progressDetail' in event

        if not is_progress_event:
            print_output_event(event, stream, is_terminal)
            stream.flush()
            continue

        if not is_terminal:
            continue

        # if it's a progress event and we have a terminal, then display the progress bars
        image_id = event.get('id')
        if not image_id:
            continue

        if image_id not in lines:
            lines[image_id] = len(lines)
            write_to_stream("\n", stream)

        diff = len(lines) - lines[image_id]

        # move cursor up `diff` rows
        write_to_stream("%c[%dA" % (27, diff), stream)

        print_output_event(event, stream, is_terminal)

        if 'id' in event:
            # move cursor back down
            write_to_stream("%c[%dB" % (27, diff), stream)

        stream.flush()

    return all_events
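
Example #10 routes writes through a write_to_stream helper that is not shown. A plausible sketch is a thin wrapper that falls back to a replacement-character encode when the target stream cannot represent part of the text; this is an assumption about its purpose, not the project's definition.

def write_to_stream(s, stream):
    try:
        stream.write(s)
    except UnicodeEncodeError:
        # Degrade gracefully on streams with a limited encoding.
        encoding = getattr(stream, 'encoding', 'ascii')
        stream.write(s.encode(encoding, errors='replace').decode(encoding))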
Example #11
def parallel_execute(objects, func, index_func, msg):
    """For a given list of objects, call the callable passing in the first
    object we give it.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)
    writer = ParallelStreamWriter(stream, msg)
    q = setup_queue(writer, objects, func, index_func)

    done = 0
    errors = {}

    while done < len(objects):
        try:
            msg_index, result = q.get(timeout=1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        if isinstance(result, APIError):
            errors[msg_index] = "error", result.explanation
            writer.write(msg_index, 'error')
        elif isinstance(result, Exception):
            errors[msg_index] = "unexpected_exception", result
        else:
            writer.write(msg_index, 'done')
        done += 1

    if not errors:
        return

    stream.write("\n")
    for msg_index, (result, error) in errors.items():
        stream.write("ERROR: for {}  {} \n".format(msg_index, error))
        if result == 'unexpected_exception':
            raise error
Example #12
def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, fail_check=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    fail_check is an additional failure check for cases that should display as a failure
        in the CLI logs, but don't raise an exception (such as attempting to start 0 containers)
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    if ParallelStreamWriter.instance:
        writer = ParallelStreamWriter.instance
    else:
        writer = ParallelStreamWriter(stream)

    for obj in objects:
        writer.add_object(msg, get_name(obj))
    for obj in objects:
        writer.write_initial(msg, get_name(obj))

    events = parallel_execute_iter(objects, func, get_deps, limit)

    errors = {}
    results = []
    error_to_reraise = parallel_execute_watch(
        events, writer, errors, results, msg, get_name, fail_check
    )

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results, errors
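
A hedged sketch of the fail_check hook from Example #12: it lets an operation that returned normally still be reported as a failure in the progress output. The services list, s.start(), and s.containers() are illustrative assumptions, not from the source.

# Treat a service that started zero containers as a failure in the CLI
# output even though no exception was raised.
results, errors = parallel_execute(
    services,
    func=lambda s: s.start(),
    get_name=lambda s: s.name,
    msg='Starting',
    fail_check=lambda s: len(s.containers()) == 0,
)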
Example #13
def parallel_execute(objects,
                     func,
                     get_name,
                     msg,
                     get_deps=None,
                     limit=None,
                     parent_objects=None):
    """Runs func on objects in parallel while ensuring that func is
    ran on object only after it is ran on all its dependencies.

    get_deps called on object must return a collection with its dependencies.
    get_name called on object must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)

    writer = ParallelStreamWriter(stream, msg)

    if parent_objects:
        display_objects = list(parent_objects)
    else:
        display_objects = objects

    for obj in display_objects:
        writer.add_object(get_name(obj))

    # write data in a second loop to consider all objects for width alignment
    # and avoid duplicates when parent_objects exists
    for obj in objects:
        writer.write_initial(get_name(obj))

    events = parallel_execute_iter(objects, func, get_deps, limit)

    errors = {}
    results = []
    error_to_reraise = None

    for obj, result, exception in events:
        if exception is None:
            writer.write(get_name(obj), 'done', green)
            results.append(result)
        elif isinstance(exception, APIError):
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), 'error', red)
        elif isinstance(exception, (OperationFailedError, HealthCheckFailed,
                                    NoHealthCheckConfigured)):
            errors[get_name(obj)] = exception.msg
            writer.write(get_name(obj), 'error', red)
        elif isinstance(exception, UpstreamError):
            writer.write(get_name(obj), 'error', red)
        else:
            errors[get_name(obj)] = exception
            error_to_reraise = exception

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {}  {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results, errors
Example #14
def __init__(self, containers, output=sys.stdout, monochrome=False, cascade_stop=False):
    self.containers = containers
    self.output = utils.get_output_stream(output)
    self.monochrome = monochrome
    self.cascade_stop = cascade_stop
Example #15
def __init__(self, containers, output=sys.stdout, monochrome=False):
    self.containers = containers
    self.output = utils.get_output_stream(output)
    self.monochrome = monochrome
Example #16
def __init__(self, containers, attach_params=None, output=sys.stdout, monochrome=False):
    self.containers = containers
    self.attach_params = attach_params or {}
    self.prefix_width = self._calculate_prefix_width(containers)
    self.generators = self._make_log_generators(monochrome)
    self.output = utils.get_output_stream(output)
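
Every constructor variant above normalizes output through utils.get_output_stream, which is not shown here. A minimal sketch of what such a helper commonly does, returning text streams untouched on Python 3 and wrapping byte streams in a UTF-8 writer on Python 2; this is an assumption about its behavior, not the project's definition.

import codecs
import sys


def get_output_stream(stream):
    # Python 3 text streams already accept unicode; on Python 2, wrap the
    # byte stream so unicode log lines can be written safely.
    if sys.version_info[0] >= 3:
        return stream
    return codecs.getwriter('utf-8')(stream)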