def start_subprocess(cmd, stream=None, stdout=None, echo=False, **kwargs):
    """Starts a subprocess and returns handle to it.

    Args:
      cmd: Command line string; tokenized with shlex unless shell=True
        is passed through kwargs.
      stream: Optional text stream to receive a spawn banner.
      echo: If True, log spawn details at INFO instead of DEBUG.
      stdout: Optional file handle for the child's stdout; defaults to
        a pipe. stderr is always merged into stdout.
      **kwargs: Forwarded verbatim to subprocess.Popen.

    Returns:
      The Popen handle, annotated with a .start_date attribute.
    """
    tokens = shlex.split(cmd)
    # shell=True wants the raw string; otherwise Popen takes the argv list.
    command = cmd if kwargs.get('shell') else tokens

    level = logging.INFO if echo else logging.DEBUG
    where = ''
    if 'cwd' in kwargs:
        where += ' in cwd="%s"' % kwargs['cwd']
    logging.log(level, 'Running %s%s...', repr(cmd), where)

    started_at = datetime.datetime.now()
    if stream:
        stream.write('{time} Spawning {cmd!r}{extra}\n----\n\n'.format(
            time=log_timestring(now=started_at), cmd=cmd,
            extra=where))
        stream.flush()

    child = subprocess.Popen(command,
                             close_fds=True,
                             stdout=stdout or subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             **kwargs)
    logging.log(level, 'Running %s as pid %s', tokens[0], child.pid)
    # Stash the spawn time on the handle so wait_subprocess can report
    # the elapsed run time.
    child.start_date = started_at

    time.sleep(0)  # yield this thread
    return child
def wait_subprocess(process, stream=None, echo=False, postprocess_hook=None):
    """Waits for subprocess to finish and returns (final status, stdout).

  This will also consume the remaining output to return it.

  Args:
    process: The subprocess.Popen handle to wait on.
    stream: Optional text stream to copy decoded stdout lines into.
    echo: If True, log the collected output at INFO level.
    postprocess_hook: Optional callable(returncode, stdout) invoked
      before returning.

  Returns:
    Process exit code, stdout remaining in process prior to this invocation.
    Any previously read output from the process will not be included.
  """
    text_lines = []
    if process.stdout is not None:
        # stdout isnt going to another stream; collect it from the pipe.
        # BUG FIX: the pipe yields bytes, so the EOF sentinel must be b''
        # (the previous '' sentinel could never match a bytes value and
        # only the explicit break terminated the loop).
        for raw_line in iter(process.stdout.readline, b''):
            if not raw_line:
                break
            decoded_line = raw_line.decode(encoding='utf-8')
            text_lines.append(decoded_line)
            if stream:
                # BUG FIX: write the decoded text, not the raw bytes --
                # callers pass text-mode streams, and writing bytes to
                # them raises TypeError.
                stream.write(decoded_line)
                stream.flush()

    process.wait()
    if stream is None and process.stdout is not None:
        # Close stdout pipe if we didnt give a stream.
        # Otherwise caller owns the stream.
        # BUG FIX: guard against stdout having been redirected to a file
        # handle (process.stdout is None in that case).
        process.stdout.close()

    # BUG FIX: end_date is needed below whenever stream is set, so compute
    # it unconditionally instead of only when start_date was recorded.
    end_date = datetime.datetime.now()
    if hasattr(process, 'start_date'):
        delta_time_str = timedelta_string(end_date - process.start_date)
    else:
        delta_time_str = 'UNKNOWN'

    returncode = process.returncode
    stdout = ''.join(text_lines)

    if stream:
        stream.write('\n\n----\n{time} Spawned process completed'
                     ' with returncode {returncode} in {delta_time}.\n'.format(
                         time=log_timestring(now=end_date),
                         returncode=returncode,
                         delta_time=delta_time_str))
        stream.flush()

    if echo:
        logging.info('%s returned %d with output:\n%s', process.pid,
                     returncode, stdout)
    logging.debug('Finished %s with returncode=%d in %s', process.pid,
                  returncode, delta_time_str)

    if postprocess_hook:
        postprocess_hook(returncode, stdout)

    return returncode, stdout.strip()
# Beispiel #3 ("Example #3" -- paste artifact from the original code-example
# site; the stray "0" vote-count line was also part of that residue)
    def __build_from_live_server(self, repository):
        """Implements CommandProcessor interface.

        Boots the repository's service on an unused port, waits for its
        /health endpoint, extracts the swagger JSON from the live server,
        builds the docs from it, then kills the server. The server's
        output is captured in a per-repository logfile.
        """
        docs_url_path = SWAGGER_URL_PATHS[repository.name]
        env = dict(os.environ)
        # Pick a free port so this throwaway instance cannot collide with
        # anything already listening.
        port = unused_port()
        env['SERVER_PORT'] = str(port)
        base_url = 'http://localhost:' + str(port)

        gate_logfile = self.get_logfile_path(repository.name +
                                             '-apidocs-server')
        logging.info(
            'Starting up prototype %s so we can extract docs from it.'
            ' We will log this instance to %s', repository.name, gate_logfile)
        boot_run_cmd = './gradlew'  # default will run
        ensure_dir_exists(os.path.dirname(gate_logfile))
        gate_logstream = open(gate_logfile, 'w')
        try:
            process = start_subprocess(boot_run_cmd,
                                       stream=gate_logstream,
                                       stdout=gate_logstream,
                                       cwd=repository.git_dir,
                                       env=env)
        except Exception:
            # BUG FIX: previously the log stream leaked when spawning
            # itself failed (the finally below assumed `process` existed).
            gate_logstream.close()
            raise

        max_wait_secs = self.options.max_wait_secs_startup
        # pylint: disable=broad-except
        try:
            logging.info('Waiting up to %s secs for %s to be ready on port %d',
                         max_wait_secs, repository.name, port)
            self.wait_for_url(base_url + '/health', max_wait_secs)
            json_path = os.path.join(self.get_output_dir(), 'docs.json')
            self.__generate_json_from_url(repository,
                                          base_url + '/' + docs_url_path,
                                          json_path)
            self.build_swagger_docs(repository, json_path)
        finally:
            try:
                gate_logstream.flush()
                gate_logstream.write(
                    '\n' + log_timestring() +
                    ' ***** buildtool is killing subprocess  *****\n')
                logging.info(
                    'Killing %s subprocess %s now that we are done with it',
                    repository.name, process.pid)
                process.kill()
                wait_subprocess(process)
            except Exception as ex:
                maybe_log_exception(
                    self.name, ex,
                    'Ignoring exception while stopping {name} subprocess {pid}.'
                    .format(name=repository.name, pid=process.pid))
            finally:
                # BUG FIX: always close the logfile -- previously close()
                # was skipped whenever the shutdown bookkeeping above
                # raised (the broad except swallowed the error without
                # closing the stream).
                gate_logstream.close()
  def __build_from_live_server(self, repository):
    """Implements CommandProcessor interface.

    Boots the repository's service on an unused port, waits for its
    /health endpoint, extracts the swagger JSON from the live server,
    builds the docs, then kills the server again, capturing its output
    in a per-repository logfile.

    NOTE(review): this appears to be an exact duplicate (reformatted to
    2-space indent) of the same method earlier in this file -- confirm
    which copy is authoritative.
    """
    docs_url_path = SWAGGER_URL_PATHS[repository.name]
    env = dict(os.environ)
    # Pick a free port so this throwaway instance cannot collide with
    # anything already listening.
    port = unused_port()
    env['SERVER_PORT'] = str(port)
    base_url = 'http://localhost:' + str(port)

    gate_logfile = self.get_logfile_path(repository.name + '-apidocs-server')
    logging.info('Starting up prototype %s so we can extract docs from it.'
                 ' We will log this instance to %s',
                 repository.name, gate_logfile)
    boot_run_cmd = './gradlew'  # default will run
    ensure_dir_exists(os.path.dirname(gate_logfile))
    # NOTE(review): if start_subprocess raises, gate_logstream leaks; and
    # process.pid in the cleanup below would be a NameError.
    gate_logstream = open(gate_logfile, 'w')
    # Both the spawn banner and the child's stdout go to the same logfile.
    process = start_subprocess(
        boot_run_cmd, stream=gate_logstream, stdout=gate_logstream,
        cwd=repository.git_dir, env=env)

    max_wait_secs = self.options.max_wait_secs_startup
    # pylint: disable=broad-except
    try:
      logging.info('Waiting up to %s secs for %s to be ready on port %d',
                   max_wait_secs, repository.name, port)
      self.wait_for_url(base_url + '/health', max_wait_secs)
      json_path = os.path.join(self.get_output_dir(), 'docs.json')
      self.__generate_json_from_url(
          repository, base_url + '/' + docs_url_path, json_path)
      self.build_swagger_docs(repository, json_path)
    finally:
      # Best-effort teardown: the server is killed even if doc extraction
      # failed, and teardown errors are logged rather than propagated so
      # they do not mask the original exception.
      try:
        gate_logstream.flush()
        gate_logstream.write(
            '\n' + log_timestring()
            + ' ***** buildtool is killing subprocess  *****\n')
        logging.info('Killing %s subprocess %s now that we are done with it',
                     repository.name, process.pid)
        process.kill()
        wait_subprocess(process)
        # NOTE(review): close() is skipped if any of the lines above
        # raise -- the stream then leaks.
        gate_logstream.close()
      except Exception as ex:
        maybe_log_exception(
            self.name, ex,
            'Ignoring exception while stopping {name} subprocess {pid}.'
            .format(name=repository.name, pid=process.pid))
def wait_subprocess(process, stream=None, echo=False, postprocess_hook=None):
    """Waits for subprocess to finish and returns (final status, stdout).

    This will also consume the remaining output to return it.

    Args:
      process: The subprocess.Popen handle to wait on.
      stream: Optional text stream to copy decoded stdout lines into.
      echo: If True, log stderr and the collected output at INFO level.
      postprocess_hook: Optional callable(returncode, stdout) invoked
        before returning.

    Returns:
      Process exit code, stdout remaining in process prior to this invocation.
      Any previously read output from the process will not be included.
    """
    text_lines = []
    if process.stdout is not None:
        # stdout isnt going to another stream; collect it from the pipe.
        # BUG FIX: the pipe yields bytes, so the EOF sentinel must be b""
        # (the previous "" sentinel could never match a bytes value and
        # only the explicit break terminated the loop).
        for raw_line in iter(process.stdout.readline, b""):
            if not raw_line:
                break
            decoded_line = raw_line.decode(encoding="utf-8")
            text_lines.append(decoded_line)
            if stream:
                stream.write(decoded_line)
                stream.flush()

    if process.stderr is not None:
        log_level = logging.INFO if echo else logging.DEBUG
        # stderr isn't going to another file handle; log it
        # BUG FIX: with the old "" sentinel this loop never terminated --
        # at EOF readline() keeps returning b"", and b"" != "" in Python 3,
        # so iter() spun forever. Use the b"" sentinel plus an explicit
        # empty-line break, mirroring the stdout loop above.
        for raw_line in iter(process.stderr.readline, b""):
            if not raw_line:
                break
            decoded_line = raw_line.decode(encoding="utf-8").rstrip("\n")
            logging.log(
                log_level, "PID %s wrote to stderr: %s", process.pid, decoded_line
            )
        # We drained the stderr pipe ourselves, so close it to avoid
        # leaking the file descriptor.
        process.stderr.close()

    process.wait()
    if stream is None and process.stdout is not None:
        # Close stdout pipe if we didnt give a stream.
        # Otherwise caller owns the stream.
        process.stdout.close()

    # BUG FIX: end_date is needed below whenever stream is set, so compute
    # it unconditionally instead of only when start_date was recorded.
    end_date = datetime.datetime.now()
    if hasattr(process, "start_date"):
        delta_time_str = timedelta_string(end_date - process.start_date)
    else:
        delta_time_str = "UNKNOWN"

    returncode = process.returncode
    stdout = "".join(text_lines)

    if stream:
        stream.write(
            "\n\n----\n{time} Spawned process completed"
            " with returncode {returncode} in {delta_time}.\n".format(
                time=log_timestring(now=end_date),
                returncode=returncode,
                delta_time=delta_time_str,
            )
        )
        stream.flush()

    if echo:
        logging.info("%s returned %d with output:\n%s", process.pid, returncode, stdout)
    logging.debug(
        "Finished %s with returncode=%d in %s", process.pid, returncode, delta_time_str
    )

    if postprocess_hook:
        postprocess_hook(returncode, stdout)

    return returncode, stdout.strip()