def test_communicate_teeing_retrieves_stdout_and_stderr(self):
        process = subprocess.Popen([
            "/bin/bash", "-c", """
  echo "1out"
  echo >&2 "1err"
  sleep 0.05
  echo >&2 "2err"
  echo "2out"
  sleep 0.05
  echo "3out"
  sleep 0.05
  echo >&2 "3err"
  sleep 0.05
exit 1
"""
        ],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        process_handler = SubprocessProcessHandler(process)
        self.assertEqual(
            process_handler.communicate_teeing_stdout_and_stderr(), (b"""1out
2out
3out
""", b"""1err
2err
3err
"""))
Example #2
    def join(self,
             stdin_data: Optional[Union[bytes, str]] = None,
             tee_output: bool = False) -> PantsResult:
        """Wait for the pants process to complete, and return a PantsResult for it."""

        communicate_fn = self.process.communicate
        if tee_output:
            # TODO: MyPy complains that SubprocessProcessHandler.communicate_teeing_stdout_and_stderr does
            # not have the same type signature as subprocess.Popen.communicate.
            # It's possibly not worth trying to fix this because the type stubs for subprocess.Popen are
            # very complex and also not very precise, given how many different configurations Popen can
            # take.
            communicate_fn = SubprocessProcessHandler(
                self.process
            ).communicate_teeing_stdout_and_stderr  # type: ignore[assignment]
        if stdin_data is not None:
            stdin_data = ensure_binary(stdin_data)
        (stdout_data, stderr_data) = communicate_fn(stdin_data)

        if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
            render_logs(self.workdir)

        return PantsResult(
            command=self.command,
            returncode=self.process.returncode,
            stdout_data=stdout_data.decode(),
            stderr_data=stderr_data.decode(),
            workdir=self.workdir,
            pid=self.process.pid,
        )
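A hedged usage sketch for the `join` above: `pants_run` stands in for whatever handle object exposes this method (the name is illustrative), and the fields read off the result are exactly those the method populates.

# Hypothetical call site; `pants_run` is whatever object carries the join() above.
result = pants_run.join(stdin_data="y\n", tee_output=True)
if result.returncode != PANTS_SUCCEEDED_EXIT_CODE:
    print(result.stderr_data)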
Example #3
def execute_runner_async(runner, workunit_factory=None, workunit_name=None, workunit_labels=None,
                         workunit_log_config=None):
  """Executes the given java runner asynchronously.

  We can't use 'with' here because the workunit_generator's __exit__ function
  must be called after the process exits, in the return_code_handler.
  The wrapper around process.wait() needs to handle the same exceptions
  as the contextmanager does, so we have code duplication.

  We're basically faking the 'with' call to deal with asynchronous
  results.

  If `workunit_factory` is supplied, does so in the context of a workunit.

  :param runner: the java runner to run
  :param workunit_factory: an optional callable that can produce a workunit context
  :param string workunit_name: an optional name for the work unit; defaults to the main
  :param list workunit_labels: an optional sequence of labels for the work unit
  :param WorkUnit.LogConfig workunit_log_config: an optional tuple of task options affecting reporting

  Returns a ProcessHandler to the java process that is spawned.
  Raises `pants.java.Executor.Error` if there was a problem launching java itself.
  """

  if not isinstance(runner, Executor.Runner):
    raise ValueError('The runner argument must be a java Executor.Runner instance, '
                     'given {} of type {}'.format(runner, type(runner)))

  if workunit_factory is None:
    return SubprocessProcessHandler(runner.spawn())
  else:
    workunit_labels = [
                        WorkUnitLabel.TOOL,
                        WorkUnitLabel.NAILGUN if isinstance(runner.executor, NailgunExecutor) else WorkUnitLabel.JVM
                      ] + (workunit_labels or [])

    workunit_generator = workunit_factory(name=workunit_name, labels=workunit_labels,
                                          cmd=runner.cmd, log_config=workunit_log_config)
    workunit = workunit_generator.__enter__()
    process = runner.spawn(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))

    class WorkUnitProcessHandler(ProcessHandler):
      def wait(_):
        try:
          ret = process.wait()
          workunit.set_outcome(WorkUnit.FAILURE if ret else WorkUnit.SUCCESS)
          workunit_generator.__exit__(None, None, None)
          return ret
        except BaseException:
          if not workunit_generator.__exit__(*sys.exc_info()):
            raise

      def kill(_):
        return process.kill()

      def terminate(_):
        return process.terminate()

    return WorkUnitProcessHandler()
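To make the docstring's point concrete: if the process outcome were known immediately, the workunit could be driven by an ordinary `with` block instead of the manual `__enter__`/`__exit__` calls above. A synchronous sketch for contrast only, reusing the names from the snippet; it is not part of the real API.

# For contrast only: a blocking variant that can use `with`, because __exit__
# runs after process.wait() returns. execute_runner_async cannot do this since
# it must hand back a ProcessHandler before the process has exited.
def execute_runner_sync_sketch(runner, workunit_factory, workunit_name=None,
                               workunit_labels=None, workunit_log_config=None):
  with workunit_factory(name=workunit_name, labels=workunit_labels or [],
                        cmd=runner.cmd, log_config=workunit_log_config) as workunit:
    process = runner.spawn(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
    ret = process.wait()
    workunit.set_outcome(WorkUnit.FAILURE if ret else WorkUnit.SUCCESS)
    return ret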
Example #4
  def _spawn(self, pex, workunit, args, setsid=False, env=None):
    env = env or {}
    process = pex.run(args,
                      blocking=False,
                      setsid=setsid,
                      env=env,
                      stdout=workunit.output('stdout'),
                      stderr=workunit.output('stderr'))
    return SubprocessProcessHandler(process)
Example #5
    def _spawn(self, pex, workunit, args, setsid=False):
        # NB: We don't use pex.run(...) here since it makes a point of running in a clean environment,
        # scrubbing all `PEX_*` environment overrides and we use overrides when running pexes in this
        # task.

        process = subprocess.Popen(pex.cmdline(args),
                                   preexec_fn=os.setsid if setsid else None,
                                   stdout=workunit.output('stdout'),
                                   stderr=workunit.output('stderr'))

        return SubprocessProcessHandler(process)
Example #6
  def _spawn(self, pex, workunit, args, setsid=False, env=None):
    with self._maybe_run_in_chroot():
      env = env or {}
      process = pex.run(args,
                        with_chroot=False,  # We handle chrooting ourselves.
                        blocking=False,
                        setsid=setsid,
                        env=env,
                        stdout=workunit.output('stdout'),
                        stderr=workunit.output('stderr'))
      return SubprocessProcessHandler(process)
  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, tee_output=False, **kwargs):
    if config:
      kwargs["config"] = config
    pants_command, proc = self.run_pants_with_workdir_without_waiting(command, workdir, **kwargs)

    communicate_fn = proc.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(proc).communicate_teeing_stdout_and_stderr
    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), workdir)
Example #8
  def join(self, stdin_data=None, tee_output=False):
    """Wait for the pants process to complete, and return a PantsResult for it."""

    communicate_fn = self.process.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(self.process).communicate_teeing_stdout_and_stderr
    if stdin_data is not None:
      stdin_data = ensure_binary(stdin_data)
    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    return PantsResult(self.command, self.process.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), self.workdir)
  def test_communicate_teeing_retrieves_stdout_and_stderr(self):
    process = subprocess.Popen(["/bin/bash", "-c",
"""
  echo "1out"
  echo >&2 "1err"
  sleep 0.05
  echo >&2 "2err"
  echo "2out"
  sleep 0.05
  echo "3out"
  sleep 0.05
  echo >&2 "3err"
  sleep 0.05
exit 1
"""], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    process_handler = SubprocessProcessHandler(process)
    self.assertEqual(process_handler.communicate_teeing_stdout_and_stderr(), (
b"""1out
2out
3out
""", b"""1err
2err
3err
"""))
Example #10
    def join(self,
             stdin_data: Optional[Union[bytes, str]] = None,
             tee_output: bool = False) -> PantsResult:
        """Wait for the pants process to complete, and return a PantsResult for it."""

        communicate_fn = self.process.communicate
        if tee_output:
            communicate_fn = SubprocessProcessHandler(
                self.process).communicate_teeing_stdout_and_stderr
        if stdin_data is not None:
            stdin_data = ensure_binary(stdin_data)
        (stdout_data, stderr_data) = communicate_fn(stdin_data)

        if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
            render_logs(self.workdir)

        return PantsResult(command=self.command,
                           returncode=self.process.returncode,
                           stdout_data=stdout_data.decode(),
                           stderr_data=stderr_data.decode(),
                           workdir=self.workdir,
                           pid=self.process.pid)
  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
                             build_root=None, tee_output=False, print_exception_stacktrace=True,
                             **kwargs):

    args = [
      '--no-pantsrc',
      '--pants-workdir={}'.format(workdir),
      '--kill-nailguns',
      '--print-exception-stacktrace={}'.format(print_exception_stacktrace),
    ]

    if self.hermetic():
      args.extend(['--pants-config-files=[]',
                   # Turn off cache globally.  A hermetic integration test shouldn't rely on cache,
                   # or we have no idea if it's actually testing anything.
                   '--no-cache-read', '--no-cache-write',
                   # Turn cache on just for tool bootstrapping, for performance.
                   '--cache-bootstrap-read', '--cache-bootstrap-write'
                   ])

    if config:
      config_data = config.copy()
      ini = ConfigParser.ConfigParser(defaults=config_data.pop('DEFAULT', None))
      for section, section_config in config_data.items():
        ini.add_section(section)
        for key, value in section_config.items():
          ini.set(section, key, value)
      ini_file_name = os.path.join(workdir, 'pants.ini')
      with safe_open(ini_file_name, mode='w') as fp:
        ini.write(fp)
      args.append('--pants-config-files=' + ini_file_name)

    pants_script = os.path.join(build_root or get_buildroot(), self.PANTS_SCRIPT_NAME)

    # Permit usage of shell=True and string-based commands to allow e.g. `./pants | head`.
    if kwargs.get('shell') is True:
      assert not isinstance(command, list), 'must pass command as a string when using shell=True'
      pants_command = ' '.join([pants_script, ' '.join(args), command])
    else:
      pants_command = [pants_script] + args + command

    # Only whitelisted entries will be included in the environment if hermetic=True.
    if self.hermetic():
      env = dict()
      for h in self.hermetic_env_whitelist():
        env[h] = os.getenv(h) or ''
      hermetic_env = os.getenv('HERMETIC_ENV')
      if hermetic_env:
        for h in hermetic_env.strip(',').split(','):
          env[h] = os.getenv(h)
    else:
      env = os.environ.copy()
    if extra_env:
      env.update(extra_env)

    # Don't overwrite the profile of this process in the called process.
    # Instead, write the profile into a sibling file.
    if env.get('PANTS_PROFILE'):
      prof = '{}.{}'.format(env['PANTS_PROFILE'], self._get_profile_disambiguator())
      env['PANTS_PROFILE'] = prof
      # Make a note of the subprocess command, so the user can correctly interpret the profile files.
      with open('{}.cmd'.format(prof), 'w') as fp:
        fp.write(b' '.join(pants_command))

    proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    communicate_fn = proc.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(proc).communicate_teeing_stdout_and_stderr
    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), workdir)
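A hedged example of calling the method above from an integration test. The target spec, config values, and the `workdir` fixture are illustrative; only the keyword arguments come from the signature shown.

    # Hypothetical call site inside an integration test; workdir is assumed to be a
    # scratch directory created by the test harness.
    result = self.run_pants_with_workdir(
      ['list', 'examples::'],                 # goal and target spec (illustrative)
      workdir,
      config={'GLOBAL': {'level': 'debug'}},  # written out as pants.ini sections
      tee_output=True,
    )
    self.assertEqual(0, result.returncode)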
Example #12
  def _spawn(self, command, workunit):
    """Implements abstract TestRunnerTaskMixin._spawn."""
    process = command.run(stdout=workunit.output('stdout'),
                          stderr=workunit.output('stderr'))
    return SubprocessProcessHandler(process)
Example #13
  def test_exit_0(self):
    process = subprocess.Popen(["/bin/sh", "-c", "exit 0"])
    process_handler = SubprocessProcessHandler(process)
    self.assertEqual(process_handler.wait(), 0)
Example #14
  def _spawn(self, workunit, go_cmd, cwd):
    go_process = go_cmd.spawn(cwd=cwd,
                              stdout=workunit.output('stdout'),
                              stderr=workunit.output('stderr'))
    return SubprocessProcessHandler(go_process)

  def test_exit_0(self):
    process = subprocess.Popen(["/bin/sh", "-c", "exit 0"])
    process_handler = SubprocessProcessHandler(process)
    self.assertEqual(process_handler.wait(), 0)