Example #1
    def _parse_libraries_from_compiler_search_dirs(self, compiler_exe, env):
        # This argument is supported by at least gcc and clang.
        cmd = [compiler_exe, '-print-search-dirs']

        try:
            # Get stderr interspersed in the error message too -- this should not affect output parsing.
            compiler_output = subprocess.check_output(cmd,
                                                      env=env,
                                                      stderr=subprocess.STDOUT)
        except OSError as e:
            # We use `safe_shlex_join` here to pretty-print the command.
            raise self.ParseSearchDirsError(
                "Invocation of '{}' with argv '{}' failed.".format(
                    compiler_exe, safe_shlex_join(cmd)), e)
        except subprocess.CalledProcessError as e:
            raise self.ParseSearchDirsError(
                "Invocation of '{}' with argv '{}' exited with non-zero code {}. output:\n{}"
                .format(compiler_exe, safe_shlex_join(cmd), e.returncode,
                        e.output), e)

        libs_line = self._search_dirs_libraries_regex.search(compiler_output)
        if not libs_line:
            raise self.ParseSearchDirsError(
                "Could not parse libraries from output of {!r}:\n{}".format(
                    safe_shlex_join(cmd), compiler_output))
        return libs_line.group(1).split(':')
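All of the examples on this page pretty-print an argv list with safe_shlex_join before logging it or embedding it in an error message. The helper itself is not defined on this page, so the following is only a minimal sketch of the idea, assuming it shell-quotes each argument and joins the results with spaces (the actual Pants implementation may differ):

import shlex

def safe_shlex_join(arg_list):
    # Quote each argument so the joined string can be copy-pasted into a shell safely.
    return ' '.join(shlex.quote(str(arg)) for arg in arg_list)

print(safe_shlex_join(['gcc', '-print-search-dirs']))  # gcc -print-search-dirs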
Example #2
    def as_environment(self):
        ret = {}

        if self.setup_requires_site_dir:
            ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

        # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
        # executables more hygienically. We should probably be composing each datatype's members, and
        # only creating an environment at the very end.
        native_tools = self.setup_py_native_tools
        if native_tools:
            # TODO: an as_tuple() method for datatypes would make this destructuring cleaner!
            plat = native_tools.platform
            cc = native_tools.c_compiler
            cxx = native_tools.cpp_compiler
            linker = native_tools.linker

            all_path_entries = cc.path_entries + cxx.path_entries + linker.path_entries
            ret['PATH'] = create_path_env_var(all_path_entries)

            all_library_dirs = cc.library_dirs + cxx.library_dirs + linker.library_dirs
            if all_library_dirs:
                joined_library_dirs = create_path_env_var(all_library_dirs)
                ret['LIBRARY_PATH'] = joined_library_dirs
                dynamic_lib_env_var = plat.resolve_platform_specific({
                    'darwin': lambda: 'DYLD_LIBRARY_PATH',
                    'linux': lambda: 'LD_LIBRARY_PATH',
                })
                ret[dynamic_lib_env_var] = joined_library_dirs

            all_include_dirs = cc.include_dirs + cxx.include_dirs
            if all_include_dirs:
                ret['CPATH'] = create_path_env_var(all_include_dirs)

            all_cflags_for_platform = plat.resolve_platform_specific({
                'darwin': lambda: ['-mmacosx-version-min=10.11'],
                'linux': lambda: [],
            })
            if all_cflags_for_platform:
                ret['CFLAGS'] = safe_shlex_join(all_cflags_for_platform)

            ret['CC'] = cc.exe_filename
            ret['CXX'] = cxx.exe_filename
            ret['LDSHARED'] = linker.exe_filename

            all_new_ldflags = plat.resolve_platform_specific(
                self._SHARED_CMDLINE_ARGS)
            ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

        return ret
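The create_path_env_var helper used above (and in several later examples) is also not defined on this page. Below is a minimal sketch under the assumption that it joins path entries with os.pathsep and can optionally prepend them to an existing value taken from an env dict (the second positional argument seen in Example #7); the real Pants implementation may differ:

import os

def create_path_env_var(new_entries, env=None, env_var='PATH', prepend=False):
    # Join the new entries with the platform path separator and, if requested,
    # prepend them to the variable's current value taken from `env`.
    new_value = os.pathsep.join(str(entry) for entry in new_entries)
    existing = (env or {}).get(env_var)
    if not existing:
        return new_value
    return os.pathsep.join([new_value, existing] if prepend else [existing, new_value])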
Example #3
    def _create_dist(self, dist_tgt, dist_target_dir, setup_requires_pex,
                     snapshot_fingerprint, is_platform_specific):
        """Create a .whl file for the specified python_distribution target."""
        self._copy_sources(dist_tgt, dist_target_dir)

        setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
            snapshot_fingerprint, is_platform_specific)

        cmd = safe_shlex_join(
            setup_requires_pex.cmdline(setup_py_snapshot_version_argv))
        with self.context.new_workunit('setup.py',
                                       cmd=cmd,
                                       labels=[WorkUnitLabel.TOOL]) as workunit:
            with pushd(dist_target_dir):
                result = setup_requires_pex.run(
                    args=setup_py_snapshot_version_argv,
                    stdout=workunit.output('stdout'),
                    stderr=workunit.output('stderr'))
                if result != 0:
                    raise self.BuildLocalPythonDistributionsError(
                        "Installation of python distribution from target {target} into directory {into_dir} "
                        "failed (return value of run() was: {rc!r}).\n"
                        "The pex with any requirements is located at: {interpreter}.\n"
                        "The host system's compiler and linker were used.\n"
                        "The setup command was: {command}.".format(
                            target=dist_tgt,
                            into_dir=dist_target_dir,
                            rc=result,
                            interpreter=setup_requires_pex.path(),
                            command=setup_py_snapshot_version_argv))
Example #4
  def _process_session(self):
    """Process the outputs of the nailgun session.

    :raises: :class:`NailgunProtocol.ProcessStreamTimeout` if a timeout set from a signal handler
                                                           with .set_exit_timeout() completes.
    :raises: :class:`Exception` if the session completes before the timeout; in that case the
                                `reason` argument passed to .set_exit_timeout() is raised."""
    try:
      for chunk_type, payload in self.iter_chunks(
        MaybeShutdownSocket(self._sock),
        return_bytes=True,
        timeout_object=self,
      ):
        # TODO(#6579): assert that we have at this point received all the chunk types in
        # ChunkType.REQUEST_TYPES, then require PID and PGRP (exactly once?), and then allow any of
        # ChunkType.EXECUTION_TYPES.
        if chunk_type == ChunkType.STDOUT:
          self._write_flush(self._stdout, payload)
        elif chunk_type == ChunkType.STDERR:
          self._write_flush(self._stderr, payload)
        elif chunk_type == ChunkType.EXIT:
          self._write_flush(self._stdout)
          self._write_flush(self._stderr)
          return int(payload)
        elif chunk_type == ChunkType.PID:
          self.remote_pid = int(payload)
          self.remote_process_cmdline = psutil.Process(self.remote_pid).cmdline()
          if self._remote_pid_callback:
            self._remote_pid_callback(self.remote_pid)
        elif chunk_type == ChunkType.PGRP:
          self.remote_pgrp = int(payload)
          if self._remote_pgrp_callback:
            self._remote_pgrp_callback(self.remote_pgrp)
        elif chunk_type == ChunkType.START_READING_INPUT:
          self._maybe_start_input_writer()
        else:
          raise self.ProtocolError('received unexpected chunk {} -> {}'.format(chunk_type, payload))
    except NailgunProtocol.ProcessStreamTimeout as e:
      assert(self.remote_pid is not None)
      # NB: We overwrite the process title in the pantsd process, which causes it to have an
      # argv with lots of empty spaces for some reason. We filter those out and pretty-print the
      # rest here.
      filtered_remote_cmdline = safe_shlex_join(
        arg for arg in self.remote_process_cmdline if arg != '')
      logger.warning(
        "timed out when attempting to gracefully shut down the remote client executing \"{}\". "
        "sending SIGKILL to the remote client at pid: {}. message: {}"
        .format(filtered_remote_cmdline, self.remote_pid, e))
    finally:
      # Bad chunk types received from the server can throw NailgunProtocol.ProtocolError in
      # NailgunProtocol.iter_chunks(). This ensures the NailgunStreamWriter is always stopped.
      self._maybe_stop_input_writer()
      # If an asynchronous error was set at any point (such as in a signal handler), we want to make
      # sure we clean up the remote process before exiting with error.
      if self._exit_reason:
        if self.remote_pgrp:
          safe_kill(self.remote_pgrp, signal.SIGKILL)
        if self.remote_pid:
          safe_kill(self.remote_pid, signal.SIGKILL)
        raise self._exit_reason
Example #5
  def _process_session(self):
    """Process the outputs of the nailgun session.

    :raises: :class:`NailgunProtocol.ProcessStreamTimeout` if a timeout set from a signal handler
                                                           with .set_exit_timeout() completes.
    :raises: :class:`Exception` if the session completes before the timeout; in that case the
                                `reason` argument passed to .set_exit_timeout() is raised."""
    try:
      for chunk_type, payload in self.iter_chunks(
        MaybeShutdownSocket(self._sock),
        return_bytes=True,
        timeout_object=self,
      ):
        # TODO(#6579): assert that we have at this point received all the chunk types in
        # ChunkType.REQUEST_TYPES, then require PID and PGRP (exactly once?), and then allow any of
        # ChunkType.EXECUTION_TYPES.
        if chunk_type == ChunkType.STDOUT:
          self._write_flush(self._stdout, payload)
        elif chunk_type == ChunkType.STDERR:
          self._write_flush(self._stderr, payload)
        elif chunk_type == ChunkType.EXIT:
          self._write_flush(self._stdout)
          self._write_flush(self._stderr)
          return int(payload)
        elif chunk_type == ChunkType.PID:
          self.remote_pid = int(payload)
          self.remote_process_cmdline = psutil.Process(self.remote_pid).cmdline()
          if self._remote_pid_callback:
            self._remote_pid_callback(self.remote_pid)
        elif chunk_type == ChunkType.PGRP:
          self.remote_pgrp = int(payload)
          if self._remote_pgrp_callback:
            self._remote_pgrp_callback(self.remote_pgrp)
        elif chunk_type == ChunkType.START_READING_INPUT:
          self._maybe_start_input_writer()
        else:
          raise self.ProtocolError('received unexpected chunk {} -> {}'.format(chunk_type, payload))
    except NailgunProtocol.ProcessStreamTimeout as e:
      assert(self.remote_pid is not None)
      # NB: We overwrite the process title in the pantsd process, which causes it to have an
      # argv with lots of empty spaces for some reason. We filter those out and pretty-print the
      # rest here.
      filtered_remote_cmdline = safe_shlex_join(
        arg for arg in self.remote_process_cmdline if arg != '')
      logger.warning(
        "timed out when attempting to gracefully shut down the remote client executing \"{}\". "
        "sending SIGKILL to the remote client at pid: {}. message: {}"
        .format(filtered_remote_cmdline, self.remote_pid, e))
    finally:
      # Bad chunk types received from the server can throw NailgunProtocol.ProtocolError in
      # NailgunProtocol.iter_chunks(). This ensures the NailgunStreamWriter is always stopped.
      self._maybe_stop_input_writer()
      # If an asynchronous error was set at any point (such as in a signal handler), we want to make
      # sure we clean up the remote process before exiting with error.
      if self._exit_reason:
        if self.remote_pgrp:
          safe_kill(self.remote_pgrp, signal.SIGKILL)
        if self.remote_pid:
          safe_kill(self.remote_pid, signal.SIGKILL)
        raise self._exit_reason
Example #6
    def _create_dist(self, dist_tgt, dist_target_dir, setup_py_runner,
                     snapshot_fingerprint, is_platform_specific):
        """Create a .whl file for the specified python_distribution target."""
        self._copy_sources(dist_tgt, dist_target_dir)

        setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
            snapshot_fingerprint, is_platform_specific)

        cmd = safe_shlex_join(
            setup_py_runner.cmdline(setup_py_snapshot_version_argv))
        with self.context.new_workunit("setup.py",
                                       cmd=cmd,
                                       labels=[WorkUnitLabel.TOOL]) as workunit:
            try:
                setup_py_runner.run_setup_command(
                    source_dir=Path(dist_target_dir),
                    setup_command=setup_py_snapshot_version_argv,
                    stdout=workunit.output("stdout"),
                    stderr=workunit.output("stderr"),
                )
            except SetupPyRunner.CommandFailure as e:
                raise self.BuildLocalPythonDistributionsError(
                    f"Installation of python distribution from target {dist_tgt} into directory "
                    f"{dist_target_dir} failed using the host system's compiler and linker: {e}"
                )
Example #7
    def _run_zef_command(self, workunit_factory, argv):
        subproc_env = os.environ.copy()
        subproc_env['PATH'] = create_path_env_var(self.path_entries,
                                                  subproc_env,
                                                  prepend=True)

        all_argv = ['zef'] + argv
        pretty_printed_argv = safe_shlex_join(all_argv)
        try:
            if workunit_factory:
                with workunit_factory(cmd=pretty_printed_argv) as workunit:
                    return subprocess.check_call(
                        all_argv,
                        env=subproc_env,
                        stdout=workunit.output('stdout'),
                        stderr=workunit.output('stderr'))
            else:
                output = subprocess.check_output(all_argv, env=subproc_env)
                logger.debug(
                    "output from running zef command {!r} with env {!r}:\n{}".
                    format(all_argv, subproc_env, output))
        except (OSError, subprocess.CalledProcessError) as e:
            # NB: OSError has no returncode attribute, so fall back to None in that case.
            raise self.ZefException("Error with zef command '{}': {}".format(
                pretty_printed_argv, e),
                                    e,
                                    exit_code=getattr(e, 'returncode', None))
Example #8
    def run_tests(self, fail_fast, test_targets, args_by_target):
        self.context.log.debug('test_targets: {}'.format(test_targets))

        with self.chroot(test_targets, self._maybe_workdir) as chroot:
            cmdline_args = self._build_and_test_flags + [
                args_by_target[t].import_path for t in test_targets
            ] + self.get_passthru_args()
            gopath = create_path_env_var(args_by_target[t].gopath
                                         for t in test_targets)
            go_cmd = self.go_dist.create_go_cmd('test',
                                                gopath=gopath,
                                                args=cmdline_args)

            self.context.log.debug('go_cmd: {}'.format(go_cmd))

            workunit_labels = [WorkUnitLabel.TOOL, WorkUnitLabel.TEST]
            with self.context.new_workunit(name='go test',
                                           cmd=safe_shlex_join(go_cmd.cmdline),
                                           labels=workunit_labels) as workunit:

                exit_code = self.spawn_and_wait(test_targets,
                                                workunit=workunit,
                                                go_cmd=go_cmd,
                                                cwd=chroot)
                return TestResult.rc(exit_code)
Example #9
  def _create_dist(self,
                   dist_tgt,
                   dist_target_dir,
                   setup_requires_pex,
                   snapshot_fingerprint,
                   is_platform_specific):
    """Create a .whl file for the specified python_distribution target."""
    self._copy_sources(dist_tgt, dist_target_dir)

    setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
      snapshot_fingerprint, is_platform_specific)

    cmd = safe_shlex_join(setup_requires_pex.cmdline(setup_py_snapshot_version_argv))
    with self.context.new_workunit('setup.py', cmd=cmd, labels=[WorkUnitLabel.TOOL]) as workunit:
      with pushd(dist_target_dir):
        result = setup_requires_pex.run(args=setup_py_snapshot_version_argv,
                                        stdout=workunit.output('stdout'),
                                        stderr=workunit.output('stderr'))
        if result != 0:
          raise self.BuildLocalPythonDistributionsError(
            "Installation of python distribution from target {target} into directory {into_dir} "
            "failed (return value of run() was: {rc!r}).\n"
            "The pex with any requirements is located at: {interpreter}.\n"
            "The host system's compiler and linker were used.\n"
            "The setup command was: {command}."
            .format(target=dist_tgt,
                    into_dir=dist_target_dir,
                    rc=result,
                    interpreter=setup_requires_pex.path(),
                    command=setup_py_snapshot_version_argv))
Example #10
    def _do_run_tests_with_args(self, pex, args):
        try:
            env = dict(os.environ)

            # Ensure we don't leak source files or undeclared 3rdparty requirements into the pytest PEX
            # environment.
            pythonpath = env.pop('PYTHONPATH', None)
            if pythonpath:
                self.context.log.warn(
                    'scrubbed PYTHONPATH={} from pytest environment'.format(
                        pythonpath))
            # But allow this back door for users who do want to force something onto the test pythonpath,
            # e.g., modules required during a debugging session.
            extra_pythonpath = self.get_options().extra_pythonpath
            if extra_pythonpath:
                env['PYTHONPATH'] = os.pathsep.join(extra_pythonpath)

            # The pytest runner we use accepts a --pdb argument that will launch an interactive pdb
            # session on any test failure.  In order to support use of this pass-through flag we must
            # turn off stdin buffering that otherwise occurs.  Setting the PYTHONUNBUFFERED env var to
            # any value achieves this in python2.7.  We'll need a different solution when we support
            # running pants under CPython 3 which does not unbuffer stdin using this trick.
            env['PYTHONUNBUFFERED'] = '1'

            # pytest uses py.io.terminalwriter for output. That class detects the terminal
            # width and attempts to use all of it. However we capture and indent the console
            # output, leading to weird-looking line wraps. So we trick the detection code
            # into thinking the terminal window is narrower than it is.
            env['COLUMNS'] = str(int(os.environ.get('COLUMNS', 80)) - 30)

            profile = self.get_options().profile
            if profile:
                env['PEX_PROFILE_FILENAME'] = '{0}.subprocess.{1:.6f}'.format(
                    profile, time.time())

            with self.context.new_workunit(
                    name='run',
                    cmd=safe_shlex_join(pex.cmdline(args)),
                    labels=[WorkUnitLabel.TOOL,
                            WorkUnitLabel.TEST]) as workunit:
                # NB: Constrain the pex environment to ensure the use of the selected interpreter!
                env.update(self._ensure_pytest_interpreter_search_path())
                rc = self.spawn_and_wait(pex,
                                         workunit=workunit,
                                         args=args,
                                         setsid=True,
                                         env=env)
                return PytestResult.rc(rc)
        except ErrorWhileTesting:
            # spawn_and_wait wraps the test runner in a timeout, so it could
            # fail with an ErrorWhileTesting. We can't just set PythonTestResult
            # to a failure because the resultslog doesn't have all the failures
            # when tests are killed with a timeout. Therefore we need to re-raise
            # here.
            raise
        except Exception:
            self.context.log.error('Failed to run test!')
            self.context.log.info(traceback.format_exc())
            return PytestResult.exception()
Example #11
    def _invoke_compiler_exe(self, cmd, env):
        try:
            # Get stderr interspersed in the error message too -- this should not affect output parsing.
            compiler_output = subprocess.check_output(
                cmd, env=env, stderr=subprocess.STDOUT).decode()
        except OSError as e:
            # We use `safe_shlex_join` here to pretty-print the command.
            raise self.ParseSearchDirsError(
                "Process invocation with argv '{}' and environment {!r} failed."
                .format(safe_shlex_join(cmd), env), e)
        except subprocess.CalledProcessError as e:
            raise self.ParseSearchDirsError(
                "Process invocation with argv '{}' and environment {!r} exited with non-zero code {}. "
                "output:\n{}".format(safe_shlex_join(cmd), env, e.returncode,
                                     e.output), e)

        return compiler_output
Example #12
  def _invoke_compiler_exe(self, cmd, env):
    try:
      # Get stderr interspersed in the error message too -- this should not affect output parsing.
      compiler_output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT).decode('utf-8')
    except OSError as e:
      # We use `safe_shlex_join` here to pretty-print the command.
      raise self.ParseSearchDirsError(
        "Process invocation with argv '{}' and environment {!r} failed."
        .format(safe_shlex_join(cmd), env),
        e)
    except subprocess.CalledProcessError as e:
      raise self.ParseSearchDirsError(
        "Process invocation with argv '{}' and environment {!r} exited with non-zero code {}. "
        "output:\n{}"
        .format(safe_shlex_join(cmd), env, e.returncode, e.output),
        e)

    return compiler_output
Example #13
    def _parse_libraries_from_compiler_search_dirs(self, compiler_exe, env):
        # This argument is supported by at least gcc and clang.
        cmd = [compiler_exe, '-print-search-dirs']

        compiler_output = self._invoke_compiler_exe(cmd, env)

        libs_line = self._search_dirs_libraries_regex.search(compiler_output)
        if not libs_line:
            raise self.ParseSearchDirsError(
                "Could not parse libraries from output of {!r}:\n{}".format(
                    safe_shlex_join(cmd), compiler_output))
        return libs_line.group(1).split(':')
Example #14
  def _parse_libraries_from_compiler_search_dirs(self, compiler_exe, env):
    # This argument is supported by at least gcc and clang.
    cmd = [compiler_exe, '-print-search-dirs']

    compiler_output = self._invoke_compiler_exe(cmd, env)

    libs_line = self._search_dirs_libraries_regex.search(compiler_output)
    if not libs_line:
      raise self.ParseSearchDirsError(
        "Could not parse libraries from output of {!r}:\n{}"
        .format(safe_shlex_join(cmd), compiler_output))
    return libs_line.group(1).split(':')
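Examples #1, #13, and #14 parse library directories out of `-print-search-dirs` output. The regex itself is not shown on this page, so the following is a self-contained sketch assuming a gcc/clang-style "libraries: =dir1:dir2" line; the pattern below is a hypothetical stand-in for _search_dirs_libraries_regex:

import re

# Hypothetical stand-in for self._search_dirs_libraries_regex.
search_dirs_libraries_regex = re.compile(r'^libraries: =(.*)$', re.MULTILINE)

sample_output = (
    "install: /usr/lib/gcc/x86_64-linux-gnu/9/\n"
    "programs: =/usr/lib/gcc/x86_64-linux-gnu/9/bin\n"
    "libraries: =/usr/lib/gcc/x86_64-linux-gnu/9:/usr/lib\n"
)

libs_line = search_dirs_libraries_regex.search(sample_output)
if libs_line:
    print(libs_line.group(1).split(':'))
    # ['/usr/lib/gcc/x86_64-linux-gnu/9', '/usr/lib']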
Example #15
  def get_invocation_environment_dict(self, platform):
    ret = super(CompilerMixin, self).get_invocation_environment_dict(platform).copy()

    if self.include_dirs:
      ret['CPATH'] = create_path_env_var(self.include_dirs)

    all_cflags_for_platform = platform.resolve_platform_specific({
      'darwin': lambda: ['-mmacosx-version-min=10.11'],
      'linux': lambda: [],
    })
    ret['CFLAGS'] = safe_shlex_join(all_cflags_for_platform)

    return ret
Example #16
    def run_setup_command(self, *, source_dir: Path,
                          setup_command: Iterable[str], **kwargs) -> None:
        """Runs the given setup.py command against the setup.py project in `source_dir`.

        :raises: :class:`SetupPyRunner.CommandFailure` if there was a problem executing the command.
        """
        with pushd(str(source_dir)):
            result = self._requirements_pex.run(
                args=self._create_python_args(setup_command), **kwargs)
            if result != 0:
                pex_command = safe_shlex_join(self.cmdline(setup_command))
                raise self.CommandFailure(
                    f"Failed to execute {pex_command} using {self}")
Example #17
    def _run_rakudobrew_command(self, argv):
        subproc_env = os.environ.copy()
        subproc_env['PATH'] = create_path_env_var(self.path_entries,
                                                  subproc_env,
                                                  prepend=True)

        all_argv = ['rakudobrew'] + argv
        pretty_printed_argv = safe_shlex_join(all_argv)
        try:
            return subprocess.check_output(all_argv, env=subproc_env)
        except (OSError, subprocess.CalledProcessError) as e:
            raise self.RakudoBrewBootstrapError(
                "Error with rakudobrew command '{}': {}".format(
                    pretty_printed_argv, e), e)
Example #18
  def _do_run_tests_with_args(self, pex, args):
    try:
      env = dict(os.environ)

      # Ensure we don't leak source files or undeclared 3rdparty requirements into the py.test PEX
      # environment.
      pythonpath = env.pop('PYTHONPATH', None)
      if pythonpath:
        self.context.log.warn('scrubbed PYTHONPATH={} from py.test environment'.format(pythonpath))
      # But allow this back door for users who do want to force something onto the test pythonpath,
      # e.g., modules required during a debugging session.
      extra_pythonpath = self.get_options().extra_pythonpath
      if extra_pythonpath:
        env['PYTHONPATH'] = os.pathsep.join(extra_pythonpath)

      # The pytest runner we use accepts a --pdb argument that will launch an interactive pdb
      # session on any test failure.  In order to support use of this pass-through flag we must
      # turn off stdin buffering that otherwise occurs.  Setting the PYTHONUNBUFFERED env var to
      # any value achieves this in python2.7.  We'll need a different solution when we support
      # running pants under CPython 3 which does not unbuffer stdin using this trick.
      env['PYTHONUNBUFFERED'] = '1'

      # pytest uses py.io.terminalwriter for output. That class detects the terminal
      # width and attempts to use all of it. However we capture and indent the console
      # output, leading to weird-looking line wraps. So we trick the detection code
      # into thinking the terminal window is narrower than it is.
      env['COLUMNS'] = str(int(os.environ.get('COLUMNS', 80)) - 30)

      profile = self.get_options().profile
      if profile:
        env['PEX_PROFILE_FILENAME'] = '{0}.subprocess.{1:.6f}'.format(profile, time.time())

      with self.context.new_workunit(name='run',
                                     cmd=safe_shlex_join(pex.cmdline(args)),
                                     labels=[WorkUnitLabel.TOOL, WorkUnitLabel.TEST]) as workunit:
        # NB: Constrain the pex environment to ensure the use of the selected interpreter!
        env.update(self._constrain_pytest_interpreter_search_path())
        rc = self.spawn_and_wait(pex, workunit=workunit, args=args, setsid=True, env=env)
        return PytestResult.rc(rc)
    except ErrorWhileTesting:
      # spawn_and_wait wraps the test runner in a timeout, so it could
      # fail with an ErrorWhileTesting. We can't just set PythonTestResult
      # to a failure because the resultslog doesn't have all the failures
      # when tests are killed with a timeout. Therefore we need to re-raise
      # here.
      raise
    except Exception:
      self.context.log.error('Failed to run test!')
      self.context.log.info(traceback.format_exc())
      return PytestResult.exception()
Example #19
 def open(self, files: Iterable[PurePath]) -> None:
     for request in self._iter_openers(files):
         open_command = safe_shlex_join(request.argv)
         try:
             result = self.runner.run_process(request)
             if result.exit_code != 0:
                 self.console.print_stderr(
                     f"Failed to open files for viewing using `{open_command}` - received exit "
                     f"code {result.exit_code}.")
         except Exception as e:
             self.console.print_stderr(
                 f"Failed to open files for viewing using "
                 f"`{open_command}`: {e}")
             self.console.print_stderr(
                 f"Ensure {self.program} is installed on your `PATH` and "
                 f"re-run this goal.")
Example #20
 def _invoke_capturing_output(self, cmd, env=None):
   env = env or {}
   try:
     with environment_as(**env):
       return subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8')
   except subprocess.CalledProcessError as e:
     raise Exception(
       "Command failed while invoking the native toolchain "
       "with code '{code}', cwd='{cwd}', cmd='{cmd}', env='{env}'. "
       "Combined stdout and stderr:\n{out}"
       .format(code=e.returncode,
               cwd=os.getcwd(),
               # safe_shlex_join() is just for pretty-printing.
               cmd=safe_shlex_join(cmd),
               env=env,
               out=e.output),
       e)
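The environment_as helper used in Examples #20 and #21 temporarily overlays environment variables for the duration of a with-block. It is not defined on this page; the sketch below captures that behaviour under stated assumptions and is not the exact Pants implementation:

import os
from contextlib import contextmanager

@contextmanager
def environment_as(**kwargs):
    # Temporarily set the given environment variables, restoring the previous
    # values (or removing the keys) on exit.
    old_values = {key: os.environ.get(key) for key in kwargs}
    os.environ.update({key: str(value) for key, value in kwargs.items()})
    try:
        yield
    finally:
        for key, value in old_values.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value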
Example #21
 def _invoke_capturing_output(self, cmd, env=None):
     if env is None:
         env = os.environ.copy()
     try:
         with environment_as(**env):
             return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
     except subprocess.CalledProcessError as e:
         raise Exception(
             "Command failed while invoking the native toolchain "
             "with code '{code}', cwd='{cwd}', cmd='{cmd}', env='{env}'. "
             "Combined stdout and stderr:\n{out}".format(
                 code=e.returncode,
                 cwd=os.getcwd(),
                 # safe_shlex_join() is just for pretty-printing.
                 cmd=safe_shlex_join(cmd),
                 env=env,
                 out=e.output),
             e)
Example #22
  def get_invocation_environment_dict(self, platform):
    ret = super(Linker, self).get_invocation_environment_dict(platform).copy()

    # TODO: set all LDFLAGS in here or in further specializations of Linker instead of in individual
    # tasks.
    all_ldflags_for_platform = platform.resolve_platform_specific({
      'darwin': lambda: ['-mmacosx-version-min=10.11'],
      'linux': lambda: [],
    })
    ret.update({
      'LDSHARED': self.exe_filename,
      # FIXME: this overloads the meaning of 'library_dirs' to also mean "directories containing
      # static libraries required for creating an executable" (currently, libc). These concepts
      # should be distinct.
      'LIBRARY_PATH': create_path_env_var(self.library_dirs),
      'LDFLAGS': safe_shlex_join(all_ldflags_for_platform),
    })

    return ret
Example #23
  def invoke_perl6(self, argv, perl6_env, workunit_factory=None):
    full_argv = [self._perl6_exe_filename] + list(argv)
    subproc_env = self._get_perl6_subproc_os_env(perl6_env)

    pretty_printed_argv = safe_shlex_join(full_argv)
    try:
      logger.debug('running perl6 command {!r} with env {!r}'.format(full_argv, subproc_env))
      if workunit_factory:
        with workunit_factory(cmd=pretty_printed_argv) as workunit:
          # TODO: should we be catching KeyboardInterrupt or something?
          return subprocess.check_call(
            full_argv,
            env=subproc_env,
            stdout=workunit.output('stdout'),
            stderr=workunit.output('stderr'))
      else:
        return subprocess.check_call(full_argv, env=subproc_env)
    except (OSError, subprocess.CalledProcessError) as e:
      # NB: OSError has no returncode attribute, so fall back to None in that case.
      raise self.Perl6InvocationError(
        "Error with perl6 command '{}': {}".format(pretty_printed_argv, e),
        e,
        exit_code=getattr(e, 'returncode', None))
Example #24
 def _pretty_cmdline(self, args):
   return safe_shlex_join(self._pex.cmdline(args))
Example #25
    def _runtool_hermetic(self, main, tool_name, distribution, input_digest,
                          ctx):
        tool_classpath_abs = self._rsc_classpath
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        jvm_options = self._jvm_options

        if self._rsc.use_native_image:
            #jvm_options = []
            if jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(jvm_options)))
            native_image_path, native_image_snapshot = self._rsc.native_image(
                self.context)
            additional_snapshots = [native_image_snapshot]
            initial_args = [native_image_path]
        else:
            additional_snapshots = []
            initial_args = [
                distribution.java,
            ] + self.get_options().jvm_options + [
                '-cp',
                os.pathsep.join(tool_classpath),
                main,
            ]

        argfile_snapshot, = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        cmd = initial_args + ['@{}'.format(argfile_snapshot.files[0])]

        pathglobs = list(tool_classpath)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    get_buildroot())
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].directory_digest

        epr_input_files = self.context._scheduler.merge_directories(
            ((path_globs_input_digest, ) if path_globs_input_digest else ()) +
            ((input_digest, ) if input_digest else ()) +
            tuple(s.directory_digest for s in additional_snapshots) +
            (argfile_snapshot.directory_digest, ))

        epr = ExecuteProcessRequest(
            argv=tuple(cmd),
            input_files=epr_input_files,
            output_files=(fast_relpath(ctx.rsc_jar_file.path,
                                       get_buildroot()), ),
            output_directories=tuple(),
            timeout_seconds=15 * 60,
            description='run {} for {}'.format(tool_name, ctx.target),
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # ExecuteProcessRequest requires an existing, local jdk location.
            jdk_home=distribution.underlying_home,
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.COMPILER])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        # TODO: parse the output of -Xprint:timings for rsc and write it to self._record_target_stats()!

        res.output_directory_digest.dump(ctx.rsc_jar_file.path)

        ctx.rsc_jar_file = ClasspathEntry(ctx.rsc_jar_file.path,
                                          res.output_directory_digest)

        self.context._scheduler.materialize_directories((
            DirectoryToMaterialize(
                # NB the first element here is the root to materialize into, not the dir to snapshot
                get_buildroot(),
                res.output_directory_digest), ))

        return res
Example #26
    def _runtool_hermetic(self, main, tool_name, distribution, input_digest,
                          ctx):
        use_youtline = tool_name == "scalac-outliner"

        tool_classpath_abs = self._scalac_classpath if use_youtline else self._rsc_classpath
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        rsc_jvm_options = Rsc.global_instance().get_options().jvm_options

        if not use_youtline and self._rsc.use_native_image:
            if rsc_jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(rsc_jvm_options)))
            native_image_path, native_image_snapshot = self._rsc.native_image(
                self.context)
            additional_snapshots = [native_image_snapshot]
            initial_args = [native_image_path]
        else:
            additional_snapshots = []
            initial_args = (
                [distribution.java] + rsc_jvm_options +
                ["-cp", os.pathsep.join(tool_classpath), main])

        (argfile_snapshot, ) = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        cmd = initial_args + [f"@{argfile_snapshot.files[0]}"]

        pathglobs = list(tool_classpath)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    get_buildroot())
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].digest

        epr_input_files = self.context._scheduler.merge_directories(
            ((path_globs_input_digest, ) if path_globs_input_digest else ()) +
            ((input_digest, ) if input_digest else ()) +
            tuple(s.digest
                  for s in additional_snapshots) + (argfile_snapshot.digest, ))

        epr = Process(
            argv=tuple(cmd),
            input_digest=epr_input_files,
            output_files=(fast_relpath(ctx.rsc_jar_file.path,
                                       get_buildroot()), ),
            output_directories=tuple(),
            timeout_seconds=15 * 60,
            description=f"run {tool_name} for {ctx.target}",
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # Process requires an existing, local jdk location.
            jdk_home=distribution.underlying_home,
            is_nailgunnable=True,
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.COMPILER])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        # TODO: parse the output of -Xprint:timings for rsc and write it to self._record_target_stats()!

        res.output_digest.dump(ctx.rsc_jar_file.path)
        self.context._scheduler.materialize_directory(
            DirectoryToMaterialize(res.output_digest), )
        ctx.rsc_jar_file.hydrate_missing_directory_digest(res.output_digest)

        return res
Example #27
 def environment_dict(self) -> Dict[str, str]:
     return {
         "CPPFLAGS": safe_shlex_join(self.cpp_flags),
         "LDFLAGS": safe_shlex_join(self.ld_flags),
     }
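The flags above are joined with safe_shlex_join rather than a plain ' '.join because individual flags can contain spaces or quotes. A small standard-library illustration of why the quoting matters (the flag values here are made up):

import shlex

cpp_flags = ['-I/opt/my includes', '-DNAME="quoted value"']
joined = ' '.join(shlex.quote(flag) for flag in cpp_flags)
print(joined)
print(shlex.split(joined) == cpp_flags)  # True: the flags round-trip intact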
Example #28
    def as_environment(self):
        ret = {}

        if self.setup_requires_site_dir:
            ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

        # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
        # executables more hygienically. We should probably be composing each datatype's members, and
        # only creating an environment at the very end.
        native_tools = self.setup_py_native_tools
        if native_tools:
            # An as_tuple() method for datatypes could make this destructuring cleaner!  Alternatively,
            # constructing this environment could be done more compositionally instead of requiring all of
            # these disparate fields together at once.
            plat = native_tools.platform
            c_toolchain = native_tools.c_toolchain
            c_compiler = c_toolchain.c_compiler
            c_linker = c_toolchain.c_linker

            cpp_toolchain = native_tools.cpp_toolchain
            cpp_compiler = cpp_toolchain.cpp_compiler
            cpp_linker = cpp_toolchain.cpp_linker

            all_path_entries = (c_compiler.path_entries +
                                c_linker.path_entries +
                                cpp_compiler.path_entries +
                                cpp_linker.path_entries)
            ret['PATH'] = create_path_env_var(all_path_entries)

            all_library_dirs = (c_compiler.library_dirs +
                                c_linker.library_dirs +
                                cpp_compiler.library_dirs +
                                cpp_linker.library_dirs)
            joined_library_dirs = create_path_env_var(all_library_dirs)
            dynamic_lib_env_var = plat.resolve_platform_specific({
                'darwin': lambda: 'DYLD_LIBRARY_PATH',
                'linux': lambda: 'LD_LIBRARY_PATH',
            })
            ret[dynamic_lib_env_var] = joined_library_dirs

            all_linking_library_dirs = (c_linker.linking_library_dirs +
                                        cpp_linker.linking_library_dirs)
            ret['LIBRARY_PATH'] = create_path_env_var(all_linking_library_dirs)

            all_include_dirs = cpp_compiler.include_dirs + c_compiler.include_dirs
            ret['CPATH'] = create_path_env_var(all_include_dirs)

            shared_compile_flags = safe_shlex_join(
                plat.resolve_platform_specific({
                    'darwin': lambda: [MIN_OSX_VERSION_ARG],
                    'linux': lambda: [],
                }))
            ret['CFLAGS'] = shared_compile_flags
            ret['CXXFLAGS'] = shared_compile_flags

            ret['CC'] = c_compiler.exe_filename
            ret['CXX'] = cpp_compiler.exe_filename
            ret['LDSHARED'] = cpp_linker.exe_filename

            all_new_ldflags = cpp_linker.extra_args + plat.resolve_platform_specific(
                self._SHARED_CMDLINE_ARGS)
            ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

        return ret
Example #29
  def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
                        compiler_bridge_classpath_entry, dependency_classpath,
                        scalac_classpath_entries):
    zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())

    snapshots = [
      self._zinc.snapshot(self.context._scheduler),
      ctx.target.sources_snapshot(self.context._scheduler),
    ]

    relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
    directory_digests = tuple(
      entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
    )
    if len(directory_digests) != len(relevant_classpath_entries):
      for dep in relevant_classpath_entries:
        if dep.directory_digest is None:
          logger.warning(
            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
            "execution".format(dep)
          )

    snapshots.extend(
      classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
    )

    if self._zinc.use_native_image:
      if jvm_options:
        raise ValueError(
          "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
          "unsupported. jvm_options received: {}".format(self.options_scope, safe_shlex_join(jvm_options))
        )
      native_image_path, native_image_snapshot = self._zinc.native_image(self.context)
      additional_snapshots = (native_image_snapshot.directory_digest,)
      scala_boot_classpath = [
          classpath_entry.path for classpath_entry in scalac_classpath_entries
        ] + [
          # We include rt.jar on the scala boot classpath because the compiler usually gets its
          # contents from the VM it is executing in, but not in the case of a native image. This
          # resolves a `object java.lang.Object in compiler mirror not found.` error.
          '.jdk/jre/lib/rt.jar',
          # The same goes for the jce.jar, which provides javax.crypto.
          '.jdk/jre/lib/jce.jar',
        ]
      image_specific_argv =  [
        native_image_path,
        '-java-home', '.jdk',
        '-Dscala.boot.class.path={}'.format(os.pathsep.join(scala_boot_classpath)),
        '-Dscala.usejavacp=true',
      ]
    else:
      additional_snapshots = ()
      image_specific_argv =  ['.jdk/bin/java'] + jvm_options + [
        '-cp', zinc_relpath,
        Zinc.ZINC_COMPILE_MAIN
      ]

    # TODO: Extract something common from Executor._create_command to make the command line
    # TODO: Lean on distribution for the bin/java appending here
    merged_input_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in snapshots) +
      directory_digests +
      additional_snapshots
    )


    argv = image_specific_argv + zinc_args
    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in argv]

    req = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=merged_input_digest,
      output_directories=(classes_dir,),
      description="zinc compile for {}".format(ctx.target.address.spec),
      jdk_home=self._zinc.underlying_dist.home,
    )
    res = self.context.execute_process_synchronously_or_raise(
      req, self.name(), [WorkUnitLabel.COMPILER])

    # TODO: Materialize as a batch in do_compile or somewhere
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
    ))

    # TODO: This should probably return a ClasspathEntry rather than a Digest
    return res.output_directory_digest
Example #30
    def _compile_hermetic(self, jvm_options, ctx, classes_dir, jar_file,
                          compiler_bridge_classpath_entry,
                          dependency_classpath, scalac_classpath_entries):
        zinc_relpath = fast_relpath(self._zinc.zinc.path, get_buildroot())

        snapshots = [
            ctx.target.sources_snapshot(self.context._scheduler),
        ]

        # scala_library() targets with java_sources have circular dependencies on those java source
        # files, and we provide them to the same zinc command line that compiles the scala, so we need
        # to make sure those source files are available in the hermetic execution sandbox.
        java_sources_targets = getattr(ctx.target, 'java_sources', [])
        java_sources_snapshots = [
            tgt.sources_snapshot(self.context._scheduler)
            for tgt in java_sources_targets
        ]
        snapshots.extend(java_sources_snapshots)

        # Ensure the dependencies and compiler bridge jars are available in the execution sandbox.
        relevant_classpath_entries = (
            dependency_classpath + [
                compiler_bridge_classpath_entry,
                # We include nailgun-server, to use it to start servers when needed from the hermetic execution case.
                self._nailgun_server_classpath_entry(),
            ])
        directory_digests = [
            entry.directory_digest for entry in relevant_classpath_entries
            if entry.directory_digest
        ]
        if len(directory_digests) != len(relevant_classpath_entries):
            for dep in relevant_classpath_entries:
                if not dep.directory_digest:
                    raise AssertionError(
                        "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                        "execution of zinc".format(dep))
        directory_digests.extend(
            classpath_entry.directory_digest
            for classpath_entry in scalac_classpath_entries)

        if self._zinc.use_native_image:
            if jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(jvm_options)))
            native_image_path, native_image_snapshot = self._zinc.native_image(
                self.context)
            native_image_snapshots = [
                native_image_snapshot.directory_digest,
            ]
            scala_boot_classpath = [
                classpath_entry.path
                for classpath_entry in scalac_classpath_entries
            ] + [
                # We include rt.jar on the scala boot classpath because the compiler usually gets its
                # contents from the VM it is executing in, but not in the case of a native image. This
                # resolves a `object java.lang.Object in compiler mirror not found.` error.
                '.jdk/jre/lib/rt.jar',
                # The same goes for the jce.jar, which provides javax.crypto.
                '.jdk/jre/lib/jce.jar',
            ]
            image_specific_argv = [
                native_image_path,
                '-java-home',
                '.jdk',
                '-Dscala.boot.class.path={}'.format(
                    os.pathsep.join(scala_boot_classpath)),
                '-Dscala.usejavacp=true',
            ]
        else:
            native_image_snapshots = []
            # TODO: Lean on distribution for the bin/java appending here
            image_specific_argv = ['.jdk/bin/java'] + jvm_options + [
                '-cp', zinc_relpath, Zinc.ZINC_COMPILE_MAIN
            ]

        argfile_snapshot, = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        relpath_to_analysis = fast_relpath(ctx.analysis_file, get_buildroot())
        merged_local_only_scratch_inputs = self._compute_local_only_inputs(
            classes_dir, relpath_to_analysis, jar_file)

        # TODO: Extract something common from Executor._create_command to make the command line
        argv = image_specific_argv + ['@{}'.format(argfile_snapshot.files[0])]

        merged_input_digest = self.context._scheduler.merge_directories(
            [self._zinc.zinc.directory_digest] +
            [s.directory_digest for s in snapshots] + directory_digests +
            native_image_snapshots + [
                self.post_compile_extra_resources_digest(ctx),
                argfile_snapshot.directory_digest
            ])

        # NB: We always capture the output jar, but if classpath jars are not used, we additionally
        # capture loose classes from the workspace. This is because we need to both:
        #   1) allow loose classes as an input to dependent compiles
        #   2) allow jars to be materialized at the end of the run.
        output_directories = () if self.get_options().use_classpath_jars else (
            classes_dir, )

        req = ExecuteProcessRequest(
            argv=tuple(argv),
            input_files=merged_input_digest,
            output_files=(jar_file, relpath_to_analysis),
            output_directories=output_directories,
            description="zinc compile for {}".format(ctx.target.address.spec),
            unsafe_local_only_files_because_we_favor_speed_over_correctness_for_this_rule=merged_local_only_scratch_inputs,
            jdk_home=self._zinc.underlying_dist.home,
            is_nailgunnable=True,
        )
        res = self.context.execute_process_synchronously_or_raise(
            req, self.name(), [WorkUnitLabel.COMPILER])

        # TODO: Materialize as a batch in do_compile or somewhere
        self.context._scheduler.materialize_directory(
            DirectoryToMaterialize(res.output_directory_digest))

        # TODO: This should probably return a ClasspathEntry rather than a Digest
        return res.output_directory_digest
Example #31
  def as_environment(self):
    ret = {}

    if self.setup_requires_site_dir:
      ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

    # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
    # executables more hygienically. We should probably be composing each datatype's members, and
    # only creating an environment at the very end.
    native_tools = self.setup_py_native_tools
    if native_tools:
      # An as_tuple() method for datatypes could make this destructuring cleaner!  Alternatively,
      # constructing this environment could be done more compositionally instead of requiring all of
      # these disparate fields together at once.
      plat = native_tools.platform
      c_toolchain = native_tools.c_toolchain
      c_compiler = c_toolchain.c_compiler
      c_linker = c_toolchain.c_linker

      cpp_toolchain = native_tools.cpp_toolchain
      cpp_compiler = cpp_toolchain.cpp_compiler
      cpp_linker = cpp_toolchain.cpp_linker

      all_path_entries = (
        c_compiler.path_entries +
        c_linker.path_entries +
        cpp_compiler.path_entries +
        cpp_linker.path_entries)
      ret['PATH'] = create_path_env_var(all_path_entries)

      all_library_dirs = (
        c_compiler.library_dirs +
        c_linker.library_dirs +
        cpp_compiler.library_dirs +
        cpp_linker.library_dirs)
      joined_library_dirs = create_path_env_var(all_library_dirs)
      dynamic_lib_env_var = plat.resolve_platform_specific({
        'darwin': lambda: 'DYLD_LIBRARY_PATH',
        'linux': lambda: 'LD_LIBRARY_PATH',
      })
      ret[dynamic_lib_env_var] = joined_library_dirs

      all_linking_library_dirs = (c_linker.linking_library_dirs + cpp_linker.linking_library_dirs)
      ret['LIBRARY_PATH'] = create_path_env_var(all_linking_library_dirs)

      all_include_dirs = cpp_compiler.include_dirs + c_compiler.include_dirs
      ret['CPATH'] = create_path_env_var(all_include_dirs)

      shared_compile_flags = safe_shlex_join(plat.resolve_platform_specific({
        'darwin': lambda: [MIN_OSX_VERSION_ARG],
        'linux': lambda: [],
      }))
      ret['CFLAGS'] = shared_compile_flags
      ret['CXXFLAGS'] = shared_compile_flags

      ret['CC'] = c_compiler.exe_filename
      ret['CXX'] = cpp_compiler.exe_filename
      ret['LDSHARED'] = cpp_linker.exe_filename

      all_new_ldflags = cpp_linker.extra_args + plat.resolve_platform_specific(
        self._SHARED_CMDLINE_ARGS)
      ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

    return ret
Example #32
 def invocation_environment_dict(self):
   return {
     'CPPFLAGS': safe_shlex_join(self.cpp_flags),
     'LDFLAGS': safe_shlex_join(self.ld_flags),
   }
Example #33
 def _pretty_cmdline(self, args):
   return safe_shlex_join(self._pex.cmdline(args))