Example #1
 def environment_dict(self) -> Mapping[str, str]:
     return dict(
         PATH=create_path_env_var(self.path),
         PEX_PYTHON_PATH=create_path_env_var(self.interpreter_search_paths),
         PEX_INHERIT_PATH="false",
         PEX_IGNORE_RCFILES="true",
         **self.subprocess_environment_dict,
     )
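The examples in this listing all rely on create_path_env_var to collapse a list of filesystem entries into a single PATH-style value. As a rough sketch only (the real helper lives in the Pants codebase and may differ in argument names, validation, and de-duplication), it behaves approximately like joining the entries with os.pathsep and optionally merging them with an existing environment value:

import os
from typing import Iterable, Mapping, Optional

# Illustrative stand-in, not the actual implementation.
def create_path_env_var_sketch(
    new_entries: Iterable[str],
    env: Optional[Mapping[str, str]] = None,
    env_var: str = "PATH",
    prepend: bool = False,
) -> str:
    joined = os.pathsep.join(new_entries)
    existing = (env or {}).get(env_var)
    if not existing:
        return joined
    # prepend=True puts the new entries ahead of the existing value, as in the
    # PATH manipulation seen in Examples #13, #26, and #31.
    return os.pathsep.join([joined, existing] if prepend else [existing, joined])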
Example #2
 def as_invocation_environment_dict(self):
   lib_env_var = self._platform.resolve_platform_specific({
     'darwin': lambda: 'DYLD_LIBRARY_PATH',
     'linux': lambda: 'LD_LIBRARY_PATH',
   })
   return {
     'PATH': create_path_env_var(self.path_entries),
     lib_env_var: create_path_env_var(self.library_dirs),
   }
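The resolve_platform_specific pattern above maps a platform name to a zero-argument thunk and invokes the one matching the current platform. A minimal, hypothetical stand-in (not the Pants Platform API) could look like this:

import sys

def resolve_platform_specific_sketch(platform_specific_funcs):
    # Pick the thunk registered for the current platform and call it.
    key = "darwin" if sys.platform == "darwin" else "linux"
    return platform_specific_funcs[key]()

lib_env_var = resolve_platform_specific_sketch({
    "darwin": lambda: "DYLD_LIBRARY_PATH",
    "linux": lambda: "LD_LIBRARY_PATH",
})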
Example #3
 def as_invocation_environment_dict(self):
   lib_env_var = self._platform.resolve_platform_specific({
     'darwin': lambda: 'DYLD_LIBRARY_PATH',
     'linux': lambda: 'LD_LIBRARY_PATH',
   })
   return {
     'PATH': create_path_env_var(self.path_entries),
     lib_env_var: create_path_env_var(self.library_dirs),
   }
Example #4
    def as_environment(self):
        ret = {}

        if self.setup_requires_site_dir:
            ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

        # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
        # executables more hygienically. We should probably be composing each datatype's members, and
        # only creating an environment at the very end.
        native_tools = self.setup_py_native_tools
        if native_tools:
            # TODO: an as_tuple() method for datatypes would make this destructuring cleaner!
            plat = native_tools.platform
            cc = native_tools.c_compiler
            cxx = native_tools.cpp_compiler
            linker = native_tools.linker

            all_path_entries = cc.path_entries + cxx.path_entries + linker.path_entries
            ret['PATH'] = create_path_env_var(all_path_entries)

            all_library_dirs = cc.library_dirs + cxx.library_dirs + linker.library_dirs
            if all_library_dirs:
                joined_library_dirs = create_path_env_var(all_library_dirs)
                ret['LIBRARY_PATH'] = joined_library_dirs
                dynamic_lib_env_var = plat.resolve_platform_specific({
                    'darwin':
                    lambda: 'DYLD_LIBRARY_PATH',
                    'linux':
                    lambda: 'LD_LIBRARY_PATH',
                })
                ret[dynamic_lib_env_var] = joined_library_dirs

            all_include_dirs = cc.include_dirs + cxx.include_dirs
            if all_include_dirs:
                ret['CPATH'] = create_path_env_var(all_include_dirs)

            all_cflags_for_platform = plat.resolve_platform_specific({
                'darwin':
                lambda: ['-mmacosx-version-min=10.11'],
                'linux':
                lambda: [],
            })
            if all_cflags_for_platform:
                ret['CFLAGS'] = safe_shlex_join(all_cflags_for_platform)

            ret['CC'] = cc.exe_filename
            ret['CXX'] = cxx.exe_filename
            ret['LDSHARED'] = linker.exe_filename

            all_new_ldflags = plat.resolve_platform_specific(
                self._SHARED_CMDLINE_ARGS)
            ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

        return ret
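Example #4 also uses safe_shlex_join to turn an argument list into a single shell-safe string for CFLAGS and LDFLAGS. Assuming it behaves like shell-quoting each argument and joining with spaces (a sketch, not the actual implementation):

import shlex

def safe_shlex_join_sketch(arg_list):
    # Quote each argument so the joined string can later be split back into the same list.
    return " ".join(shlex.quote(arg) for arg in arg_list)

print(safe_shlex_join_sketch(["-mmacosx-version-min=10.11", "-I/path with spaces/include"]))
# -mmacosx-version-min=10.11 '-I/path with spaces/include'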
Example #5
  def invocation_environment_dict(self):
    """A dict to use as this _Executable's execution environment.

    This isn't made into an "algebraic" field because its contents (the keys of the dict) are
    generally known to the specific class which is overriding this property. Implementations of this
    property can then make use of the data in the algebraic fields to populate this dict.

    :rtype: dict of string -> string
    """
    return {
      'PATH': create_path_env_var(self.path_entries),
      self._platform.runtime_lib_path_env_var: create_path_env_var(self.runtime_library_dirs),
    }
Example #6
    def as_invocation_environment_dict(self):
        """A dict to use as this Executable's execution environment.

        :rtype: dict of string -> string
        """
        lib_env_var = self._platform.resolve_platform_specific({
            'darwin':
            lambda: 'DYLD_LIBRARY_PATH',
            'linux':
            lambda: 'LD_LIBRARY_PATH',
        })
        return {
            'PATH': create_path_env_var(self.path_entries),
            lib_env_var: create_path_env_var(self.library_dirs),
        }
Example #7
    def as_environment(self):
        ret = {}

        # TODO(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
        # executables more hygienically. We should probably be composing each datatype's members, and
        # only creating an environment at the very end.
        native_tools = self.setup_py_native_tools
        if native_tools:
            # An as_tuple() method for datatypes could make this destructuring cleaner!  Alternatively,
            # constructing this environment could be done more compositionally instead of requiring all of
            # these disparate fields together at once.
            c_toolchain = native_tools.c_toolchain
            c_compiler = c_toolchain.c_compiler
            c_linker = c_toolchain.c_linker

            cpp_toolchain = native_tools.cpp_toolchain
            cpp_compiler = cpp_toolchain.cpp_compiler
            cpp_linker = cpp_toolchain.cpp_linker

            all_path_entries = (c_compiler.path_entries +
                                c_linker.path_entries +
                                cpp_compiler.path_entries +
                                cpp_linker.path_entries)
            # TODO(#6273): We prepend our toolchain to the PATH instead of overwriting it -- we need
            # better control of the distutils compilation environment if we want to actually isolate the
            # PATH (distutils does lots of sneaky things).
            ret['PATH'] = create_path_env_var(all_path_entries,
                                              env=os.environ.copy(),
                                              prepend=True)

            # GCC will output smart quotes in a variety of situations (leading to decoding errors
            # downstream) unless we set this environment variable.
            ret['LC_ALL'] = 'C'

        return ret
Example #8
    def test_generic_pex_creation(self) -> None:
        input_files_content = InputFilesContent((
            FileContent(path='main.py', content=b'print("from main")'),
            FileContent(path='subdir/sub.py', content=b'print("from sub")'),
        ))

        input_files = self.request_single_product(Digest, input_files_content)
        pex_output = self.create_pex_and_get_all_data(entry_point='main',
                                                      input_files=input_files)

        pex_files = pex_output['files']
        self.assertTrue('pex' not in pex_files)
        self.assertTrue('main.py' in pex_files)
        self.assertTrue('subdir/sub.py' in pex_files)

        python_setup = PythonSetup.global_instance()
        env = {
            "PATH": create_path_env_var(python_setup.interpreter_search_paths)
        }

        pex = pex_output['pex']

        req = ExecuteProcessRequest(
            argv=('python', 'test.pex'),
            env=env,
            input_files=pex.directory_digest,
            description="Run the pex and make sure it works")
        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, b"from main\n")
Example #9
  def invocation_environment_dict(self):
    ret = super(_CompilerMixin, self).invocation_environment_dict.copy()

    if self.include_dirs:
      ret['CPATH'] = create_path_env_var(self.include_dirs)

    return ret
Example #10
  def as_invocation_environment_dict(self):
    ret = super(CompilerMixin, self).as_invocation_environment_dict.copy()

    if self.include_dirs:
      ret['CPATH'] = create_path_env_var(self.include_dirs)

    return ret
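The CPATH variable set above is read by gcc and clang as an extra list of include directories, so populating it in the invocation environment has roughly the same effect as passing -I for each entry. A small illustration with a hypothetical include directory:

import os
import subprocess

env = dict(os.environ, CPATH=os.pathsep.join(["/opt/mylibs/include"]))  # hypothetical path
# subprocess.run(["gcc", "-c", "foo.c"], env=env)  # would also search /opt/mylibs/include for headers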
Example #11
    def test_pex_execution(self) -> None:
        input_files_content = InputFilesContent(
            (
                FileContent(path="main.py", content=b'print("from main")'),
                FileContent(path="subdir/sub.py", content=b'print("from sub")'),
            )
        )

        input_files = self.request_single_product(Digest, input_files_content)
        pex_output = self.create_pex_and_get_all_data(entry_point="main", input_files=input_files)

        pex_files = pex_output["files"]
        self.assertTrue("pex" not in pex_files)
        self.assertTrue("main.py" in pex_files)
        self.assertTrue("subdir/sub.py" in pex_files)

        init_subsystem(PythonSetup)
        python_setup = PythonSetup.global_instance()
        env = {"PATH": create_path_env_var(python_setup.interpreter_search_paths)}

        req = ExecuteProcessRequest(
            argv=("python", "test.pex"),
            env=env,
            input_files=pex_output["pex"].directory_digest,
            description="Run the pex and make sure it works",
        )
        result = self.request_single_product(ExecuteProcessResult, req)
        self.assertEqual(result.stdout, b"from main\n")
Example #12
    def run_tests(self, fail_fast, test_targets, args_by_target):
        self.context.log.debug('test_targets: {}'.format(test_targets))

        with self.chroot(test_targets, self._maybe_workdir) as chroot:
            cmdline_args = self._build_and_test_flags + [
                args_by_target[t].import_path for t in test_targets
            ] + self.get_passthru_args()
            gopath = create_path_env_var(args_by_target[t].gopath
                                         for t in test_targets)
            go_cmd = self.go_dist.create_go_cmd('test',
                                                gopath=gopath,
                                                args=cmdline_args)

            self.context.log.debug('go_cmd: {}'.format(go_cmd))

            workunit_labels = [WorkUnitLabel.TOOL, WorkUnitLabel.TEST]
            with self.context.new_workunit(name='go test',
                                           cmd=safe_shlex_join(go_cmd.cmdline),
                                           labels=workunit_labels) as workunit:

                exit_code = self.spawn_and_wait(test_targets,
                                                workunit=workunit,
                                                go_cmd=go_cmd,
                                                cwd=chroot)
                return TestResult.rc(exit_code)
Example #13
    def _run_zef_command(self, workunit_factory, argv):
        subproc_env = os.environ.copy()
        subproc_env['PATH'] = create_path_env_var(self.path_entries,
                                                  subproc_env,
                                                  prepend=True)

        all_argv = ['zef'] + argv
        pretty_printed_argv = safe_shlex_join(all_argv)
        try:
            if workunit_factory:
                with workunit_factory(cmd=pretty_printed_argv) as workunit:
                    return subprocess.check_call(
                        all_argv,
                        env=subproc_env,
                        stdout=workunit.output('stdout'),
                        stderr=workunit.output('stderr'))
            else:
                output = subprocess.check_output(all_argv, env=subproc_env)
                logger.debug(
                    "output from running zef command {!r} with env {!r}:\n{}".
                    format(all_argv, subproc_env, output))
        except (OSError, subprocess.CalledProcessError) as e:
            raise self.ZefException("Error with zef command '{}': {}".format(
                pretty_printed_argv, e),
                                    e,
                                    exit_code=e.returncode)
Example #14
    def env_dict(self) -> FrozenDict[str, str]:
        """Setup for the `env` for `Process`es that run Bash.

        Call sites must opt into using this value by requesting
        `BashSetup` as a parameter to their rule, then setting
        `env=bash_setup.env_dict` for any relevant `Process`.
        """
        return FrozenDict({"PATH": create_path_env_var(self.executable_search_path)})
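As the docstring says, call sites opt in by requesting BashSetup and wiring env_dict into their Process. A hypothetical call site might look like the following (the rule name and command are made up, and import paths may differ between Pants versions):

from pants.engine.process import FallibleProcessResult, Process
from pants.engine.rules import Get, rule

@rule
async def run_bash_one_liner(bash_setup: BashSetup) -> FallibleProcessResult:
    # Opt in to the PATH computed by BashSetup by passing env_dict as the Process env.
    return await Get(
        FallibleProcessResult,
        Process(
            argv=("bash", "-c", "echo hello"),
            description="Run a bash one-liner",
            env=bash_setup.env_dict,
        ),
    )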
Example #15
  def invocation_environment_dict(self):
    """A dict to use as this _Executable's execution environment.

    This isn't made into an "algebraic" field because its contents (the keys of the dict) are
    generally known to the specific class which is overriding this property. Implementations of this
    property can then make use of the data in the algebraic fields to populate this dict.

    :rtype: dict of string -> string
    """
    lib_env_var = self._platform.resolve_for_enum_variant({
      'darwin': 'DYLD_LIBRARY_PATH',
      'linux': 'LD_LIBRARY_PATH',
    })
    return {
      'PATH': create_path_env_var(self.path_entries),
      lib_env_var: create_path_env_var(self.runtime_library_dirs),
    }
Example #16
  def as_invocation_environment_dict(self):
    ret = super(LinkerMixin, self).as_invocation_environment_dict.copy()

    ret.update({
      'LDSHARED': self.exe_filename,
      'LIBRARY_PATH': create_path_env_var(self.linking_library_dirs),
    })

    return ret
Example #17
def create_requirements_pex(request, pex_bin, python_setup,
                            pex_build_environment, platform):
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    interpreter_search_paths = create_path_env_var(
        python_setup.interpreter_search_paths)
    env = {
        "PATH": interpreter_search_paths,
        **pex_build_environment.invocation_environment_dict
    }

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
    # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
    # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
    # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
    # necessarily the interpreter that PEX will use to execute the generated .pex file.
    # TODO(#7735): Set --python-setup-interpreter-search-paths differently for the host and target
    # platforms, when we introduce platforms in https://github.com/pantsbuild/pants/issues/7735.
    argv = [
        "python", f"./{pex_bin.executable}", "--output-file",
        request.output_filename
    ]
    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))
    # NOTE
    # PEX outputs are platform dependent, so in order to get a PEX that we can use locally without
    # cross-building, we specify that our PEX command be run on the current local platform. When we
    # support cross-building through CLI flags, we can configure requests that build a PEX for our
    # local platform that is able to execute on a different platform, but for now, in order to
    # guarantee a correct build, we need to restrict this command to execute on the same platform
    # type that the output is intended for. The correct way to interpret the keys
    # (execution_platform_constraint, target_platform_constraint) of this dictionary is "The output
    # of this command is intended for `target_platform_constraint` iff it is run on
    # `execution_platform_constraint`".
    execute_process_request = MultiPlatformExecuteProcessRequest(
        {
            (PlatformConstraint(platform.value),
             PlatformConstraint(platform.value)):
            ExecuteProcessRequest(
                argv=tuple(argv),
                env=env,
                input_files=pex_bin.directory_digest,
                description=
                f"Create a requirements PEX: {', '.join(request.requirements)}",
                output_files=(request.output_filename, ))
        })

    result = yield Get(ExecuteProcessResult,
                       MultiPlatformExecuteProcessRequest,
                       execute_process_request)
    yield RequirementsPex(directory_digest=result.output_directory_digest)
Example #18
  def as_invocation_environment_dict(self):
    ret = super(LinkerMixin, self).as_invocation_environment_dict.copy()

    ret.update({
      'LDSHARED': self.exe_filename,
      'LIBRARY_PATH': create_path_env_var(self.linking_library_dirs),
    })

    return ret
Example #19
    def environment_dict(self, *,
                         python_configured: bool) -> Mapping[str, str]:
        """The environment to use for running anything with PEX.

        If the Process is run with a pre-selected Python interpreter, set `python_configured=True`
        to prevent PEX from trying to find a new interpreter.
        """
        d = dict(
            PATH=create_path_env_var(self.path),
            PEX_INHERIT_PATH="false",
            PEX_IGNORE_RCFILES="true",
            **self.subprocess_environment_dict,
        )
        # NB: We only set `PEX_PYTHON_PATH` if the Python interpreter has not already been
        # pre-selected by Pants. Otherwise, Pex may try to find another interpreter.
        if not python_configured:
            d["PEX_PYTHON_PATH"] = create_path_env_var(
                self.interpreter_search_paths)
        return d
Example #20
    def environment_dict(self, *, python_configured: bool) -> Mapping[str, str]:
        """The environment to use for running anything with PEX.

        If the Process is run with a pre-selected Python interpreter, set `python_configured=True`
        to prevent PEX from trying to find a new interpreter.
        """
        d = dict(
            PATH=create_path_env_var(self.path),
            PEX_INHERIT_PATH="false",
            PEX_IGNORE_RCFILES="true",
            PEX_ROOT=os.path.join(self.named_caches_dir, "pex_root"),
            **self.subprocess_environment_dict,
        )
        # NB: We only set `PEX_PYTHON_PATH` if the Python interpreter has not already been
        # pre-selected by Pants. Otherwise, Pex would inadvertently try to find another interpreter
        # when running PEXes. (Creating a PEX will ignore this env var in favor of `--python-path`.)
        if not python_configured:
            d["PEX_PYTHON_PATH"] = create_path_env_var(self.interpreter_search_paths)
        return d
Example #21
    def invocation_environment_dict(self):
        """A dict to use as this _Executable's execution environment.

        This isn't made into an "algebraic" field because its contents (the keys of the dict) are
        generally known to the specific class which is overriding this property. Implementations of this
        property can then make use of the data in the algebraic fields to populate this dict.

        :rtype: dict of string -> string
        """
        lib_path_env_var: str = match(
            Platform.current,
            {
                Platform.darwin: "DYLD_LIBRARY_PATH",
                Platform.linux: "LD_LIBRARY_PATH"
            },
        )

        return {
            "PATH": create_path_env_var(self.path_entries),
            lib_path_env_var: create_path_env_var(self.runtime_library_dirs),
        }
Example #22
  def get_invocation_environment_dict(self, platform):
    ret = super(CompilerMixin, self).get_invocation_environment_dict(platform).copy()

    if self.include_dirs:
      ret['CPATH'] = create_path_env_var(self.include_dirs)

    all_cflags_for_platform = platform.resolve_platform_specific({
      'darwin': lambda: ['-mmacosx-version-min=10.11'],
      'linux': lambda: [],
    })
    ret['CFLAGS'] = safe_shlex_join(all_cflags_for_platform)

    return ret
Example #23
  def invocation_environment_dict(self):
    ret = super(_LinkerMixin, self).invocation_environment_dict.copy()

    full_library_path_dirs = self.linking_library_dirs + [
      os.path.dirname(f) for f in self.extra_object_files
    ]

    ret.update({
      'LDSHARED': self.exe_filename,
      'LIBRARY_PATH': create_path_env_var(full_library_path_dirs),
    })

    return ret
Example #24
  def invocation_environment_dict(self):
    ret = super(_LinkerMixin, self).invocation_environment_dict.copy()

    full_library_path_dirs = self.linking_library_dirs + [
      os.path.dirname(f) for f in self.extra_object_files
    ]

    ret.update({
      'LDSHARED': self.exe_filename,
      'LIBRARY_PATH': create_path_env_var(full_library_path_dirs),
    })

    return ret
Example #25
def resolve_requirements(request, python_setup, pex_build_environment):
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    # TODO: Inject versions and digests here through some option, rather than hard-coding it.
    url = 'https://github.com/pantsbuild/pex/releases/download/v1.6.8/pex'
    digest = Digest(
        '2ca320aede7e7bbcb907af54c9de832707a1df965fb5a0d560f2df29ba8a2f3d',
        1866441)
    pex_snapshot = yield Get(Snapshot, UrlToFetch(url, digest))

    interpreter_search_paths = create_path_env_var(
        python_setup.interpreter_search_paths)
    env = {
        "PATH": interpreter_search_paths,
        **pex_build_environment.invocation_environment_dict
    }

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
    # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
    # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
    # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
    # necessarily the interpreter that PEX will use to execute the generated .pex file.
    # TODO(#7735): Set --python-setup-interpreter-search-paths differently for the host and target
    # platforms, when we introduce platforms in https://github.com/pantsbuild/pants/issues/7735.
    argv = [
        "python", "./{}".format(pex_snapshot.files[0]), "-o",
        request.output_filename
    ]
    if request.entry_point is not None:
        argv.extend(["-e", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))

    request = ExecuteProcessRequest(
        argv=tuple(argv),
        env=env,
        input_files=pex_snapshot.directory_digest,
        description='Resolve requirements: {}'.format(", ".join(
            request.requirements)),
        output_files=(request.output_filename, ),
    )

    result = yield Get(ExecuteProcessResult, ExecuteProcessRequest, request)
    yield ResolvedRequirementsPex(
        directory_digest=result.output_directory_digest, )
Example #26
  def _prepare_env(self, kwargs):
    """Returns a modifed copy of kwargs['env'], and a copy of kwargs with 'env' removed.

    If there is no 'env' field in the kwargs, os.environ.copy() is used.
    env['PATH'] is set/modified to contain the Node distribution's bin directory at the front.

    :param kwargs: The original kwargs.
    :returns: An (env, kwargs) tuple containing the modified env and kwargs copies.
    :rtype: (dict, dict)
    """
    kwargs = kwargs.copy()
    env = kwargs.pop('env', os.environ).copy()
    env['PATH'] = create_path_env_var(self.extra_paths, env=env, prepend=True)
    return env, kwargs
Example #27
    def _run_rakudobrew_command(self, argv):
        subproc_env = os.environ.copy()
        subproc_env['PATH'] = create_path_env_var(self.path_entries,
                                                  subproc_env,
                                                  prepend=True)

        all_argv = ['rakudobrew'] + argv
        pretty_printed_argv = safe_shlex_join(all_argv)
        try:
            return subprocess.check_output(all_argv, env=subproc_env)
        except (OSError, subprocess.CalledProcessError) as e:
            raise self.RakudoBrewBootstrapError(
                "Error with rakudobrew command '{}': {}".format(
                    pretty_printed_argv, e), e)
Example #28
  def _prepare_env(self, kwargs):
    """Returns a modifed copy of kwargs['env'], and a copy of kwargs with 'env' removed.

    If there is no 'env' field in the kwargs, os.environ.copy() is used.
    env['PATH'] is set/modified to contain the Node distribution's bin directory at the front.

    :param kwargs: The original kwargs.
    :returns: An (env, kwargs) tuple containing the modified env and kwargs copies.
    :rtype: (dict, dict)
    """
    kwargs = kwargs.copy()
    env = kwargs.pop('env', os.environ).copy()
    env['PATH'] = create_path_env_var(self.extra_paths, env=env, prepend=True)
    return env, kwargs
Example #29
    def create_execute_request(
            self,
            python_setup: PythonSetup,
            subprocess_encoding_environment: SubprocessEncodingEnvironment,
            *,
            pex_path: str,
            pex_args: Iterable[str],
            description: str,
            input_files: Digest,
            env: Optional[Mapping[str, str]] = None,
            **kwargs: Any) -> ExecuteProcessRequest:
        """Creates an ExecuteProcessRequest that will run a PEX hermetically.

        :param python_setup: The parameters for selecting python interpreters to use when invoking
                             the PEX.
        :param subprocess_encoding_environment: The locale settings to use for the PEX invocation.
        :param pex_path: The path within `input_files` of the PEX file (or directory if a loose
                         pex).
        :param pex_args: The arguments to pass to the PEX executable.
        :param description: A description of the process execution to be performed.
        :param input_files: The files that contain the pex itself and any input files it needs to
                            run against.
        :param env: The environment to run the PEX in.
        :param **kwargs: Any additional :class:`ExecuteProcessRequest` kwargs to pass through.
        """

        # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
        # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
        # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
        # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
        # necessarily the interpreter that PEX will use to execute the generated .pex file.
        # TODO(#7735): Set --python-setup-interpreter-search-paths differently for the host and target
        # platforms, when we introduce platforms in https://github.com/pantsbuild/pants/issues/7735.
        argv = ("python", pex_path, *pex_args)

        hermetic_env = dict(
            PATH=create_path_env_var(python_setup.interpreter_search_paths),
            PEX_ROOT="./pex_root",
            PEX_INHERIT_PATH="false",
            PEX_IGNORE_RCFILES="true",
            **subprocess_encoding_environment.invocation_environment_dict)
        if env:
            hermetic_env.update(env)

        return ExecuteProcessRequest(argv=argv,
                                     input_files=input_files,
                                     description=description,
                                     env=hermetic_env,
                                     **kwargs)
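A hypothetical call to the method above (all names here are placeholders for values provided elsewhere): given a pex file already captured in input_files, this would produce a hermetic ExecuteProcessRequest that runs it with one argument.

execute_request = some_pex_helper.create_execute_request(
    python_setup=python_setup,
    subprocess_encoding_environment=subprocess_encoding_environment,
    pex_path="./isort.pex",
    pex_args=("--version",),
    description="Report the isort version",
    input_files=isort_pex_digest,
)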
Example #30
def create_requirements_pex(request, pex_bin, python_setup,
                            pex_build_environment):
    """Returns a PEX with the given requirements, optional entry point, and optional
  interpreter constraints."""

    interpreter_search_paths = create_path_env_var(
        python_setup.interpreter_search_paths)
    env = {
        "PATH": interpreter_search_paths,
        **pex_build_environment.invocation_environment_dict
    }

    interpreter_constraint_args = []
    for constraint in request.interpreter_constraints:
        interpreter_constraint_args.extend(
            ["--interpreter-constraint", constraint])

    # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
    # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
    # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
    # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
    # necessarily the interpreter that PEX will use to execute the generated .pex file.
    # TODO(#7735): Set --python-setup-interpreter-search-paths differently for the host and target
    # platforms, when we introduce platforms in https://github.com/pantsbuild/pants/issues/7735.
    argv = [
        "python", f"./{pex_bin.executable}", "--output-file",
        request.output_filename
    ]
    if request.entry_point is not None:
        argv.extend(["--entry-point", request.entry_point])
    argv.extend(interpreter_constraint_args + list(request.requirements))

    execute_process_request = ExecuteProcessRequest(
        argv=tuple(argv),
        env=env,
        input_files=pex_bin.directory_digest,
        description=
        f"Create a requirements PEX: {', '.join(request.requirements)}",
        output_files=(request.output_filename, ),
    )

    result = yield Get(ExecuteProcessResult, ExecuteProcessRequest,
                       execute_process_request)
    yield RequirementsPex(directory_digest=result.output_directory_digest)
Example #31
  def _get_perl6_subproc_os_env(self, perl6_env):
    # NB: These source file containing directory paths are assumed to have been de-duped.
    source_lib_containing_dirs = list(perl6_env.source_lib_entries.containing_lib_dirs)
    zef_install_specs = [r.install_spec for r in perl6_env.zef_resolve_results]

    # NB: put the thirdparty resolve at the end.
    all_lib_entries = source_lib_containing_dirs + zef_install_specs
    perl6lib_joined = ensure_binary(self.PERL6LIB_SEP.join(map(ensure_binary, all_lib_entries)))

    full_path_var = create_path_env_var(self._rakudo_moar.path_entries, os.environ.copy(),
                                        prepend=True)

    invocation_env = os.environ.copy()
    invocation_env.update({
      'PERL6LIB': perl6lib_joined,
      'PATH': full_path_var,
    })

    return invocation_env
Example #32
async def find_binary(request: BinaryPathRequest) -> BinaryPaths:
    # TODO(John Sirois): Replace this script with a statically linked native binary so we don't
    #  depend on /bin/bash being available on the Process host.
    # TODO(#10507): Running the script directly from a shebang sometimes results in a "Text file
    #  busy" error.
    script_path = "./script.sh"
    script_content = dedent("""
        #!/usr/bin/env bash

        set -euo pipefail

        if command -v which > /dev/null; then
            command which -a $1
        else
            command -v $1
        fi
        """)
    script_digest = await Get(
        Digest,
        CreateDigest([
            FileContent(script_path,
                        script_content.encode(),
                        is_executable=True)
        ]),
    )

    paths = []
    search_path = create_path_env_var(request.search_path)
    result = await Get(
        FallibleProcessResult,
        Process(
            description=
            f"Searching for `{request.binary_name}` on PATH={search_path}",
            level=LogLevel.DEBUG,
            input_digest=script_digest,
            argv=[script_path, request.binary_name],
            env={"PATH": search_path},
        ),
    )
    if result.exit_code == 0:
        paths.extend(result.stdout.decode().splitlines())

    return BinaryPaths(binary_name=request.binary_name, paths=paths)
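For a local-only approximation of what the sandboxed script above computes, the standard library offers shutil.which; note that it returns only the first match on the search path, whereas the script lists every match:

import shutil

search_path = "/usr/local/bin:/usr/bin:/bin"  # e.g. the value built by create_path_env_var(request.search_path)
first_match = shutil.which("python3", path=search_path)
print(first_match)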
Example #33
  def get_invocation_environment_dict(self, platform):
    ret = super(Linker, self).get_invocation_environment_dict(platform).copy()

    # TODO: set all LDFLAGS in here or in further specializations of Linker instead of in individual
    # tasks.
    all_ldflags_for_platform = platform.resolve_platform_specific({
      'darwin': lambda: ['-mmacosx-version-min=10.11'],
      'linux': lambda: [],
    })
    ret.update({
      'LDSHARED': self.exe_filename,
      # FIXME: this overloads the meaning of 'library_dirs' to also mean "directories containing
      # static libraries required for creating an executable" (currently, libc). These concepts
      # should be distinct.
      'LIBRARY_PATH': create_path_env_var(self.library_dirs),
      'LDFLAGS': safe_shlex_join(all_ldflags_for_platform),
    })

    return ret
Example #34
  def as_environment(self):
    ret = {}

    if self.setup_requires_site_dir:
      ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

    # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
    # executables more hygienically. We should probably be composing each datatype's members, and
    # only creating an environment at the very end.
    native_tools = self.setup_py_native_tools
    if native_tools:
      # An as_tuple() method for datatypes could make this destructuring cleaner!  Alternatively,
      # constructing this environment could be done more compositionally instead of requiring all of
      # these disparate fields together at once.
      plat = native_tools.platform
      c_toolchain = native_tools.c_toolchain
      c_compiler = c_toolchain.c_compiler
      c_linker = c_toolchain.c_linker

      cpp_toolchain = native_tools.cpp_toolchain
      cpp_compiler = cpp_toolchain.cpp_compiler
      cpp_linker = cpp_toolchain.cpp_linker

      all_path_entries = (
        c_compiler.path_entries +
        c_linker.path_entries +
        cpp_compiler.path_entries +
        cpp_linker.path_entries)
      ret['PATH'] = create_path_env_var(all_path_entries)

      all_library_dirs = (
        c_compiler.library_dirs +
        c_linker.library_dirs +
        cpp_compiler.library_dirs +
        cpp_linker.library_dirs)
      joined_library_dirs = create_path_env_var(all_library_dirs)
      dynamic_lib_env_var = plat.resolve_platform_specific({
        'darwin': lambda: 'DYLD_LIBRARY_PATH',
        'linux': lambda: 'LD_LIBRARY_PATH',
      })
      ret[dynamic_lib_env_var] = joined_library_dirs

      all_linking_library_dirs = (c_linker.linking_library_dirs + cpp_linker.linking_library_dirs)
      ret['LIBRARY_PATH'] = create_path_env_var(all_linking_library_dirs)

      all_include_dirs = cpp_compiler.include_dirs + c_compiler.include_dirs
      ret['CPATH'] = create_path_env_var(all_include_dirs)

      shared_compile_flags = safe_shlex_join(plat.resolve_platform_specific({
        'darwin': lambda: [MIN_OSX_VERSION_ARG],
        'linux': lambda: [],
      }))
      ret['CFLAGS'] = shared_compile_flags
      ret['CXXFLAGS'] = shared_compile_flags

      ret['CC'] = c_compiler.exe_filename
      ret['CXX'] = cpp_compiler.exe_filename
      ret['LDSHARED'] = cpp_linker.exe_filename

      all_new_ldflags = cpp_linker.extra_args + plat.resolve_platform_specific(
        self._SHARED_CMDLINE_ARGS)
      ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

    return ret
Example #35
    def as_environment(self):
        ret = {}

        if self.setup_requires_site_dir:
            ret['PYTHONPATH'] = self.setup_requires_site_dir.site_dir

        # FIXME(#5951): the below is a lot of error-prone repeated logic -- we need a way to compose
        # executables more hygienically. We should probably be composing each datatype's members, and
        # only creating an environment at the very end.
        native_tools = self.setup_py_native_tools
        if native_tools:
            # An as_tuple() method for datatypes could make this destructuring cleaner!  Alternatively,
            # constructing this environment could be done more compositionally instead of requiring all of
            # these disparate fields together at once.
            plat = native_tools.platform
            c_toolchain = native_tools.c_toolchain
            c_compiler = c_toolchain.c_compiler
            c_linker = c_toolchain.c_linker

            cpp_toolchain = native_tools.cpp_toolchain
            cpp_compiler = cpp_toolchain.cpp_compiler
            cpp_linker = cpp_toolchain.cpp_linker

            all_path_entries = (c_compiler.path_entries +
                                c_linker.path_entries +
                                cpp_compiler.path_entries +
                                cpp_linker.path_entries)
            ret['PATH'] = create_path_env_var(all_path_entries)

            all_library_dirs = (c_compiler.library_dirs +
                                c_linker.library_dirs +
                                cpp_compiler.library_dirs +
                                cpp_linker.library_dirs)
            joined_library_dirs = create_path_env_var(all_library_dirs)
            dynamic_lib_env_var = plat.resolve_platform_specific({
                'darwin':
                lambda: 'DYLD_LIBRARY_PATH',
                'linux':
                lambda: 'LD_LIBRARY_PATH',
            })
            ret[dynamic_lib_env_var] = joined_library_dirs

            all_linking_library_dirs = (c_linker.linking_library_dirs +
                                        cpp_linker.linking_library_dirs)
            ret['LIBRARY_PATH'] = create_path_env_var(all_linking_library_dirs)

            all_include_dirs = cpp_compiler.include_dirs + c_compiler.include_dirs
            ret['CPATH'] = create_path_env_var(all_include_dirs)

            shared_compile_flags = safe_shlex_join(
                plat.resolve_platform_specific({
                    'darwin':
                    lambda: [MIN_OSX_VERSION_ARG],
                    'linux':
                    lambda: [],
                }))
            ret['CFLAGS'] = shared_compile_flags
            ret['CXXFLAGS'] = shared_compile_flags

            ret['CC'] = c_compiler.exe_filename
            ret['CXX'] = cpp_compiler.exe_filename
            ret['LDSHARED'] = cpp_linker.exe_filename

            all_new_ldflags = cpp_linker.extra_args + plat.resolve_platform_specific(
                self._SHARED_CMDLINE_ARGS)
            ret['LDFLAGS'] = safe_shlex_join(all_new_ldflags)

        return ret
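The variables assembled above (CC, CXX, LDSHARED, CFLAGS, CXXFLAGS, LDFLAGS, CPATH, LIBRARY_PATH) are the ones distutils and the C toolchain consult when building native extensions, so exporting them redirects the compile and link steps without modifying setup.py. A small, hypothetical illustration of exporting such an environment for a setup.py build:

import os
import subprocess

build_env = dict(
    os.environ,
    CC="clang",                               # hypothetical compiler choice
    CFLAGS="-mmacosx-version-min=10.11",
    LDFLAGS="-mmacosx-version-min=10.11",
)
# subprocess.run(["python", "setup.py", "build_ext"], env=build_env, check=True)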
Example #36
async def setup_pex_cli_process(
    request: PexCliProcess,
    pex_pex: PexPEX,
    pex_env: PexEnvironment,
    python_native_code: PythonNativeCode,
    global_options: GlobalOptions,
    pex_runtime_env: PexRuntimeEnvironment,
) -> Process:
    tmpdir = ".tmp"
    gets: List[Get] = [Get(Digest, CreateDigest([Directory(tmpdir)]))]

    # The certs file will typically not be in the repo, so we can't digest it via a PathGlobs.
    # Instead we manually create a FileContent for it.
    cert_args = []
    if global_options.ca_certs_path:
        ca_certs_content = Path(global_options.ca_certs_path).read_bytes()
        chrooted_ca_certs_path = os.path.basename(global_options.ca_certs_path)

        gets.append(
            Get(
                Digest,
                CreateDigest((FileContent(chrooted_ca_certs_path, ca_certs_content),)),
            )
        )
        cert_args = ["--cert", chrooted_ca_certs_path]

    digests_to_merge = [pex_pex.digest]
    digests_to_merge.extend(await MultiGet(gets))
    if request.additional_input_digest:
        digests_to_merge.append(request.additional_input_digest)
    input_digest = await Get(Digest, MergeDigests(digests_to_merge))

    global_args = [
        # Ensure Pex and its subprocesses create temporary files in the process execution
        # sandbox. It may make sense to do this generally for Processes, but in the short term we
        # have known use cases where /tmp is too small to hold large wheel downloads Pex is asked to
        # perform. Making the TMPDIR local to the sandbox allows control via
        # --local-execution-root-dir for the local case and should work well with remote cases where
        # a remoting implementation has to allow for processes producing large binaries in a
        # sandbox to support reasonable workloads. Communicating TMPDIR via --tmpdir instead of via
        # environment variable allows Pex to absolutize the path ensuring subprocesses that change
        # CWD can find the TMPDIR.
        "--tmpdir",
        tmpdir,
    ]

    if request.concurrency_available > 0:
        global_args.extend(["--jobs", "{pants_concurrency}"])

    if pex_runtime_env.verbosity > 0:
        global_args.append(f"-{'v' * pex_runtime_env.verbosity}")

    resolve_args = (
        [*cert_args, "--python-path", create_path_env_var(pex_env.interpreter_search_paths)]
        if request.set_resolve_args
        else []
    )
    args = [
        *global_args,
        *request.subcommand,
        *resolve_args,
        # NB: This comes at the end because it may use `--` passthrough args, which must come at
        # the end.
        *request.extra_args,
    ]

    complete_pex_env = pex_env.in_sandbox(working_directory=None)
    normalized_argv = complete_pex_env.create_argv(pex_pex.exe, *args, python=request.python)
    env = {
        **complete_pex_env.environment_dict(python_configured=request.python is not None),
        **python_native_code.environment_dict,
        **(request.extra_env or {}),
        # If a subcommand is used, we need to use the `pex3` console script.
        **({"PEX_SCRIPT": "pex3"} if request.subcommand else {}),
    }

    return Process(
        normalized_argv,
        description=request.description,
        input_digest=input_digest,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches=complete_pex_env.append_only_caches,
        level=request.level,
        concurrency_available=request.concurrency_available,
        cache_scope=request.cache_scope,
    )
Example #37
async def setup_shunit2_for_target(
    request: TestSetupRequest,
    shell_setup: ShellSetup,
    test_subsystem: TestSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    shunit2_download_file = DownloadFile(
        "https://raw.githubusercontent.com/kward/shunit2/b9102bb763cc603b3115ed30a5648bf950548097/shunit2",
        FileDigest(
            "1f11477b7948150d1ca50cdd41d89be4ed2acd137e26d2e0fe23966d0e272cc5",
            40987),
    )
    shunit2_script, transitive_targets, built_package_dependencies, env = await MultiGet(
        Get(Digest, DownloadFile, shunit2_download_file),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(
            BuiltPackageDependencies,
            BuildPackageDependenciesRequest(
                request.field_set.runtime_package_dependencies),
        ),
        Get(Environment, EnvironmentRequest(["PATH"])),
    )

    dependencies_source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.dependencies),
            for_sources_types=(ShellSources, FilesSources, ResourcesSources),
            enable_codegen=True,
        ),
    )
    dependencies_source_files, field_set_sources = await MultiGet(
        dependencies_source_files_request,
        Get(SourceFiles, SourceFilesRequest([request.field_set.sources])),
    )

    field_set_digest_content = await Get(DigestContents, Digest,
                                         field_set_sources.snapshot.digest)
    # Because a FieldSet corresponds to a file address, there should be exactly 1 file in the
    # sources. This assumption allows us to simplify determining which shell to use via inspecting
    # the shebang.
    if len(field_set_digest_content) != 1:
        raise AssertionError(
            f"The file address {request.field_set.address} had sources != 1, which is unexpected: "
            f"{field_set_sources.snapshot.files}. Please file a bug at "
            "https://github.com/pantsbuild/pants/issues/new with this error message copied."
        )
    original_test_file_content = field_set_digest_content[0]
    updated_test_file_content = add_source_shunit2(original_test_file_content)

    updated_test_digest, runner = await MultiGet(
        Get(Digest, CreateDigest([updated_test_file_content])),
        Get(
            Shunit2Runner,
            Shunit2RunnerRequest(request.field_set.address,
                                 original_test_file_content,
                                 request.field_set.shell),
        ),
    )

    input_digest = await Get(
        Digest,
        MergeDigests((
            shunit2_script,
            updated_test_digest,
            dependencies_source_files.snapshot.digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )

    env_dict = {
        "PATH": create_path_env_var(shell_setup.executable_search_path(env)),
        "SHUNIT_COLOR": "always" if global_options.options.colors else "none",
        **test_extra_env.env,
    }
    argv = (
        # Zsh requires extra args. See https://github.com/kward/shunit2/#-zsh.
        [
            runner.binary_path.path, "-o", "shwordsplit", "--",
            *field_set_sources.snapshot.files
        ] if runner.shell == Shunit2Shell.zsh else
        [runner.binary_path.path, *field_set_sources.snapshot.files])
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = Process(
        argv=argv,
        input_digest=input_digest,
        description=f"Run shunit2 for {request.field_set.address}.",
        level=LogLevel.DEBUG,
        env=env_dict,
        timeout_seconds=request.field_set.timeout.value,
        cache_scope=cache_scope,
    )
    return TestSetup(process)