Example #1
    def create(
            cls,
            path,  # type: str
            interpreter=None,  # type: Optional[PythonInterpreter]
    ):
        # type: (...) -> Pip
        """Creates a pip tool with PEX isolation at path.

        :param path: The path to assemble the pip tool at.
        :param interpreter: The interpreter to run Pip with. The current interpreter by default.
        :return: A Pip tool that can be used to execute Pip in isolation.
        """
        pip_interpreter = interpreter or PythonInterpreter.get()
        pip_pex_path = os.path.join(path, isolated().pex_hash)
        with atomic_directory(pip_pex_path, exclusive=True) as chroot:
            if not chroot.is_finalized:
                from pex.pex_builder import PEXBuilder

                isolated_pip_builder = PEXBuilder(path=chroot.work_dir)
                isolated_pip_builder.info.venv = True
                for dist_location in third_party.expose(
                    ["pip", "setuptools", "wheel"]):
                    isolated_pip_builder.add_dist_location(dist=dist_location)
                isolated_pip_builder.set_script("pip")
                isolated_pip_builder.freeze()
        pex_info = PexInfo.from_pex(pip_pex_path)
        pex_info.add_interpreter_constraint(
            str(pip_interpreter.identity.requirement))
        return cls(
            ensure_venv(
                PEX(pip_pex_path,
                    interpreter=pip_interpreter,
                    pex_info=pex_info)))
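A minimal usage sketch for the factory above; the import paths and the build location are assumptions based on the pex 2.x layout, not taken from the example itself:

# Hedged illustration: build (or reuse) an isolated Pip PEX and keep a handle to the tool.
from pex.common import safe_mkdtemp             # assumed import path
from pex.interpreter import PythonInterpreter   # assumed import path
from pex.pip import Pip                         # assumed import path

pip_tool = Pip.create(
    path=safe_mkdtemp(),                  # illustrative only; pex itself assembles this under a cache dir
    interpreter=PythonInterpreter.get(),  # the default when `interpreter` is omitted
)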
Example #2
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreter', V=2):
    interpreter = interpreter_from_options(options)

  if interpreter is None:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

  if options.cache_dir:
    resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
  else:
    resolver = Resolver(**resolver_kwargs)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolver.resolve(resolvables)
    except Unsatisfiable as e:
      die(e)

  for dist in resolveds:
    log('  %s' % dist, v=options.verbosity)
    pex_builder.add_distribution(dist)
    pex_builder.add_requirement(dist.as_requirement())

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
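A hedged sketch of how a caller typically consumes the returned builder, modeled on pex's own CLI; the `pex_name` output option is an assumption:

# Hedged illustration: write the final .pex file (or freeze the chroot in place).
pex_builder = build_pex(args, options, resolver_option_builder)
if options.pex_name is not None:
    # Produce a standalone, executable .pex file at the requested output path.
    pex_builder.build(options.pex_name)
else:
    # No output file requested; freeze the chroot so it can be executed directly.
    pex_builder.freeze()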
Example #3
 def build_isort_pex(cls, context, interpreter, pex_path,
                     requirements_lib):
     with safe_concurrent_creation(pex_path) as chroot:
         builder = PEXBuilder(path=chroot, interpreter=interpreter)
         dump_requirement_libs(builder=builder,
                               interpreter=interpreter,
                               req_libs=[requirements_lib],
                               log=context.log)
         builder.set_script('isort')
         builder.freeze()
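Once the `safe_concurrent_creation(pex_path)` block exits successfully, the frozen chroot is moved into place at `pex_path`. A hedged sketch of invoking the resulting tool; the `PEX` import path and the isort arguments are assumptions:

# Hedged illustration: run the isort PEX that was just assembled at pex_path.
from pex.pex import PEX  # assumed import path

exit_code = PEX(pex_path, interpreter=interpreter).run(args=['--check-only', 'src/'])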
Example #4
def build_pex(args, options, resolver_option_builder, interpreter=None):
    if interpreter is None:
        with TRACER.timed('Resolving interpreter', V=2):
            interpreter = interpreter_from_options(options)

    if interpreter is None:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl,
                                   **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #5
def build_pex(args, options, resolver_option_builder, interpreter=None):
    if interpreter is None:
        with TRACER.timed("Resolving interpreter", V=2):
            interpreter = interpreter_from_options(options)

    if interpreter is None:
        die("Could not find compatible interpreter", CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

    for requirements_txt in options.requirement_files:
        resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed("Resolving distributions"):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log("  %s" % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #6
def test_prex_builder_script_from_pex_path(tmpdir):
    # type: (Any) -> None

    pex_with_script = os.path.join(str(tmpdir), "script.pex")
    with built_wheel(
            name="my_project",
            entry_points={
                "console_scripts":
                ["my_app = my_project.my_module:do_something"]
            },
    ) as my_whl:
        pb = PEXBuilder()
        pb.add_dist_location(my_whl)
        pb.build(pex_with_script)

    pex_file = os.path.join(str(tmpdir), "app.pex")
    pb = PEXBuilder()
    pb.info.pex_path = pex_with_script
    pb.set_script("my_app")
    pb.build(pex_file)

    assert "hello world!\n" == subprocess.check_output(
        args=[pex_file]).decode("utf-8")
Example #7
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file
                                    or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" %
                            full_path_or_basename)
                    return interp

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path,
                        interpreter_constraints=constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message(
                        "Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
                "Searching for local interpreters matching {}".format(
                    ", ".join(map(str, platforms)))):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log("Resolved {} for platform {}".format(
                            candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform"
                    if len(platforms) == 1 else "these platforms",
                ))

    interpreter = (PythonInterpreter.latest_release_of_min_compatible_version(
        interpreters) if interpreters else None)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead.")

    for directory in OrderedSet(options.sources_directory +
                                options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
            "Resolving distributions ({})".format(reqs +
                                                  options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn(
                "The --cache-ttl option is deprecated and no longer has any effect."
            )
        if options.headers:
            pex_warnings.warn(
                "The --header option is deprecated and no longer has any effect."
            )

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                with TRACER.timed("Resolving requirements from PEX {}.".format(
                        options.pex_repository)):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(
                            options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(
                        resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #8
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = None  # Defaults to $PATH
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = min(interpreters) if interpreters else None
    if options.use_first_matching_interpreter and interpreters:
        if len(interpreters) > 1:
            unused_interpreters = set(interpreters) - {interpreter}
            TRACER.log(
                "Multiple interpreters resolved, but only using {} because "
                "`--use-first-matching-interpreter` was used. These interpreters were matched but "
                "will not be used: {}".format(
                    interpreter.binary,
                    ", ".join(interpreter.binary for interpreter in sorted(unused_interpreters)),
                )
            )
        interpreters = [interpreter]

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    # If we're only building the PEX for the first of many interpreters due to
    # `--use-first-matching-interpreter` selection, we do not want to enable those same interpreter
    # constraints at runtime, where they could lead to a different interpreter being selected,
    # leading to a failure to execute the PEX. Instead we rely on the shebang set by that single
    # interpreter to pick out a similar interpreter at runtime (for a CPython interpreter, the
    # shebang will be `#!/usr/bin/env pythonX.Y`, which should generally be enough to select a
    # matching interpreter). To be clear though, there are many corner cases this will not handle,
    # including mismatched ABIs (python2.7m vs python2.7mu) when the PEX contains platform-specific
    # wheels, etc.
    if options.interpreter_constraint and not options.use_first_matching_interpreter:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    "  %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #9
def assert_force_local_implicit_ns_packages_issues_598(
    interpreter=None, requirements=(), create_ns_packages=True
):
    def create_foo_bar_setup(name, **extra_args):
        # type: (str, **Any) -> str
        setup_args = dict(name=name, version="0.0.1", packages=["foo", "foo.bar"])
        if create_ns_packages:
            setup_args.update(namespace_packages=["foo", "foo.bar"])
        if requirements:
            setup_args.update(install_requires=list(requirements))
        setup_args.update(extra_args)

        return dedent(
            """
            from setuptools import setup
            
            setup(**{setup_args!r})
            """.format(
                setup_args=setup_args
            )
        )

    def with_foo_bar_ns_packages(content):
        # type: (Dict[str, str]) -> Dict[str, str]
        ns_packages = (
            {
                os.path.join(
                    pkg, "__init__.py"
                ): '__import__("pkg_resources").declare_namespace(__name__)'
                for pkg in ("foo", "foo/bar")
            }
            if create_ns_packages
            else {}
        )
        ns_packages.update(content)
        return ns_packages

    content1 = with_foo_bar_ns_packages(
        {
            "foo/bar/spam.py": "identify = lambda: 42",
            "setup.py": create_foo_bar_setup("foo-bar-spam"),
        }
    )

    content2 = with_foo_bar_ns_packages(
        {
            "foo/bar/eggs.py": dedent(
                """
                # NB: This only works when this content is unpacked loose on the filesystem!
                def read_self():
                    with open(__file__) as fp:
                        return fp.read()
                """
            )
        }
    )

    content3 = with_foo_bar_ns_packages(
        {
            "foobaz": dedent(
                """\
                #!python
                import sys
                
                from foo.bar import baz
                
                sys.exit(baz.main())
                """
            ),
            "foo/bar/baz.py": dedent(
                """
                import sys
                
                from foo.bar import eggs, spam
                
                def main():
                    assert len(eggs.read_self()) > 0
                    return spam.identify()
                """
            ),
            "setup.py": create_foo_bar_setup("foo-bar-baz", scripts=["foobaz"]),
        }
    )

    def add_requirements(builder, cache):
        # type: (PEXBuilder, str) -> None
        for resolved_dist in resolve(requirements, cache=cache, interpreter=builder.interpreter):
            builder.add_distribution(resolved_dist.distribution)
            if resolved_dist.direct_requirement:
                builder.add_requirement(resolved_dist.direct_requirement)

    def add_wheel(builder, content):
        # type: (PEXBuilder, Dict[str, str]) -> None
        with temporary_content(content) as project:
            dist = WheelBuilder(project, interpreter=builder.interpreter).bdist()
            builder.add_dist_location(dist)

    def add_sources(builder, content):
        # type: (PEXBuilder, Dict[str, str]) -> None
        with temporary_content(content) as project:
            for path in content.keys():
                builder.add_source(os.path.join(project, path), path)

    with temporary_dir() as root, temporary_dir() as cache:
        pex_info1 = PexInfo.default()
        pex_info1.zip_safe = False
        pex1 = os.path.join(root, "pex1.pex")
        builder1 = PEXBuilder(interpreter=interpreter, pex_info=pex_info1)
        add_requirements(builder1, cache)
        add_wheel(builder1, content1)
        add_sources(builder1, content2)
        builder1.build(pex1)

        pex_info2 = PexInfo.default()
        pex_info2.pex_path = pex1
        pex2 = os.path.join(root, "pex2")
        builder2 = PEXBuilder(path=pex2, interpreter=interpreter, pex_info=pex_info2)
        add_requirements(builder2, cache)
        add_wheel(builder2, content3)
        builder2.set_script("foobaz")
        builder2.freeze()

        assert 42 == PEX(pex2, interpreter=interpreter).run(env=dict(PEX_VERBOSE="9"))
Example #10
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    interpreters = [
      get_interpreter(interpreter,
                      options.interpreter_cache_dir,
                      options.repos,
                      options.use_wheel)
      for interpreter in options.python or [None]
    ]

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = _lowest_version_interpreter(interpreters)
  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platform,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed)

      for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #11
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    def to_python_interpreter(full_path_or_basename):
      if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
      else:
        interpreter = PythonInterpreter.from_env(full_path_or_basename)
        if interpreter is None:
          die('Failed to find interpreter: %s' % full_path_or_basename)
        return interpreter

    interpreters = [to_python_interpreter(interp) for interp in options.python or [sys.executable]]

  if options.interpreter_constraint:
    # NB: options.python and interpreter constraints cannot be used together, so this will not
    # affect usages of the interpreter(s) specified by the "--python" command line flag.
    constraints = options.interpreter_constraint
    validate_constraints(constraints)
    rc_variables = Variables.from_rc(rc=options.rc_file)
    pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
    interpreters = find_compatible_interpreters(pex_python_path, constraints)

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = min(interpreters)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  def walk_and_do(fn, src_dir):
    src_dir = os.path.normpath(src_dir)
    for root, dirs, files in os.walk(src_dir):
      for f in files:
        src_file_path = os.path.join(root, f)
        dst_path = os.path.relpath(src_file_path, src_dir)
        fn(src_file_path, dst_path)

  for directory in options.sources_directory:
    walk_and_do(pex_builder.add_source, directory)

  for directory in options.resources_directory:
    walk_and_do(pex_builder.add_resource, directory)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path
  if options.interpreter_constraint:
    for ic in options.interpreter_constraint:
      pex_builder.add_interpreter_constraint(ic)

  resolvables = resolvables_from_iterable(args, resolver_option_builder, interpreter=interpreter)

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt,
                                              builder=resolver_option_builder,
                                              interpreter=interpreter))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt,
                                    builder=resolver_option_builder,
                                    interpreter=interpreter):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platforms,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed,
                                use_manylinux=options.use_manylinux)

      for resolved_dist in resolveds:
        log('  %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
            V=options.verbosity)
        pex_builder.add_distribution(resolved_dist.distribution)
        pex_builder.add_requirement(resolved_dist.requirement)
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #12
def build_pex(reqs, options):
    interpreters = None  # Default to the current interpreter.

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed('Resolving interpreters', V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.exists(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(
                        full_path_or_basename)
                    if interpreter is None:
                        die('Failed to find interpreter: %s' %
                            full_path_or_basename)
                    return interpreter

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed('Resolving interpreters', V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
                rc_variables = Variables.from_rc(rc=options.rc_file)
                pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)
            else:
                pex_python_path = None
            interpreters = list(
                iter_compatible_interpreters(pex_python_path, constraints))
            if not interpreters:
                die('Could not find compatible interpreter',
                    CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters) if interpreters else None

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    with TRACER.timed(
            'Resolving distributions ({})'.format(reqs +
                                                  options.requirement_files)):
        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=options.platforms,
                indexes=indexes,
                find_links=options.find_links,
                cache=options.cache_dir,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors)

            for resolved_dist in resolveds:
                log('  %s -> %s' %
                    (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity)
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #13
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    def to_python_interpreter(full_path_or_basename):
      if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
      else:
        interpreter = PythonInterpreter.from_env(full_path_or_basename)
        if interpreter is None:
          die('Failed to find interpreter: %s' % full_path_or_basename)
        return interpreter

    interpreters = [to_python_interpreter(interp) for interp in options.python or [sys.executable]]

  if options.interpreter_constraint:
    # NB: options.python and interpreter constraints cannot be used together, so this will not
    # affect usages of the interpreter(s) specified by the "--python" command line flag.
    constraints = options.interpreter_constraint
    validate_constraints(constraints)
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
      rc_variables = Variables.from_rc(rc=options.rc_file)
      pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
    else:
      pex_python_path = ""
    interpreters = find_compatible_interpreters(pex_python_path, constraints)

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = min(interpreters)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  def walk_and_do(fn, src_dir):
    src_dir = os.path.normpath(src_dir)
    for root, dirs, files in os.walk(src_dir):
      for f in files:
        src_file_path = os.path.join(root, f)
        dst_path = os.path.relpath(src_file_path, src_dir)
        fn(src_file_path, dst_path)

  for directory in options.sources_directory:
    walk_and_do(pex_builder.add_source, directory)

  for directory in options.resources_directory:
    walk_and_do(pex_builder.add_resource, directory)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.emit_warnings = options.emit_warnings
  pex_info.inherit_path = options.inherit_path
  if options.interpreter_constraint:
    for ic in options.interpreter_constraint:
      pex_builder.add_interpreter_constraint(ic)

  resolvables = resolvables_from_iterable(args, resolver_option_builder, interpreter=interpreter)

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt,
                                              builder=resolver_option_builder,
                                              interpreter=interpreter))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt,
                                    builder=resolver_option_builder,
                                    interpreter=interpreter):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platforms,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed,
                                use_manylinux=options.use_manylinux)

      for resolved_dist in resolveds:
        log('  %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
            V=options.verbosity)
        pex_builder.add_distribution(resolved_dist.distribution)
        pex_builder.add_requirement(resolved_dist.requirement)
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #14
File: pex.py Project: t-cas/pex
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):
        interpreters = [
            get_interpreter(interpreter, options.interpreter_cache_dir,
                            options.repos, options.use_wheel)
            for interpreter in options.python or [None]
        ]

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = _lowest_version_interpreter(interpreters)
    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(resolvables,
                                      interpreters=interpreters,
                                      platforms=options.platform,
                                      cache=options.cache_dir,
                                      cache_ttl=options.cache_ttl)

            for dist in resolveds:
                log('  %s' % dist, v=options.verbosity)
                pex_builder.add_distribution(dist)
                pex_builder.add_requirement(dist.as_requirement())
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #15
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):
        interpreters = [
            get_interpreter(interpreter, options.interpreter_cache_dir,
                            options.repos, options.use_wheel)
            for interpreter in options.python or [None]
        ]

    if options.interpreter_constraint:
        # NB: options.python and interpreter constraints cannot be used together, so this will not
        # affect usages of the interpreter(s) specified by the "--python" command line flag.
        constraints = options.interpreter_constraint
        validate_constraints(constraints)
        rc_variables = Variables.from_rc(rc=options.rc_file)
        pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
        interpreters = find_compatible_interpreters(pex_python_path,
                                                    constraints)

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters)

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(
                resolvables,
                interpreters=interpreters,
                platforms=options.platform,
                cache=options.cache_dir,
                cache_ttl=options.cache_ttl,
                allow_prereleases=resolver_option_builder.prereleases_allowed)

            for dist in resolveds:
                log('  %s' % dist, v=options.verbosity)
                pex_builder.add_distribution(dist)
                pex_builder.add_requirement(dist.as_requirement())
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #16
class PexBuilderWrapper:
    """Wraps PEXBuilder to provide an API that consumes targets and other BUILD file entities."""
    class Factory(Subsystem):
        options_scope = "pex-builder-wrapper"

        @classmethod
        def register_options(cls, register):
            super(PexBuilderWrapper.Factory, cls).register_options(register)
            # TODO: make an analogy to cls.register_jvm_tool that can be overridden for python subsystems
            # by a python_requirement_library() target, not just via pants.ini!
            register(
                "--setuptools-version",
                advanced=True,
                default="40.6.3",
                fingerprint=True,
                help=
                "The setuptools version to include in the pex if namespace packages need "
                "to be injected.",
            )
            register(
                "--pex-version",
                advanced=True,
                default=pex_version,
                fingerprint=True,
                help="The pex version to include in any generated ipex files. "
                "NOTE: This should ideally be the same as the pex version which pants "
                f"itself depends on, which right now is {pex_version}.",
            )

        @classmethod
        def subsystem_dependencies(cls):
            return super(PexBuilderWrapper.Factory,
                         cls).subsystem_dependencies() + (
                             PythonRepos,
                             PythonSetup,
                         )

        @classmethod
        def create(cls, builder, log=None, generate_ipex=False):
            options = cls.global_instance().get_options()
            setuptools_requirement = f"setuptools=={options.setuptools_version}"
            pex_requirement = f"pex=={options.pex_version}"

            log = log or logging.getLogger(__name__)

            return PexBuilderWrapper(
                builder=builder,
                python_repos_subsystem=PythonRepos.global_instance(),
                python_setup_subsystem=PythonSetup.global_instance(),
                setuptools_requirement=PythonRequirement(
                    setuptools_requirement),
                pex_requirement=PythonRequirement(pex_requirement),
                log=log,
                generate_ipex=generate_ipex,
            )

    def __init__(
        self,
        builder: PEXBuilder,
        python_repos_subsystem: PythonRepos,
        python_setup_subsystem: PythonSetup,
        setuptools_requirement: PythonRequirement,
        pex_requirement: PythonRequirement,
        log,
        generate_ipex: bool = False,
    ):
        assert log is not None

        self._builder = builder
        self._python_repos_subsystem = python_repos_subsystem
        self._python_setup_subsystem = python_setup_subsystem
        self._setuptools_requirement = setuptools_requirement
        self._pex_requirement = pex_requirement
        self._log = log

        self._distributions: Dict[str, Distribution] = {}
        self._frozen = False

        self._generate_ipex = generate_ipex
        # If we generate a .ipex, we need to ensure all the code we copy into the underlying PEXBuilder
        # is also added to the new PEXBuilder created in `._shuffle_original_build_info_into_ipex()`.
        self._all_added_sources_resources: List[Path] = []
        # If we generate a dehydrated "ipex" file, we need to make sure that it is aware of any special
        # find_links repos attached to any single requirement, so it can later resolve those
        # requirements when it is first bootstrapped, using the same resolve options.
        self._all_find_links: OrderedSet[str] = OrderedSet()

    def add_requirement_libs_from(self, req_libs, platforms=None):
        """Multi-platform dependency resolution for PEX files.

        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :param platforms: A list of :class:`Platform`s to resolve requirements for.
                          Defaults to the platforms specified by PythonSetup.
        """
        reqs = [req for req_lib in req_libs for req in req_lib.requirements]
        self.add_resolved_requirements(reqs, platforms=platforms)

    class SingleDistExtractionError(Exception):
        pass

    def extract_single_dist_for_current_platform(self, reqs,
                                                 dist_key) -> Distribution:
        """Resolve a specific distribution from a set of requirements matching the current platform.

        :param list reqs: A list of :class:`PythonRequirement` to resolve.
        :param str dist_key: The value of `distribution.key` to match for a `distribution` from the
                             resolved requirements.
        :return: The single :class:`pkg_resources.Distribution` matching `dist_key`.
        :raises: :class:`self.SingleDistExtractionError` if no dists or multiple dists matched the
                 given `dist_key`.
        """
        distributions = self.resolve_distributions(reqs, platforms=["current"])
        try:
            matched_dist = assert_single_element(
                dist for dists in distributions.values() for dist in dists
                if dist.key == dist_key)
        except (StopIteration, ValueError) as e:
            raise self.SingleDistExtractionError(
                f"Exactly one dist was expected to match name {dist_key} in requirements {reqs}: {e!r}"
            )
        return matched_dist

    def resolve_distributions(
        self,
        reqs: List[PythonRequirement],
        platforms: Optional[List[Platform]] = None,
    ) -> Dict[str, List[Distribution]]:
        """Multi-platform dependency resolution.

        :param reqs: A list of :class:`PythonRequirement` to resolve.
        :param platforms: A list of platform strings to resolve requirements for.
                          Defaults to the platforms specified by PythonSetup.
        :returns: A dict mapping each platform name to the list of distributions resolved for it,
                  covering the given requirements `reqs` and all of their transitive dependencies.
        """
        deduped_reqs = OrderedSet(reqs)
        find_links: OrderedSet[str] = OrderedSet()
        for req in deduped_reqs:
            self._log.debug(f"  Dumping requirement: {req}")
            self._builder.add_requirement(str(req.requirement))
            if req.repository:
                find_links.add(req.repository)

        # Resolve the requirements into distributions.
        distributions = self._resolve_multi(
            self._builder.interpreter,
            list(deduped_reqs),
            platforms,
            list(find_links),
        )
        return distributions

    def add_resolved_requirements(
        self,
        reqs: List[PythonRequirement],
        platforms: Optional[List[Platform]] = None,
        override_ipex_build_do_actually_add_distribution: bool = False,
    ) -> None:
        """Multi-platform dependency resolution for PEX files.

        :param reqs: A list of :class:`PythonRequirement` to resolve.
        :param platforms: A list of :class:`Platform`s to resolve requirements for.
                          Defaults to the platforms specified by PythonSetup.
        :param override_ipex_build_do_actually_add_distribution: When this PexBuilderWrapper is
            configured with generate_ipex=True, this method won't add any distributions to the
            output pex by default. The internal implementation of this class adds a pex dependency
            to the output ipex file, and therefore needs to override that default behavior.
        """
        distributions = self.resolve_distributions(reqs, platforms=platforms)
        locations: Set[str] = set()
        for platform, dists in distributions.items():
            for dist in dists:
                if dist.location not in locations:
                    if self._generate_ipex and not override_ipex_build_do_actually_add_distribution:
                        self._log.debug(
                            f"  *AVOIDING* dumping distribution into ipex: .../{os.path.basename(dist.location)}"
                        )
                        self._register_distribution(dist)
                    else:
                        self._log.debug(
                            f"  Dumping distribution: .../{os.path.basename(dist.location)}"
                        )
                        self.add_distribution(dist)
                locations.add(dist.location)

    def _resolve_multi(
        self,
        interpreter: PythonInterpreter,
        requirements: List[PythonRequirement],
        platforms: Optional[List[Platform]],
        find_links: Optional[List[str]],
    ) -> Dict[str, List[Distribution]]:
        """Multi-platform dependency resolution for PEX files.

        Returns the distributions that must be included in order to satisfy a set of requirements,
        along with their transitive dependencies. This may involve distributions for multiple
        platforms.

        :param interpreter: The :class:`PythonInterpreter` to resolve for.
        :param requirements: A list of :class:`PythonRequirement` objects to resolve.
        :param platforms: A list of :class:`Platform`s to resolve for.
        :param find_links: Additional paths to search for source packages during resolution.
        :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
                 needed to satisfy the requirements on that platform.
        """
        python_setup = self._python_setup_subsystem
        python_repos = self._python_repos_subsystem
        platforms = platforms or python_setup.platforms

        find_links = list(find_links) if find_links else []
        find_links.extend(python_repos.repos)

        # Individual requirements from pants may have a `repository` link attached to them, which is
        # extracted in `self.resolve_distributions()`. When generating a .ipex file with
        # `generate_ipex=True`, we want to ensure these repos are known to the ipex launcher when it
        # tries to resolve all the requirements from BOOTSTRAP-PEX-INFO.
        self._all_find_links.update(OrderedSet(find_links))

        distributions: Dict[str, List[Distribution]] = defaultdict(list)

        for platform in platforms:
            requirements_cache_dir = os.path.join(
                python_setup.resolver_cache_dir, str(interpreter.identity))
            resolved_dists = resolve(
                requirements=[str(req.requirement) for req in requirements],
                interpreter=interpreter,
                platform=platform,
                indexes=python_repos.indexes,
                find_links=find_links,
                cache=requirements_cache_dir,
                allow_prereleases=python_setup.resolver_allow_prereleases,
                manylinux=python_setup.manylinux,
            )
            for resolved_dist in resolved_dists:
                distributions[platform].append(resolved_dist.distribution)

        return distributions

    def _create_source_dumper(self, tgt: Target) -> Callable[[str], None]:
        buildroot = get_buildroot()

        def get_chroot_path(relpath: str) -> str:
            if type(tgt) == Files:
                # Loose `Files`, as opposed to `Resources` or `PythonTarget`s, have no (implied) package
                # structure and so we chroot them relative to the build root so that they can be accessed
                # via the normal Python filesystem APIs just as they would be accessed outside the
                # chrooted environment. NB: This requires we mark the pex as not zip safe so
                # these `Files` can still be accessed in the context of a built pex distribution.
                self._builder.info.zip_safe = False
                return relpath
            return str(Path(relpath).relative_to(tgt.target_base))

        def dump_source(relpath: str) -> None:
            source_path = str(Path(buildroot, relpath))
            dest_path = get_chroot_path(relpath)

            self._all_added_sources_resources.append(Path(dest_path))
            if has_resources(tgt):
                self._builder.add_resource(filename=source_path,
                                           env_filename=dest_path)
            else:
                self._builder.add_source(filename=source_path,
                                         env_filename=dest_path)

        return dump_source

    def add_sources_from(self, tgt: Target) -> None:
        dump_source = self._create_source_dumper(tgt)
        self._log.debug(f"  Dumping sources: {tgt}")
        for relpath in tgt.sources_relative_to_buildroot():
            try:
                dump_source(relpath)
            except OSError:
                self._log.error(
                    f"Failed to copy {relpath} for target {tgt.address.spec}")
                raise

        if getattr(tgt, "_resource_target_specs", None) or getattr(
                tgt, "_synthetic_resources_target", None):
            # No one should be on old-style resources any more.  And if they are,
            # switching to the new python pipeline will be a great opportunity to fix that.
            raise TaskError(
                f"Old-style resources not supported for target {tgt.address.spec}. Depend on resources() "
                "targets instead.")

    def _prepare_inits(self) -> Set[str]:
        chroot = self._builder.chroot()
        sources = chroot.get("source") | chroot.get("resource")
        missing_init_files = identify_missing_init_files(sources)
        if missing_init_files:
            with temporary_file(permissions=0o644) as ns_package:
                ns_package.write(
                    b'__import__("pkg_resources").declare_namespace(__name__)  # type: ignore[attr-defined]'
                )
                ns_package.flush()
                for missing_init_file in missing_init_files:
                    self._all_added_sources_resources.append(
                        Path(missing_init_file))
                    self._builder.add_source(filename=ns_package.name,
                                             env_filename=missing_init_file)
        return missing_init_files

    def set_emit_warnings(self, emit_warnings):
        self._builder.info.emit_warnings = emit_warnings

    def _set_major_minor_interpreter_constraint_for_ipex(
        self,
        info: PexInfo,
        identity: PythonIdentity,
    ) -> PexInfo:
        interpreter_name = identity.requirement.name
        major, minor, _patch = identity.version
        major_minor_only_constraint = f"{interpreter_name}=={major}.{minor}.*"
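        # For example, this yields a constraint like "CPython==3.6.*" for a CPython 3.6.x identity.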
        return ipex_launcher.modify_pex_info(
            info, interpreter_constraints=[str(major_minor_only_constraint)])

    def _shuffle_underlying_pex_builder(self) -> Tuple[PexInfo, Path]:
        """Replace the original builder with a new one, and just pull files from the old chroot."""
        # Ensure that (the interpreter selected to resolve requirements when the ipex is first run) is
        # (the exact same interpreter we used to resolve those requirements here). This is the only (?)
        # way to ensure that the ipex bootstrap uses the *exact* same interpreter version.
        self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex(
            self._builder.info, self._builder.interpreter.identity)

        # Remove all the original top-level requirements in favor of the transitive == requirements.
        self._builder.info = ipex_launcher.modify_pex_info(self._builder.info,
                                                           requirements=[])
        transitive_reqs = [
            dist.as_requirement() for dist in self._distributions.values()
        ]
        self.add_direct_requirements(transitive_reqs)

        orig_info = self._builder.info.copy()

        orig_chroot = self._builder.chroot()

        # Mutate the PexBuilder object which is manipulated by this subsystem.
        self._builder = PEXBuilder(interpreter=self._builder.interpreter)
        self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex(
            self._builder.info, self._builder.interpreter.identity)

        self._distributions = {}

        return (orig_info, Path(orig_chroot.path()))

    def _shuffle_original_build_info_into_ipex(self):
        """Create a "dehydrated" ipex file without any of its requirements, and specify that in
        two *-INFO files.

        See ipex_launcher.py for details of how these files are used.
        """
        orig_pex_info, orig_chroot = self._shuffle_underlying_pex_builder()

        # Gather information needed to create IPEX-INFO.
        all_code = [str(src) for src in self._all_added_sources_resources]
        prefixed_code_paths = [
            os.path.join(ipex_launcher.APP_CODE_PREFIX, src)
            for src in all_code
        ]
        for src, prefixed in zip(all_code, prefixed_code_paths):
            # NB: Need to add under 'source' label for `self._prepare_inits()` to pick it up!
            self._builder.chroot().copy(os.path.join(str(orig_chroot), src),
                                        prefixed,
                                        label="source")

        python_repos = self._python_repos_subsystem
        python_setup = self._python_setup_subsystem

        # NB: self._all_find_links is updated on every call to self._resolve_multi(), and therefore
        # includes all of the links from python_repos.repos, as well as any links added within any
        # individual requirements from that resolve.

        resolver_settings = dict(
            indexes=list(python_repos.indexes),
            find_links=list(self._all_find_links),
            allow_prereleases=UnsetBool.coerce_bool(
                python_setup.resolver_allow_prereleases, default=True),
            manylinux=python_setup.manylinux,
        )

        # IPEX-INFO: A json mapping interpreted in ipex_launcher.py:
        # {
        #   "code": [<which source files to add to the "hydrated" pex when bootstrapped>],
        #   "resolver_settings": {<which indices to search for requirements from when bootstrapping>},
        # }
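        # For illustration only (the path, index URL, and option values are hypothetical):
        # {
        #   "code": ["<APP_CODE_PREFIX>/src/app/main.py"],
        #   "resolver_settings": {"indexes": ["https://pypi.org/simple/"], "find_links": [],
        #                         "allow_prereleases": false, "manylinux": "manylinux2014"}
        # }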
        ipex_info = dict(
            code=prefixed_code_paths,
            resolver_settings=resolver_settings,
        )
        with temporary_file(permissions=0o644) as ipex_info_file:
            ipex_info_file.write(json.dumps(ipex_info).encode())
            ipex_info_file.flush()
            self._builder.add_resource(filename=ipex_info_file.name,
                                       env_filename="IPEX-INFO")

        # BOOTSTRAP-PEX-INFO: The original PEX-INFO, which should be the PEX-INFO in the hydrated .pex
        #                     file that is generated when the .ipex is first executed.
        with temporary_file(permissions=0o644) as bootstrap_pex_info_file:
            bootstrap_pex_info_file.write(orig_pex_info.dump().encode())
            bootstrap_pex_info_file.flush()
            self._builder.add_resource(filename=bootstrap_pex_info_file.name,
                                       env_filename="BOOTSTRAP-PEX-INFO")

        # ipex.py: The special bootstrap script to hydrate the .ipex with the fully resolved
        #          requirements when it is first executed.
        # Extract the file contents of our custom app launcher script from the pants package.
        parent_module = module_dirname(module_dirname(ipex_launcher.__name__))
        ipex_launcher_provider = get_provider(parent_module)
        ipex_launcher_script = ipex_launcher_provider.get_resource_string(
            parent_module, "ipex/ipex_launcher.py")
        with temporary_file(permissions=0o644) as ipex_launcher_file:
            ipex_launcher_file.write(ipex_launcher_script)
            ipex_launcher_file.flush()
            # Our .ipex file will use our custom app launcher!
            self._builder.set_executable(ipex_launcher_file.name,
                                         env_filename="ipex.py")

        # The PEX-INFO we generate shouldn't have any requirements (except pex itself), or they will
        # fail to bootstrap because they were unable to find those distributions. Instead, the .pex file
        # produced when the .ipex is first executed will read and resolve all those requirements from
        # the BOOTSTRAP-PEX-INFO.
        self.add_resolved_requirements(
            [self._pex_requirement, self._setuptools_requirement],
            override_ipex_build_do_actually_add_distribution=True,
        )

    def freeze(self) -> None:
        if self._frozen:
            return

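        # _prepare_inits() injects __init__.py files that call pkg_resources.declare_namespace(),
        # so setuptools (which provides pkg_resources) must be resolved into the pex if it is not
        # already among the registered distributions.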
        if self._prepare_inits():
            dist = self._distributions.get("setuptools")
            if not dist:
                self.add_resolved_requirements([self._setuptools_requirement])

        if self._generate_ipex:
            self._shuffle_original_build_info_into_ipex()

        self._builder.freeze(bytecode_compile=False)
        self._frozen = True

    def set_entry_point(self, entry_point):
        self._builder.set_entry_point(entry_point)

    def build(self, safe_path):
        self.freeze()
        self._builder.build(safe_path,
                            bytecode_compile=False,
                            deterministic_timestamp=True)

    def set_shebang(self, shebang):
        self._builder.set_shebang(shebang)

    def add_interpreter_constraint(self, constraint):
        self._builder.add_interpreter_constraint(constraint)

    def add_interpreter_constraints_from(self, constraint_tgts):
        # TODO this would be a great place to validate the constraints and present a good error message
        # if they are incompatible because all the sources of the constraints are available.
        # See: https://github.com/pantsbuild/pex/blob/584b6e367939d24bc28aa9fa36eb911c8297dac8/pex/interpreter_constraints.py
        constraint_tuples = {
            self._python_setup_subsystem.compatibility_or_constraints(
                tgt.compatibility)
            for tgt in constraint_tgts
        }
        for constraint_tuple in constraint_tuples:
            for constraint in constraint_tuple:
                self.add_interpreter_constraint(constraint)

    def add_direct_requirements(self, reqs):
        for req in reqs:
            self._builder.add_requirement(str(req))

    def add_distribution(self, dist):
        self._builder.add_distribution(dist)
        self._register_distribution(dist)

    def add_dist_location(self, location):
        self._builder.add_dist_location(location)
        dist = DistributionHelper.distribution_from_path(location)
        self._register_distribution(dist)

    def _register_distribution(self, dist):
        self._distributions[dist.key] = dist

    def set_script(self, script):
        self._builder.set_script(script)
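For context, a rough sketch of how a pants task might drive the wrapper above; the req_libs, python_source_targets, entry point, and output path are illustrative assumptions rather than part of the example:

# Hypothetical caller; PEXBuilder, PythonInterpreter, and safe_mkdtemp come from the pex/pants APIs
# already used in this collection, and the pex-builder-wrapper subsystem is assumed to be initialized.
builder = PEXBuilder(path=safe_mkdtemp(), interpreter=PythonInterpreter.get())
wrapper = PexBuilderWrapper.Factory.create(builder=builder)
wrapper.add_requirement_libs_from(req_libs)    # resolve and dump 3rdparty requirements
for tgt in python_source_targets:
    wrapper.add_sources_from(tgt)              # copy sources/resources into the chroot
wrapper.set_entry_point("my.app:main")         # assumed entry point
wrapper.build("dist/my-app.pex")               # freeze() and write the final pex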
Example #17
def assert_force_local_implicit_ns_packages_issues_598(interpreter=None,
                                                       requirements=(),
                                                       create_ns_packages=True):

  def create_foo_bar_setup(name, **extra_args):
    setup_args = dict(
      name=name,
      version='0.0.1',
      packages=['foo', 'foo.bar']
    )
    if create_ns_packages:
      setup_args.update(namespace_packages=['foo', 'foo.bar'])
    if requirements:
      setup_args.update(install_requires=list(requirements))
    setup_args.update(extra_args)

    return dedent("""
      from setuptools import setup

      setup(**{setup_args!r})
    """.format(setup_args=setup_args))

  def with_foo_bar_ns_packages(content):
    ns_packages = {
      os.path.join(pkg, '__init__.py'): '__import__("pkg_resources").declare_namespace(__name__)'
      for pkg in ('foo', 'foo/bar')
    } if create_ns_packages else {}
    ns_packages.update(content)
    return ns_packages

  content1 = with_foo_bar_ns_packages({
    'foo/bar/spam.py': 'identify = lambda: 42',
    'setup.py': create_foo_bar_setup('foo-bar-spam')
  })

  content2 = with_foo_bar_ns_packages({
    'foo/bar/eggs.py': dedent("""
      # NB: This only works when this content is unpacked loose on the filesystem!
      def read_self():
        with open(__file__) as fp:
          return fp.read()
    """)
  })

  content3 = with_foo_bar_ns_packages({
    'foobaz': dedent("""\
      #!python
      import sys

      from foo.bar import baz

      sys.exit(baz.main())
    """),
    'foo/bar/baz.py': dedent("""
      import sys

      from foo.bar import eggs, spam

      def main():
        assert len(eggs.read_self()) > 0
        return spam.identify()
    """),
    'setup.py': create_foo_bar_setup('foo-bar-baz', scripts=['foobaz'])
  })

  def add_requirements(builder, cache):
    for resolved_dist in resolve(requirements, cache=cache, interpreter=builder.interpreter):
      builder.add_requirement(resolved_dist.requirement)
      builder.add_distribution(resolved_dist.distribution)

  def add_wheel(builder, content):
    with temporary_content(content) as project:
      dist = WheelInstaller(project, interpreter=builder.interpreter).bdist()
      builder.add_dist_location(dist)

  def add_sources(builder, content):
    with temporary_content(content) as project:
      for path in content.keys():
        builder.add_source(os.path.join(project, path), path)

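  # Scenario: pex1 ships the foo.bar.spam wheel plus the loose foo.bar.eggs sources (zip_safe=False),
  # while pex2 declares pex_path=pex1 and adds the foo.bar.baz wheel with its 'foobaz' script.
  # Running pex2 therefore only works if the foo.bar namespace package is merged across both pexes
  # and the loose filesystem sources.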
  with nested(temporary_dir(), temporary_dir()) as (root, cache):
    pex_info1 = PexInfo.default()
    pex_info1.zip_safe = False
    pex1 = os.path.join(root, 'pex1.pex')
    builder1 = PEXBuilder(interpreter=interpreter, pex_info=pex_info1)
    add_requirements(builder1, cache)
    add_wheel(builder1, content1)
    add_sources(builder1, content2)
    builder1.build(pex1)

    pex_info2 = PexInfo.default()
    pex_info2.pex_path = pex1
    pex2 = os.path.join(root, 'pex2')
    builder2 = PEXBuilder(path=pex2, interpreter=interpreter, pex_info=pex_info2)
    add_requirements(builder2, cache)
    add_wheel(builder2, content3)
    builder2.set_script('foobaz')
    builder2.freeze()

    assert 42 == PEX(pex2, interpreter=interpreter).run(env=dict(PEX_VERBOSE='9'))
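A minimal way a test might call the helper above (the flag value is just one possible combination; by default the current interpreter is used):

def test_force_local_implicit_ns_packages_issues_598():
  # Hypothetical pytest wrapper around the assertion helper defined above.
  assert_force_local_implicit_ns_packages_issues_598(create_ns_packages=True)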