Example #1
  def test_output_multiple_targets_one_source(self):
    target_address1 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target1 = self.mock_hydrated_target(target_address1, {"some/file.py": "", }, ())

    target_address2 = FileDepsTest.make_build_target_address("other/target")
    hydrated_target2 = self.mock_hydrated_target(target_address2, {"other/file.py": "", }, ())

    transitive_targets = TransitiveHydratedTargets(
      (hydrated_target1, hydrated_target2),
      OrderedSet([hydrated_target1, hydrated_target2])
    )

    self.filedeps_rule_test(
      transitive_targets,
      dedent(
        '''\
        some/target/BUILD
        some/file.py
        other/target/BUILD
        other/file.py
        ''')
    )
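
All of these examples revolve around OrderedSet, an ordered, de-duplicating collection (pex vendors its own; pants ships a similar utility). The two properties the test above relies on can be illustrated with a minimal stand-in built on Python 3.7+ insertion-ordered dicts; this sketch is illustrative, not the real implementation:

class MiniOrderedSet:
    """Illustrative stand-in: keeps first-insertion order and drops duplicates."""

    def __init__(self, iterable=()):
        self._items = dict.fromkeys(iterable)  # dicts preserve insertion order (3.7+)

    def add(self, item):
        self._items[item] = None

    def update(self, iterable):
        for item in iterable:
            self.add(item)

    def __iter__(self):
        return iter(self._items)

    def __len__(self):
        return len(self._items)

assert list(MiniOrderedSet(["a", "b", "a", "c"])) == ["a", "b", "c"]
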
Example #2
File: resolver.py  Project: ofek/pex
  def to_requirement(self, dist):
    req = dist.as_requirement()

    markers = OrderedSet()

    # Here we map any wheel python requirement to the equivalent environment marker:
    # See:
    # + https://www.python.org/dev/peps/pep-0345/#requires-python
    # + https://www.python.org/dev/peps/pep-0508/#environment-markers
    python_requires = dist_metadata.requires_python(dist)
    if python_requires:
      markers.update(
        Marker(python_version)
        for python_version in sorted(
          'python_version {operator} {version!r}'.format(
            operator=specifier.operator,
            version=specifier.version
          ) for specifier in python_requires
        )
      )

    markers.update(self._markers_by_requirement_key.get(req.key, ()))

    if not markers:
      return req

    if len(markers) == 1:
      marker = next(iter(markers))
      req.marker = marker
      return req

    # We may have resolved with multiple paths to the dependency represented by dist and at least
    # two of those paths had (different) conditional requirements for dist based on environment
    # marker predicates. In that case, since the pip resolve succeeded, the implication is that the
    # environment markers are compatible; i.e.: their intersection selects the target interpreter.
    # Here we make that intersection explicit.
    # See: https://www.python.org/dev/peps/pep-0508/#grammar
    marker = ' and '.join('({})'.format(marker) for marker in markers)
    return Requirement.parse('{}; {}'.format(req, marker))
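
The PEP 508 trick at the end, parenthesizing each marker and joining with `and`, can be tried standalone with the packaging library (a stand-in here; pex evaluates markers through its own vendored machinery):

from packaging.markers import Marker

m1 = Marker("python_version >= '2.7'")
m2 = Marker("python_version < '4'")

# Parenthesize each operand per the PEP 508 grammar, then re-parse the conjunction.
combined = Marker(" and ".join("({})".format(m) for m in (m1, m2)))
print(combined.evaluate())  # True when the running interpreter is in the intersection
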
Example #3
    def set_script(self, script):
        """Set the entry point of this PEX environment based upon a distribution script.

        :param script: The script name as defined either by a console script or ordinary
          script within the setup.py of one of the distributions added to the PEX.
        :raises: :class:`PEXBuilder.InvalidExecutableSpecification` if the script is not found
          in any distribution added to the PEX.
        """

        distributions = OrderedSet(self._distributions.values())
        if self._pex_info.pex_path:
            for pex in self._pex_info.pex_path.split(":"):
                if os.path.exists(pex):
                    distributions.update(
                        PEX(pex, interpreter=self._interpreter).resolve())

        # Check if 'script' is a console_script.
        dist, entry_point = get_entry_point_from_console_script(
            script, distributions)
        if entry_point:
            self.set_entry_point(entry_point)
            TRACER.log("Set entrypoint to console_script {!r} in {!r}".format(
                entry_point, dist))
            return

        # Check if 'script' is an ordinary script.
        dist_script = get_script_from_distributions(script, distributions)
        if dist_script:
            if self._pex_info.entry_point:
                raise self.InvalidExecutableSpecification(
                    "Cannot set both entry point and script of PEX!")
            self._pex_info.script = script
            TRACER.log("Set entrypoint to script {!r} in {!r}".format(
                script, dist_script.dist))
            return

        raise self.InvalidExecutableSpecification(
            "Could not find script {!r} in any distribution {} within PEX!".
            format(script, ", ".join(str(d) for d in distributions)))
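
For comparison, the console_script half of this lookup resembles querying entry-point metadata with the stdlib; a rough sketch using importlib.metadata (Python 3.10+ selection API; the helper name is hypothetical, not pex's get_entry_point_from_console_script):

from importlib.metadata import entry_points

def find_console_script(script_name):
    # Return the "module:attr" value of a matching console script, or None.
    for ep in entry_points(group="console_scripts"):
        if ep.name == script_name:
            return ep.value
    return None

print(find_console_script("pip"))  # e.g. 'pip._internal.cli.main:main' when pip is installed
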
Example #4
  def test_output_multiple_targets_one_source_overlapping(self):
    #   target1                target2
    #  source="some/file.py"  source="some/file.py"
    #        \                 /
    #          dep  (a single dep target shared by both)
    dep_address = FileDepsTest.make_build_target_address("dep/target")
    dep_target = self.mock_hydrated_target(dep_address, {"dep/file.py": "", }, ())
  
    target_address1 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target1 = self.mock_hydrated_target(
      target_address1,
      {"some/file.py": "", },
      (dep_target,)
    )
    
    target_address2 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target2 = self.mock_hydrated_target(
      target_address2,
      {"some/file.py": "", },
      (dep_target,)
    )
    
    transitive_targets = TransitiveHydratedTargets(
      (hydrated_target1, hydrated_target2),
      OrderedSet([hydrated_target1, hydrated_target2, dep_target])
    )

    self.filedeps_rule_test(
      transitive_targets,
      dedent(
        '''\
        some/target/BUILD
        some/file.py
        dep/target/BUILD
        dep/file.py
        ''')
    )
Example #5
    def _identify_interpreters(
            cls,
            filter,  # type: PathFilter
            error_handler=None,  # type: Optional[ErrorHandler]
            paths=None,  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Union[Iterator[PythonInterpreter], Iterator[InterpreterOrJobError]]
        def iter_candidates():
            # type: () -> Iterator[str]
            for path in cls._paths(paths=paths):
                for fn in cls._expand_path(path):
                    if filter(fn):
                        binary = cls._resolve_pyenv_shim(fn)
                        if binary:
                            yield binary

        results = execute_parallel(
            inputs=OrderedSet(iter_candidates()),
            spawn_func=cls._spawn_from_binary,
            error_handler=error_handler,
        )
        return cast(
            "Union[Iterator[PythonInterpreter], Iterator[InterpreterOrJobError]]",
            results)
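
execute_parallel is pex's internal job-fanning helper; its shape here (dedup the candidates first, then probe each one concurrently) can be approximated with the stdlib. A sketch where probe is a hypothetical stand-in for cls._spawn_from_binary:

from concurrent.futures import ThreadPoolExecutor

def probe(binary):  # hypothetical stand-in for the spawn function
    return "identified: {}".format(binary)

# dict.fromkeys dedups while preserving discovery order, like OrderedSet above.
candidates = dict.fromkeys(["/usr/bin/python3", "/usr/local/bin/python3", "/usr/bin/python3"])

with ThreadPoolExecutor() as pool:
    for result in pool.map(probe, candidates):
        print(result)
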
Example #6
File: resolver.py  Project: tdyas/pex
    def install_distributions(self,
                              ignore_errors=False,
                              workspace=None,
                              max_parallel_jobs=None):
        if not any((self._build_requests, self._install_requests)):
            # Nothing to build or install.
            return []

        cache = self._cache or workspace or safe_mkdtemp()

        built_wheels_dir = os.path.join(cache, 'built_wheels')
        spawn_wheel_build = functools.partial(self._spawn_wheel_build,
                                              built_wheels_dir)

        installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
        spawn_install = functools.partial(self._spawn_install,
                                          installed_wheels_dir)

        to_install = self._install_requests[:]
        to_calculate_requirements_for = []

        # 1. Build local projects and sdists.
        if self._build_requests:
            with TRACER.timed('Building distributions for:'
                              '\n  {}'.format('\n  '.join(
                                  map(str, self._build_requests)))):

                build_requests, install_requests = self._categorize_build_requests(
                    build_requests=self._build_requests,
                    dist_root=built_wheels_dir)
                to_install.extend(install_requests)

                for build_result in execute_parallel(
                        inputs=build_requests,
                        spawn_func=spawn_wheel_build,
                        raise_type=Untranslateable,
                        max_jobs=max_parallel_jobs):
                    to_install.extend(build_result.finalize_build())

        # 2. Install wheels in individual chroots.

        # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
        # downloaded / built for each interpreter or platform.
        install_requests_by_wheel_file = OrderedDict()
        for install_request in to_install:
            install_requests = install_requests_by_wheel_file.setdefault(
                install_request.wheel_file, [])
            install_requests.append(install_request)

        representative_install_requests = [
            requests[0]
            for requests in install_requests_by_wheel_file.values()
        ]

        def add_requirements_requests(install_result):
            install_requests = install_requests_by_wheel_file[
                install_result.request.wheel_file]
            to_calculate_requirements_for.extend(
                install_result.finalize_install(install_requests))

        with TRACER.timed('Installing:'
                          '\n  {}'.format('\n  '.join(
                              map(str, representative_install_requests)))):

            install_requests, install_results = self._categorize_install_requests(
                install_requests=representative_install_requests,
                installed_wheels_dir=installed_wheels_dir)
            for install_result in install_results:
                add_requirements_requests(install_result)

            for install_result in execute_parallel(inputs=install_requests,
                                                   spawn_func=spawn_install,
                                                   raise_type=Untranslateable,
                                                   max_jobs=max_parallel_jobs):
                add_requirements_requests(install_result)

        # 3. Calculate the final installed requirements.
        with TRACER.timed('Calculating installed requirements for:'
                          '\n  {}'.format('\n  '.join(
                              map(str, to_calculate_requirements_for)))):
            distribution_requirements = DistributionRequirements.merged(
                execute_parallel(inputs=to_calculate_requirements_for,
                                 spawn_func=DistributionRequirements.Request.
                                 spawn_calculation,
                                 raise_type=Untranslateable,
                                 max_jobs=max_parallel_jobs))

        installed_distributions = OrderedSet()
        for requirements_request in to_calculate_requirements_for:
            for distribution in requirements_request.distributions:
                installed_distributions.add(
                    InstalledDistribution(
                        target=requirements_request.target,
                        requirement=distribution_requirements.to_requirement(
                            distribution),
                        distribution=distribution))

        if not ignore_errors:
            self._check_install(installed_distributions)
        return installed_distributions
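
Step 2's dedup is a plain group-by keyed on wheel file name, with the first request of each group acting as the representative install. The pattern in isolation (the field names here are illustrative, not pex's actual request type):

from collections import OrderedDict, namedtuple

InstallRequest = namedtuple("InstallRequest", ["wheel_file", "target"])

to_install = [
    InstallRequest("six-1.16.0-py2.py3-none-any.whl", "cp38"),
    InstallRequest("six-1.16.0-py2.py3-none-any.whl", "cp39"),
    InstallRequest("lxml-4.9.0-cp38-cp38-linux_x86_64.whl", "cp38"),
]

install_requests_by_wheel_file = OrderedDict()
for install_request in to_install:
    install_requests_by_wheel_file.setdefault(install_request.wheel_file, []).append(install_request)

representatives = [requests[0] for requests in install_requests_by_wheel_file.values()]
assert len(representatives) == 2  # the universal six wheel is only installed once
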
Example #7
def empty(cls):
    return cls(None, OrderedSet(), None, False)
Example #8
def map_packages(resolved_packages):
    packages = OrderedSet(
        built_packages.get(p, p) for p in resolved_packages.packages)
    return _ResolvedPackages(resolved_packages.resolvable, packages,
                             resolved_packages.parent,
                             resolved_packages.constraint_only)
Example #9
def merge(self, resolvable, packages, parent=None):
    """Add a resolvable and its resolved packages."""
    self.__tuples.append(
        _ResolvedPackages(resolvable, OrderedSet(packages), parent,
                          resolvable.is_constraint))
    self._check()
Example #10
    def resolve(self, reqs):
        # type: (Iterable[Requirement]) -> Iterable[Distribution]

        self._update_candidate_distributions(self._load_internal_cache())

        unresolved_reqs = OrderedDict(
        )  # type: OrderedDict[Requirement, OrderedSet]

        def record_unresolved(dist_not_found):
            # type: (_DistributionNotFound) -> None
            TRACER.log("Failed to resolve a requirement: {}".format(
                dist_not_found.requirement))
            requirers = unresolved_reqs.get(dist_not_found.requirement)
            if requirers is None:
                requirers = OrderedSet()
                unresolved_reqs[dist_not_found.requirement] = requirers
            if dist_not_found.required_by:
                requirers.add(dist_not_found.required_by)

        resolved_dists_by_key = OrderedDict(
        )  # type: OrderedDict[_RequirementKey, Distribution]
        for qualified_req_or_not_found in self._root_requirements_iter(reqs):
            if isinstance(qualified_req_or_not_found, _DistributionNotFound):
                record_unresolved(qualified_req_or_not_found)
                continue

            with TRACER.timed("Resolving {}".format(
                    qualified_req_or_not_found.requirement),
                              V=2):
                for not_found in self._resolve_requirement(
                        requirement=qualified_req_or_not_found.requirement,
                        required=qualified_req_or_not_found.required,
                        resolved_dists_by_key=resolved_dists_by_key,
                ):
                    record_unresolved(not_found)

        if unresolved_reqs:
            TRACER.log("Unresolved requirements:")
            for req in unresolved_reqs:
                TRACER.log("  - {}".format(req))

            TRACER.log("Distributions contained within this pex:")
            if not self._pex_info.distributions:
                TRACER.log("  None")
            else:
                for dist_name in self._pex_info.distributions:
                    TRACER.log("  - {}".format(dist_name))

            if not self._pex_info.ignore_errors:
                items = []
                for index, (requirement,
                            requirers) in enumerate(unresolved_reqs.items()):
                    rendered_requirers = ""
                    if requirers:
                        rendered_requirers = "\n    Required by:" "\n      {requirers}".format(
                            requirers="\n      ".join(map(str, requirers)))
                    contains = self._available_ranked_dists_by_key[
                        requirement.key]
                    if contains:
                        rendered_contains = (
                            "\n    But this pex only contains:"
                            "\n      {distributions}".format(
                                distributions="\n      ".join(
                                    os.path.basename(
                                        ranked_dist.distribution.location)
                                    for ranked_dist in contains), ))
                    else:
                        rendered_contains = (
                            "\n    But this pex had no {project_name!r} distributions."
                            .format(project_name=requirement.project_name))
                    items.append("{index: 2d}: {requirement}"
                                 "{rendered_requirers}"
                                 "{rendered_contains}".format(
                                     index=index + 1,
                                     requirement=requirement,
                                     rendered_requirers=rendered_requirers,
                                     rendered_contains=rendered_contains,
                                 ))

                raise ResolveError(
                    "Failed to resolve requirements from PEX environment @ {pex}.\n"
                    "Needed {platform} compatible dependencies for:\n"
                    "{items}".format(pex=self._pex,
                                     platform=self._platform,
                                     items="\n".join(items)))

        return OrderedSet(resolved_dists_by_key.values())
Example #11
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file
                                    or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" %
                            full_path_or_basename)
                    return interp

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path,
                        interpreter_constraints=constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message(
                        "Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
                "Searching for local interpreters matching {}".format(
                    ", ".join(map(str, platforms)))):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log("Resolved {} for platform {}".format(
                            candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform"
                    if len(platforms) == 1 else "these platforms",
                ))

    interpreter = (PythonInterpreter.latest_release_of_min_compatible_version(
        interpreters) if interpreters else None)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead.")

    for directory in OrderedSet(options.sources_directory +
                                options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
            "Resolving distributions ({})".format(reqs +
                                                  options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn(
                "The --cache-ttl option is deprecated and no longer has any effect."
            )
        if options.headers:
            pex_warnings.warn(
                "The --header option is deprecated and no longer has any effect."
            )

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                with TRACER.timed("Resolving requirements from PEX {}.".format(
                        options.pex_repository)):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(
                            options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(
                        resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
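
The sources/resources walk above preserves on-disk layout by computing each destination path relative to its source root. The path arithmetic on its own:

import os

def iter_sources(src_dir):
    # Yield (source file path, PEX-relative destination path) pairs.
    src_dir = os.path.normpath(src_dir)
    for root, _, files in os.walk(src_dir):
        for f in files:
            src_file_path = os.path.join(root, f)
            yield src_file_path, os.path.relpath(src_file_path, src_dir)

for src, dst in iter_sources("."):
    print(src, "->", dst)
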
Example #12
    def resolve_distributions(self, ignore_errors=False):
        # This method has four stages:
        # 1. Resolve sdists and wheels.
        # 2. Build local projects and sdists.
        # 3. Install wheels in individual chroots.
        # 4. Calculate the final resolved requirements.
        #
        # You'd think we might be able to just pip install all the requirements, but pexes can be
        # multi-platform / multi-interpreter, in which case only a subset of distributions resolved into
        # the PEX should be activated for the runtime interpreter. Sometimes there are platform specific
        # wheels and sometimes python version specific dists (backports being the common case). As such,
        # we need to be able to add each resolved distribution to the `sys.path` individually
        # (`PEXEnvironment` handles this selective activation at runtime). Since pip install only
        # accepts a single location to install all resolved dists, that won't work.
        #
        # This means we need to separately resolve all distributions, then install each in their own
        # chroot. To do this we use `pip download` for the resolve and download of all needed
        # distributions and then `pip install` to install each distribution in its own chroot.
        #
        # As a complicating factor, the runtime activation scheme relies on PEP 425 tags; i.e.: wheel
        # names. Some requirements are only available or applicable in source form - either via sdist,
        # VCS URL or local projects. As such we need to insert a `pip wheel` step to generate wheels for
        # all requirements resolved in source form via `pip download` / inspection of requirements to
        # discover those that are local directories (local setup.py or pyproject.toml python projects).
        #
        # Finally, we must calculate the pinned requirement corresponding to each distribution we
        # resolved along with any environment markers that control which runtime environments the
        # requirement should be activated in.

        if not self._requirements and not self._requirement_files:
            # Nothing to resolve.
            return []

        workspace = safe_mkdtemp()
        cache = self._cache or workspace

        resolved_dists_dir = os.path.join(workspace, 'resolved_dists')
        spawn_resolve = functools.partial(self._spawn_resolve,
                                          resolved_dists_dir)
        to_resolve = self._targets

        built_wheels_dir = os.path.join(cache, 'built_wheels')
        spawn_wheel_build = functools.partial(self._spawn_wheel_build,
                                              built_wheels_dir)
        to_build = list(self._iter_local_projects())

        installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
        spawn_install = functools.partial(self._spawn_install,
                                          installed_wheels_dir)
        to_install = []

        to_calculate_requirements_for = []

        # 1. Resolve sdists and wheels.
        with TRACER.timed('Resolving for:\n  {}'.format('\n  '.join(
                map(str, to_resolve)))):
            for resolve_result in self._run_parallel(inputs=to_resolve,
                                                     spawn_func=spawn_resolve,
                                                     raise_type=Unsatisfiable):
                to_build.extend(resolve_result.build_requests())
                to_install.extend(resolve_result.install_requests())

        if not any((to_build, to_install)):
            # Nothing to build or install.
            return []

        # 2. Build local projects and sdists.
        if to_build:
            with TRACER.timed('Building distributions for:\n  {}'.format(
                    '\n  '.join(map(str, to_build)))):

                build_requests, install_requests = self._categorize_build_requests(
                    build_requests=to_build, dist_root=built_wheels_dir)
                to_install.extend(install_requests)

                for build_result in self._run_parallel(
                        inputs=build_requests,
                        spawn_func=spawn_wheel_build,
                        raise_type=Untranslateable):
                    to_install.extend(build_result.finalize_build())

        # 3. Install wheels in individual chroots.

        # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
        # downloaded / built for each interpreter or platform.
        install_requests_by_wheel_file = OrderedDict()
        for install_request in to_install:
            install_requests = install_requests_by_wheel_file.setdefault(
                install_request.wheel_file, [])
            install_requests.append(install_request)

        representative_install_requests = [
            requests[0]
            for requests in install_requests_by_wheel_file.values()
        ]

        def add_requirements_requests(install_result):
            install_requests = install_requests_by_wheel_file[
                install_result.request.wheel_file]
            to_calculate_requirements_for.extend(
                install_result.finalize_install(install_requests))

        with TRACER.timed('Installing:\n  {}'.format('\n  '.join(
                map(str, representative_install_requests)))):

            install_requests, install_results = self._categorize_install_requests(
                install_requests=representative_install_requests,
                installed_wheels_dir=installed_wheels_dir)
            for install_result in install_results:
                add_requirements_requests(install_result)

            for install_result in self._run_parallel(
                    inputs=install_requests,
                    spawn_func=spawn_install,
                    raise_type=Untranslateable):
                add_requirements_requests(install_result)

        # 4. Calculate the final resolved requirements.
        with TRACER.timed(
                'Calculating resolved requirements for:\n  {}'.format(
                    '\n  '.join(map(str, to_calculate_requirements_for)))):
            distribution_requirements = DistributionRequirements.merged(
                self._run_parallel(inputs=to_calculate_requirements_for,
                                   spawn_func=DistributionRequirements.Request.
                                   spawn_calculation,
                                   raise_type=Untranslateable))

        resolved_distributions = OrderedSet()
        for requirements_request in to_calculate_requirements_for:
            for distribution in requirements_request.distributions:
                resolved_distributions.add(
                    ResolvedDistribution(
                        target=requirements_request.target,
                        requirement=distribution_requirements.to_requirement(
                            distribution),
                        distribution=distribution))

        if not ignore_errors and self._transitive:
            self._check_resolve(resolved_distributions)
        return resolved_distributions
Example #13
    def _resolve(self, working_set, reqs):
        reqs_by_key = OrderedDict((req.key, req) for req in reqs)
        unresolved_reqs = OrderedDict()
        resolveds = OrderedSet()

        environment = self._target_interpreter_env.copy()
        environment['extra'] = list(
            set(itertools.chain(*(req.extras for req in reqs))))

        # Resolve them one at a time so that we can figure out which ones we need to elide should
        # there be an interpreter incompatibility.
        for req in reqs_by_key.values():
            if req.marker and not req.marker.evaluate(environment=environment):
                TRACER.log(
                    'Skipping activation of `%s` due to environment marker de-selection'
                    % req)
                continue
            with TRACER.timed('Resolving %s' % req, V=2):
                try:
                    resolveds.update(working_set.resolve([req], env=self))
                except DistributionNotFound as e:
                    TRACER.log('Failed to resolve a requirement: %s' % e)
                    requirers = unresolved_reqs.setdefault(e.req, OrderedSet())
                    if e.requirers:
                        requirers.update(reqs_by_key[requirer]
                                         for requirer in e.requirers)

        if unresolved_reqs:
            TRACER.log('Unresolved requirements:')
            for req in unresolved_reqs:
                TRACER.log('  - %s' % req)

            TRACER.log('Distributions contained within this pex:')
            distributions_by_key = defaultdict(list)
            if not self._pex_info.distributions:
                TRACER.log('  None')
            else:
                for dist_name, dist_digest in self._pex_info.distributions.items(
                ):
                    TRACER.log('  - %s' % dist_name)
                    distribution = DistributionHelper.distribution_from_path(
                        path=os.path.join(self._pex_info.install_cache,
                                          dist_digest, dist_name))
                    distributions_by_key[
                        distribution.as_requirement().key].append(distribution)

            if not self._pex_info.ignore_errors:
                items = []
                for index, (requirement,
                            requirers) in enumerate(unresolved_reqs.items()):
                    rendered_requirers = ''
                    if requirers:
                        rendered_requirers = (
                            '\n    Required by:'
                            '\n      {requirers}').format(
                                requirers='\n      '.join(map(str, requirers)))

                    items.append('{index: 2d}: {requirement}'
                                 '{rendered_requirers}'
                                 '\n    But this pex only contains:'
                                 '\n      {distributions}'.format(
                                     index=index + 1,
                                     requirement=requirement,
                                     rendered_requirers=rendered_requirers,
                                     distributions='\n      '.join(
                                         os.path.basename(d.location)
                                         for d in distributions_by_key[
                                             requirement.key])))

                die('Failed to execute PEX file. Needed {platform} compatible dependencies for:\n{items}'
                    .format(platform=Platform.of_interpreter(
                        self._interpreter),
                            items='\n'.join(items)))

        return resolveds
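
The marker de-selection at the top of the loop can be experimented with directly; here is an equivalent sketch using the packaging library (pex itself evaluates pkg_resources requirement markers). Passing an environment dict overrides values taken from the running interpreter:

from packaging.requirements import Requirement

req = Requirement("pywin32>=1.0; sys_platform == 'win32'")
print(req.marker.evaluate())  # False on non-Windows interpreters
print(req.marker.evaluate(environment={"sys_platform": "win32"}))  # True: forced target
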
Example #14
def _paths(paths=None):
    # type: (Optional[Iterable[str]]) -> Iterable[str]
    # NB: If `paths=[]`, we will not read $PATH.
    return OrderedSet(paths if paths is not None else os.getenv("PATH", "").split(os.pathsep))
Example #15
    def _resolve(self, working_set, reqs):
        environment = self._target_interpreter_env.copy()
        environment["extra"] = list(set(itertools.chain(*(req.extras for req in reqs))))

        reqs_by_key = OrderedDict()
        for req in reqs:
            if req.marker and not req.marker.evaluate(environment=environment):
                TRACER.log(
                    "Skipping activation of `%s` due to environment marker de-selection" % req
                )
                continue
            reqs_by_key.setdefault(req.key, []).append(req)

        unresolved_reqs = OrderedDict()
        resolveds = OrderedSet()

        # Resolve them one at a time so that we can figure out which ones we need to elide should
        # there be an interpreter incompatibility.
        for key, reqs in reqs_by_key.items():
            with TRACER.timed("Resolving {} from {}".format(key, reqs), V=2):
                # N.B.: We resolve the bare requirement with no version specifiers since the resolve process
                # used to build this pex already did so. There may be multiple distributions satisfying any
                # particular key (e.g.: a Python 2 specific version and a Python 3 specific version for a
                # multi-python PEX) and we want the working set to pick the most appropriate one.
                req = Requirement.parse(key)
                try:
                    resolveds.update(working_set.resolve([req], env=self))
                except DistributionNotFound as e:
                    TRACER.log("Failed to resolve a requirement: %s" % e)
                    requirers = unresolved_reqs.setdefault(e.req, OrderedSet())
                    if e.requirers:
                        for requirer in e.requirers:
                            requirers.update(reqs_by_key[requirer])

        if unresolved_reqs:
            TRACER.log("Unresolved requirements:")
            for req in unresolved_reqs:
                TRACER.log("  - %s" % req)

            TRACER.log("Distributions contained within this pex:")
            distributions_by_key = defaultdict(list)
            if not self._pex_info.distributions:
                TRACER.log("  None")
            else:
                for dist_name, dist_digest in self._pex_info.distributions.items():
                    TRACER.log("  - %s" % dist_name)
                    distribution = DistributionHelper.distribution_from_path(
                        path=os.path.join(self._pex_info.install_cache, dist_digest, dist_name)
                    )
                    distributions_by_key[distribution.as_requirement().key].append(distribution)

            if not self._pex_info.ignore_errors:
                items = []
                for index, (requirement, requirers) in enumerate(unresolved_reqs.items()):
                    rendered_requirers = ""
                    if requirers:
                        rendered_requirers = ("\n    Required by:" "\n      {requirers}").format(
                            requirers="\n      ".join(map(str, requirers))
                        )

                    items.append(
                        "{index: 2d}: {requirement}"
                        "{rendered_requirers}"
                        "\n    But this pex only contains:"
                        "\n      {distributions}".format(
                            index=index + 1,
                            requirement=requirement,
                            rendered_requirers=rendered_requirers,
                            distributions="\n      ".join(
                                os.path.basename(d.location)
                                for d in distributions_by_key[requirement.key]
                            ),
                        )
                    )

                die(
                    "Failed to execute PEX file. Needed {platform} compatible dependencies for:\n{items}".format(
                        platform=self._interpreter.platform, items="\n".join(items)
                    )
                )

        return resolveds
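
The bare-key trick described in the comment above, resolving `Requirement.parse(key)` with no version specifiers, works because an unpinned requirement accepts any version. A quick check with pkg_resources, which these snippets use:

from pkg_resources import Requirement

req = Requirement.parse("six")
print(req.key, req.specs)  # 'six' [] -- no version pins
print("1.16.0" in req)     # True: any version satisfies an unpinned requirement
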
Example #16
def compute_indexes(options):
    # type: (Namespace) -> List[str]

    indexes = ([_PYPI] if options.pypi else []) + (options.indexes or [])
    return list(OrderedSet(indexes))
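
On Python 3.7+, the same order-preserving dedup can be had from dict.fromkeys without an OrderedSet dependency:

indexes = ["https://pypi.org/simple", "https://internal/simple", "https://pypi.org/simple"]
print(list(dict.fromkeys(indexes)))
# ['https://pypi.org/simple', 'https://internal/simple']
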
Example #17
File: pex.py  Project: cosmicexplorer/pex
    def minimum_sys_path(cls, site_libs, inherit_path):
        # type: (Iterable[str], InheritPath.Value) -> Tuple[List[str], Mapping[str, Any]]
        scrub_paths = OrderedSet()  # type: OrderedSet[str]
        site_distributions = OrderedSet()  # type: OrderedSet[str]
        user_site_distributions = OrderedSet()  # type: OrderedSet[str]

        def all_distribution_paths(path):
            # type: (Optional[str]) -> Iterable[str]
            if path is None:
                return ()
            locations = set(dist.location for dist in find_distributions(path))
            return {path} | locations | set(os.path.realpath(path) for path in locations)

        for path_element in sys.path:
            if cls._tainted_path(path_element, site_libs):
                TRACER.log("Tainted path element: %s" % path_element)
                site_distributions.update(all_distribution_paths(path_element))
            else:
                TRACER.log("Not a tainted path element: %s" % path_element, V=2)

        user_site_distributions.update(all_distribution_paths(USER_SITE))

        if inherit_path == InheritPath.FALSE:
            scrub_paths = OrderedSet(site_distributions)
            scrub_paths.update(user_site_distributions)
            for path in user_site_distributions:
                TRACER.log("Scrubbing from user site: %s" % path)
            for path in site_distributions:
                TRACER.log("Scrubbing from site-packages: %s" % path)

        scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)

        pythonpath = cls.unstash_pythonpath()
        if pythonpath is not None:
            original_pythonpath = pythonpath.split(os.pathsep)
            user_pythonpath = list(OrderedSet(original_pythonpath) - set(sys.path))
            if original_pythonpath == user_pythonpath:
                TRACER.log("Unstashed PYTHONPATH of %s" % pythonpath, V=2)
            else:
                TRACER.log(
                    "Extracted user PYTHONPATH of %s from unstashed PYTHONPATH of %s"
                    % (os.pathsep.join(user_pythonpath), pythonpath),
                    V=2,
                )

            if inherit_path == InheritPath.FALSE:
                for path in user_pythonpath:
                    TRACER.log("Scrubbing user PYTHONPATH element: %s" % path)
            elif inherit_path == InheritPath.PREFER:
                TRACER.log("Prepending user PYTHONPATH: %s" % os.pathsep.join(user_pythonpath))
                scrubbed_sys_path = user_pythonpath + scrubbed_sys_path
            elif inherit_path == InheritPath.FALLBACK:
                TRACER.log("Appending user PYTHONPATH: %s" % os.pathsep.join(user_pythonpath))
                scrubbed_sys_path = scrubbed_sys_path + user_pythonpath

        # N.B.: Materialize the filter so the membership tests and loop below do not exhaust a
        # one-shot iterator under Python 3.
        scrub_from_importer_cache = list(
            filter(
                lambda key: any(key.startswith(path) for path in scrub_paths),
                sys.path_importer_cache.keys(),
            )
        )
        scrubbed_importer_cache = dict(
            (key, value)
            for (key, value) in sys.path_importer_cache.items()
            if key not in scrub_from_importer_cache
        )

        for importer_cache_entry in scrub_from_importer_cache:
            TRACER.log("Scrubbing from path_importer_cache: %s" % importer_cache_entry, V=2)

        return scrubbed_sys_path, scrubbed_importer_cache
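
The scrub at its core is an order-preserving set difference: keep sys.path order, drop anything flagged. In isolation (the sample paths are illustrative):

sys_path = ["/app/my.pex", "/usr/lib/python3/site-packages", "/app/my.pex/.deps"]
scrub_paths = {"/usr/lib/python3/site-packages"}
scrubbed_sys_path = [p for p in dict.fromkeys(sys_path) if p not in scrub_paths]
print(scrubbed_sys_path)  # ['/app/my.pex', '/app/my.pex/.deps']
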
Example #18
    def minimum_sys_path(cls, site_libs, inherit_path):
        scrub_paths = OrderedSet()
        site_distributions = OrderedSet()
        user_site_distributions = OrderedSet()

        def all_distribution_paths(path):
            locations = set(dist.location for dist in find_distributions(path))
            return set([path]) | locations | set(
                os.path.realpath(path) for path in locations)

        for path_element in sys.path:
            if cls._tainted_path(path_element, site_libs):
                TRACER.log('Tainted path element: %s' % path_element)
                site_distributions.update(all_distribution_paths(path_element))
            else:
                TRACER.log('Not a tainted path element: %s' % path_element,
                           V=2)

        user_site_distributions.update(all_distribution_paths(USER_SITE))

        if inherit_path == 'false':
            scrub_paths = site_distributions | user_site_distributions
            for path in user_site_distributions:
                TRACER.log('Scrubbing from user site: %s' % path)
            for path in site_distributions:
                TRACER.log('Scrubbing from site-packages: %s' % path)

        scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)

        pythonpath = cls.unstash_pythonpath()
        if pythonpath is not None:
            original_pythonpath = pythonpath.split(os.pathsep)
            user_pythonpath = list(
                OrderedSet(original_pythonpath) - set(sys.path))
            if original_pythonpath == user_pythonpath:
                TRACER.log('Unstashed PYTHONPATH of %s' % pythonpath, V=2)
            else:
                TRACER.log(
                    'Extracted user PYTHONPATH of %s from unstashed PYTHONPATH of %s'
                    % (os.pathsep.join(user_pythonpath), pythonpath),
                    V=2)

            if inherit_path == 'false':
                for path in user_pythonpath:
                    TRACER.log('Scrubbing user PYTHONPATH element: %s' % path)
            elif inherit_path == 'prefer':
                TRACER.log('Prepending user PYTHONPATH: %s' %
                           os.pathsep.join(user_pythonpath))
                scrubbed_sys_path = user_pythonpath + scrubbed_sys_path
            elif inherit_path == 'fallback':
                TRACER.log('Appending user PYTHONPATH: %s' %
                           os.pathsep.join(user_pythonpath))
                scrubbed_sys_path = scrubbed_sys_path + user_pythonpath

        # N.B.: Materialize the filter so the membership tests and loop below do not exhaust a
        # one-shot iterator under Python 3.
        scrub_from_importer_cache = list(
            filter(
                lambda key: any(key.startswith(path) for path in scrub_paths),
                sys.path_importer_cache.keys()))
        scrubbed_importer_cache = dict(
            (key, value) for (key, value) in sys.path_importer_cache.items()
            if key not in scrub_from_importer_cache)

        for importer_cache_entry in scrub_from_importer_cache:
            TRACER.log('Scrubbing from path_importer_cache: %s' %
                       importer_cache_entry,
                       V=2)

        return scrubbed_sys_path, scrubbed_importer_cache
Example #19
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = None  # Defaults to $PATH
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = min(interpreters) if interpreters else None
    if options.use_first_matching_interpreter and interpreters:
        if len(interpreters) > 1:
            unused_interpreters = set(interpreters) - {interpreter}
            TRACER.log(
                "Multiple interpreters resolved, but only using {} because "
                "`--use-first-matching-interpreter` was used. These interpreters were matched but "
                "will not be used: {}".format(
                    interpreter.binary,
                    ", ".join(interpreter.binary for interpreter in sorted(unused_interpreters)),
                )
            )
        interpreters = [interpreter]

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    # If we're only building the PEX for the first of many interpreters due to
    # `--use-first-matching-interpreter` selection, we do not want to enable those same interpreter
    # constraints at runtime, where they could lead to a different interpreter being selected
    # leading to a failure to execute the PEX. Instead we rely on the shebang set by that single
    # interpreter to pick out a similar interpreter at runtime (for a CPython interpreter, the
    # shebang will be `#!/usr/bin/env pythonX.Y` which should generally be enough to select a
    # matching interpreter). To be clear though, there are many corner cases this will not handle,
    # including mismatching abi (python2.7m vs python2.7mu) when the PEX contains platform specific
    # wheels, etc.
    if options.interpreter_constraint and not options.use_first_matching_interpreter:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    "  %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder