Example 1
def test_requirement_constraints(rule_runner: RuleRunner) -> None:
    direct_deps = ["requests>=1.0.0,<=2.23.0"]

    def assert_direct_requirements(pex_info):
        assert set(Requirement.parse(r) for r in pex_info["requirements"]) == set(
            Requirement.parse(d) for d in direct_deps
        )

    # Unconstrained, we should always pick the top of the range (requests 2.23.0), since the top of
    # the range has a transitive closure of universal wheels.
    direct_pex_info = create_pex_and_get_pex_info(
        rule_runner, requirements=PexRequirements(direct_deps))
    assert_direct_requirements(direct_pex_info)
    assert {
        "certifi-2020.12.5-py2.py3-none-any.whl",
        "chardet-3.0.4-py2.py3-none-any.whl",
        "idna-2.10-py2.py3-none-any.whl",
        "requests-2.23.0-py2.py3-none-any.whl",
        "urllib3-1.25.11-py2.py3-none-any.whl",
    } == set(direct_pex_info["distributions"].keys())

    constraints = ["requests==2.0.0"]
    rule_runner.create_file("constraints.txt", "\n".join(constraints))
    constrained_pex_info = create_pex_and_get_pex_info(
        rule_runner,
        requirements=PexRequirements(direct_deps),
        additional_pants_args=(
            "--python-setup-requirement-constraints=constraints.txt", ),
    )
    assert_direct_requirements(constrained_pex_info)
    assert {"requests-2.0.0-py2.py3-none-any.whl"
            } == set(constrained_pex_info["distributions"].keys())
Example 2
def create_pex_and_get_pex_info(
    rule_runner: RuleRunner,
    *,
    requirements=PexRequirements(),
    entry_point=None,
    interpreter_constraints=PexInterpreterConstraints(),
    platforms=PexPlatforms(),
    sources: Optional[Digest] = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    internal_only: bool = True,
) -> Dict:
    return cast(
        Dict,
        create_pex_and_get_all_data(
            rule_runner,
            requirements=requirements,
            entry_point=entry_point,
            interpreter_constraints=interpreter_constraints,
            platforms=platforms,
            sources=sources,
            additional_pants_args=additional_pants_args,
            additional_pex_args=additional_pex_args,
            internal_only=internal_only,
        )["info"],
    )
Example 3
async def find_build_system(request: BuildSystemRequest, setuptools: Setuptools) -> BuildSystem:
    digest_contents = await Get(
        DigestContents,
        DigestSubset(
            request.digest,
            PathGlobs(
                globs=[os.path.join(request.working_directory, "pyproject.toml")],
                glob_match_error_behavior=GlobMatchErrorBehavior.ignore,
            ),
        ),
    )
    ret = None
    if digest_contents:
        file_content = next(iter(digest_contents))
        settings: Mapping[str, Any] = toml.loads(file_content.content.decode())
        build_system = settings.get("build-system")
        if build_system is not None:
            build_backend = build_system.get("build-backend")
            if build_backend is None:
                raise InvalidBuildConfigError(
                    f"No build-backend found in the [build-system] table in {file_content.path}"
                )
            requires = build_system.get("requires")
            if requires is None:
                raise InvalidBuildConfigError(
                    f"No requires found in the [build-system] table in {file_content.path}"
                )
            ret = BuildSystem(PexRequirements(requires), build_backend)
    # Per PEP 517: "If the pyproject.toml file is absent, or the build-backend key is missing,
    #   the source tree is not using this specification, and tools should revert to the legacy
    #   behaviour of running setup.py."
    if ret is None:
        ret = BuildSystem.legacy(setuptools)
    return ret
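For reference, a minimal sketch (not taken from the Pants sources) of the kind of [build-system] table that find_build_system parses above. It uses the same toml.loads call and the same build-backend / requires keys the rule inspects; the concrete requirement strings are illustrative only.

import toml

# Hypothetical pyproject.toml content; any PEP 517-style table with both keys would satisfy the rule.
PYPROJECT = """\
[build-system]
requires = ["setuptools>=50.0", "wheel"]
build-backend = "setuptools.build_meta"
"""

settings = toml.loads(PYPROJECT)
build_system = settings["build-system"]
assert build_system["build-backend"] == "setuptools.build_meta"
assert build_system["requires"] == ["setuptools>=50.0", "wheel"]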
Example 4
async def get_requirements(
    dep_owner: DependencyOwner,
    union_membership: UnionMembership,
    setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([dep_owner.exported_target.target.address]),
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )

    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    uneval_trans_excl = [
        tgt.get(Dependencies).unevaluated_transitive_excludes for tgt in transitive_targets.closure
    ]
    if uneval_trans_excl:
        nested_trans_excl = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed) for unparsed in uneval_trans_excl
        )
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(excludes for excludes in nested_trans_excl)
        )

    direct_deps_chained = FrozenOrderedSet(itertools.chain.from_iterable(direct_deps_tgts))
    direct_deps_with_excl = direct_deps_chained.difference(transitive_excludes)

    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in direct_deps_with_excl
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )
    return ExportedTargetRequirements(req_strs)
Example 5
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    setuptools_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=req.interpreter_constraints,
        ),
    )
    # The setuptools dist dir, which setuptools creates under the chroot (not to be confused
    # with Pants's own dist dir, at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            setuptools_pex,
            argv=("setup.py", *req.args),
            input_digest=req.chroot.digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir,),
            description=f"Run setuptools for {req.exported_target.target.address}",
            level=LogLevel.DEBUG,
        ),
    )
    output_digest = await Get(Digest, RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
Example 6
def test_venv_pex_resolve_info(rule_runner: RuleRunner,
                               pex_type: type[Pex | VenvPex]) -> None:
    constraints = [
        "requests==2.23.0",
        "certifi==2020.12.5",
        "chardet==3.0.4",
        "idna==2.10",
        "urllib3==1.25.11",
    ]
    rule_runner.create_file("constraints.txt", "\n".join(constraints))
    pex = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        requirements=PexRequirements(["requests==2.23.0"],
                                     apply_constraints=True),
        additional_pants_args=(
            "--python-requirement-constraints=constraints.txt", ),
    ).pex
    dists = rule_runner.request(PexResolveInfo, [pex])
    assert dists[0] == PexDistributionInfo("certifi", Version("2020.12.5"),
                                           None, ())
    assert dists[1] == PexDistributionInfo("chardet", Version("3.0.4"), None,
                                           ())
    assert dists[2] == PexDistributionInfo(
        "idna", Version("2.10"),
        SpecifierSet("!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"), ())
    assert dists[3].project_name == "requests"
    assert dists[3].version == Version("2.23.0")
    # requires_dists is parsed from metadata written by the pex tool, and is always
    #   a set of valid pkg_resources.Requirements.
    assert Requirement.parse(
        'PySocks!=1.5.7,>=1.5.6; extra == "socks"') in dists[3].requires_dists
    assert dists[4].project_name == "urllib3"
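A small illustrative check (my own sketch, not part of the test above) of the pkg_resources parsing that the requires_dists comment refers to: the marker-bearing string parses into a single Requirement whose project name and environment marker can be inspected directly.

from pkg_resources import Requirement

req = Requirement.parse('PySocks!=1.5.7,>=1.5.6; extra == "socks"')
assert req.project_name == "PySocks"
assert req.marker is not None  # carries the 'extra == "socks"' marker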
Example 7
def create_pex_and_get_pex_info(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    internal_only: bool = True,
) -> Dict:
    return cast(
        Dict,
        create_pex_and_get_all_data(
            rule_runner,
            pex_type=pex_type,
            requirements=requirements,
            main=main,
            interpreter_constraints=interpreter_constraints,
            platforms=platforms,
            sources=sources,
            additional_pants_args=additional_pants_args,
            additional_pex_args=additional_pex_args,
            internal_only=internal_only,
        )["info"],
    )
Example 8
def test_issue_12222(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "constraints.txt": "foo==1.0\nbar==1.0",
        "BUILD": dedent(
            """
            python_requirement(name="foo",requirements=["foo"])
            python_requirement(name="bar",requirements=["bar"])
            python_sources(name="lib",sources=[],dependencies=[":foo"])
            """
        ),
    })
    request = PexFromTargetsRequest(
        [Address("", target_name="lib")],
        output_filename="demo.pex",
        internal_only=False,
        platforms=PexPlatforms(["some-platform-x86_64"]),
    )
    rule_runner.set_options([
        "--python-requirement-constraints=constraints.txt",
        "--python-resolve-all-constraints",
    ])
    result = rule_runner.request(PexRequest, [request])

    assert result.requirements == PexRequirements(["foo"],
                                                  apply_constraints=True)
Example 9
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, lint_subsystem: LintSubsystem
) -> LintResult:
    flake8_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=partition.interpreter_constraints,
            main=flake8.main,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    source_files_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    flake8_pex, config_files, source_files = await MultiGet(
        flake8_pex_get, config_files_get, source_files_get
    )

    input_digest = await Get(
        Digest, MergeDigests((source_files.snapshot.digest, config_files.snapshot.digest))
    )

    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8, report_file_name=report_file_name),
            input_digest=input_digest,
            output_files=(report_file_name,) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )

    report = None
    if report_file_name:
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)

    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example 10
async def resolve_plugins(
        request: PluginsRequest,
        global_options: GlobalOptions) -> ResolvedPluginDistributions:
    """This rule resolves plugins using a VenvPex, and exposes the absolute paths of their dists.

    NB: This relies on the fact that PEX constructs venvs in a stable location (within the
    `named_caches` directory), but consequently needs to disable the process cache: see the
    ProcessCacheScope reference in the body.
    """
    # The repository's constraints are not relevant here, because this resolve is mixed
    # into the Pants process's path, and never into user code.
    requirements = PexRequirements(sorted(global_options.options.plugins),
                                   apply_constraints=False)
    if not requirements:
        return ResolvedPluginDistributions()

    python: PythonExecutable | None = None
    if not request.interpreter_constraints:
        python = cast(
            PythonExecutable,
            PythonExecutable.fingerprinted(
                sys.executable,
                ".".join(map(str, sys.version_info[:3])).encode("utf8")),
        )

    plugins_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pants_plugins.pex",
            internal_only=True,
            python=python,
            requirements=requirements,
            interpreter_constraints=request.interpreter_constraints,
            description=f"Resolving plugins: {', '.join(requirements.req_strings)}",
        ),
    )

    # NB: We run this Process per-restart because it (intentionally) leaks named cache
    # paths in a way that invalidates the Process-cache. See the method doc.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if global_options.options.plugins_force_resolve else
                   ProcessCacheScope.PER_RESTART_SUCCESSFUL)

    plugins_process_result = await Get(
        ProcessResult,
        VenvPexProcess(
            plugins_pex,
            argv=("-c", "import os, site; print(os.linesep.join(site.getsitepackages()))"),
            description="Extracting plugin locations",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return ResolvedPluginDistributions(
        plugins_process_result.stdout.decode().strip().split("\n"))
Example 11
def _create_pex(rule_runner: RuleRunner) -> Pex:
    rule_runner.set_options(["--backend-packages=pants.backend.python"])
    request = PexRequest(
        output_filename="setup-py-runner.pex",
        internal_only=True,
        requirements=PexRequirements(["setuptools==44.0.0", "wheel==0.34.2"]),
    )
    return rule_runner.request(Pex, [request])
Example 12
def test_resolves_dependencies(rule_runner: RuleRunner) -> None:
    requirements = PexRequirements(["six==1.12.0", "jsonschema==2.6.0", "requests==2.23.0"])
    pex_info = create_pex_and_get_pex_info(rule_runner, requirements=requirements)
    # NB: We do not check for transitive dependencies, which PEX-INFO will include. We only check
    # that at least the dependencies we requested are included.
    assert set(parse_requirements(requirements)).issubset(
        set(parse_requirements(pex_info["requirements"]))
    )
Example 13
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    isort_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            entry_point=isort.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=isort.config,
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, isort_pex, config_digest = await MultiGet(
        source_files_request, isort_pex_request, config_digest_request)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files_snapshot.digest, isort_pex.digest, config_digest)),
    )

    process = await Get(
        Process,
        PexProcess(
            isort_pex,
            argv=generate_args(source_files=source_files,
                               isort=isort,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example 14
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    isort_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )
    source_files, isort_pex = await MultiGet(source_files_get, isort_pex_get)

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    config_files = await Get(ConfigFiles, ConfigFilesRequest,
                             isort.config_request(source_files_snapshot.dirs))

    # Isort 5+ changes how config files are handled. Determine which semantics we should use.
    is_isort5 = False
    if isort.config:
        isort_info = await Get(PexResolveInfo, VenvPex, isort_pex)
        is_isort5 = any(
            dist_info.project_name == "isort" and dist_info.version.major >= 5
            for dist_info in isort_info)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files_snapshot.digest, config_files.snapshot.digest)))

    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_argv(source_files,
                               isort,
                               is_isort5=is_isort5,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example 15
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython,
                                      pex_env: PexEnvironment) -> ReplRequest:
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            main=ipython.main,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )

    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest,
             ipython_pex.digest)),
    )

    args = list(
        pex_env.create_argv(repl.in_chroot(ipython_pex.name),
                            python=ipython_pex.python))
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=ipython_pex.python is not None),
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example 16
def test_use_packed_pex_requirements(rule_runner: RuleRunner,
                                     is_all_constraints_resolve: bool,
                                     internal_only: bool) -> None:
    requirements = PexRequirements(
        ["six==1.12.0"], is_all_constraints_resolve=is_all_constraints_resolve)
    pex_data = create_pex_and_get_all_data(rule_runner,
                                           requirements=requirements,
                                           internal_only=internal_only)
    # If this is either internal_only, or an all_constraints resolve, we should use packed.
    should_use_packed = is_all_constraints_resolve or internal_only
    assert (not pex_data.is_zipapp) == should_use_packed
Example 17
def _create_pex(
    rule_runner: RuleRunner,
    interpreter_constraints: PexInterpreterConstraints,
) -> Pex:
    request = PexRequest(
        output_filename="setup-py-runner.pex",
        internal_only=True,
        requirements=PexRequirements(["setuptools==44.0.0", "wheel==0.34.2"]),
        interpreter_constraints=interpreter_constraints,
    )
    return rule_runner.request(Pex, [request])
Example 18
def test_requirement_constraints(rule_runner: RuleRunner) -> None:
    direct_deps = ["requests>=1.0.0,<=2.23.0"]

    def assert_direct_requirements(pex_info):
        assert {PipRequirement.parse(r) for r in pex_info["requirements"]} == {
            PipRequirement.parse(d) for d in direct_deps
        }

    # Unconstrained, we should always pick the top of the range (requests 2.23.0), since the top of
    # the range has a transitive closure of universal wheels.
    direct_pex_info = create_pex_and_get_pex_info(rule_runner,
                                                  requirements=PexRequirements(
                                                      direct_deps,
                                                      apply_constraints=False))
    assert_direct_requirements(direct_pex_info)
    assert "requests-2.23.0-py2.py3-none-any.whl" in set(
        direct_pex_info["distributions"].keys())

    constraints = [
        "requests==2.16.0",
        "certifi==2019.6.16",
        "chardet==3.0.2",
        "idna==2.5",
        "urllib3==1.21.1",
    ]
    rule_runner.create_file("constraints.txt", "\n".join(constraints))
    constrained_pex_info = create_pex_and_get_pex_info(
        rule_runner,
        requirements=PexRequirements(direct_deps, apply_constraints=True),
        additional_pants_args=(
            "--python-requirement-constraints=constraints.txt", ),
    )
    assert_direct_requirements(constrained_pex_info)
    assert {
        "certifi-2019.6.16-py2.py3-none-any.whl",
        "chardet-3.0.2-py2.py3-none-any.whl",
        "idna-2.5-py2.py3-none-any.whl",
        "requests-2.16.0-py2.py3-none-any.whl",
        "urllib3-1.21.1-py2.py3-none-any.whl",
    } == set(constrained_pex_info["distributions"].keys())
Example 19
def _create_pex(self) -> Pex:
    return self.request(
        Pex,
        [
            PexRequest(
                output_filename="setup-py-runner.pex",
                internal_only=True,
                requirements=PexRequirements(["setuptools==44.0.0", "wheel==0.34.2"]),
            ),
            create_options_bootstrapper(args=["--backend-packages=pants.backend.python"]),
        ],
    )
Example 20
async def setup_lambdex(lambdex: Lambdex) -> LambdexSetup:
    requirements_pex = await Get(
        Pex,
        PexRequest(
            output_filename="lambdex.pex",
            internal_only=True,
            requirements=PexRequirements(lambdex.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
            entry_point=lambdex.entry_point,
        ),
    )
    return LambdexSetup(requirements_pex=requirements_pex)
Example 21
async def setup_coverage(coverage: CoverageSubsystem) -> CoverageSetup:
    pex = await Get(
        Pex,
        PexRequest(
            output_filename="coverage.pex",
            internal_only=True,
            requirements=PexRequirements(coverage.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(coverage.interpreter_constraints),
            entry_point=coverage.entry_point,
        ),
    )
    return CoverageSetup(pex)
Example 22
def create_pex_and_get_all_data(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_inputs: Digest | None = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    env: Mapping[str, str] | None = None,
    internal_only: bool = True,
) -> Dict:
    request = PexRequest(
        output_filename="test.pex",
        internal_only=internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        main=main,
        sources=sources,
        additional_inputs=additional_inputs,
        additional_args=additional_pex_args,
    )
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python", *additional_pants_args],
        env=env,
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    pex = rule_runner.request(pex_type, [request])
    if isinstance(pex, Pex):
        digest = pex.digest
    elif isinstance(pex, VenvPex):
        digest = pex.digest
    else:
        raise AssertionError(
            f"Expected a Pex or a VenvPex but got a {type(pex)}.")
    rule_runner.scheduler.write_digest(digest)
    pex_path = os.path.join(rule_runner.build_root, "test.pex")
    with zipfile.ZipFile(pex_path, "r") as zipfp:
        with zipfp.open("PEX-INFO", "r") as pex_info:
            pex_info_content = pex_info.readline().decode()
            pex_list = zipfp.namelist()
    return {
        "pex": pex,
        "local_path": pex_path,
        "info": json.loads(pex_info_content),
        "files": pex_list,
    }
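For clarity, a standalone sketch of the PEX-INFO extraction step performed at the end of the helper above, assuming only the standard library. A .pex file is a zip archive whose PEX-INFO member is a JSON document; the read_pex_info name is hypothetical.

import json
import zipfile

def read_pex_info(pex_path: str) -> dict:
    # Open the built .pex as a zip and decode its JSON PEX-INFO member.
    with zipfile.ZipFile(pex_path, "r") as zf:
        with zf.open("PEX-INFO", "r") as fp:
            return json.loads(fp.read().decode())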
Example 23
async def get_requirements(
    dep_owner: DependencyOwner, union_membership: UnionMembership
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets, Addresses([dep_owner.exported_target.target.address])
    )

    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    #  `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    #  stricter build graph hygiene, it makes sense to require that targets directly declare their
    #  true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    #  deps across exported target boundaries, it's not a big stretch to just insist that
    #  requirements must be direct deps.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in itertools.chain.from_iterable(direct_deps_tgts)
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}=={kwargs.version}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )

    return ExportedTargetRequirements(req_strs)
Example 24
def test_venv_pex_resolve_info(rule_runner: RuleRunner, pex_type: type[Pex | VenvPex]) -> None:
    venv_pex = create_pex_and_get_all_data(
        rule_runner, pex_type=pex_type, requirements=PexRequirements(["requests==2.23.0"])
    )["pex"]
    dists = rule_runner.request(PexResolveInfo, [venv_pex])
    assert dists[0] == PexDistributionInfo("certifi", Version("2020.12.5"), None, ())
    assert dists[1] == PexDistributionInfo("chardet", Version("3.0.4"), None, ())
    assert dists[2] == PexDistributionInfo(
        "idna", Version("2.10"), SpecifierSet("!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"), ()
    )
    assert dists[3].project_name == "requests"
    assert dists[3].version == Version("2.23.0")
    assert Requirement.parse('PySocks!=1.5.7,>=1.5.6; extra == "socks"') in dists[3].requires_dists
    assert dists[4].project_name == "urllib3"
Example 25
async def generate_user_lockfile_goal(
    addresses: Addresses,
    python_setup: PythonSetup,
    workspace: Workspace,
) -> GenerateUserLockfileGoal:
    if python_setup.lockfile is None:
        logger.warning(
            "You ran `./pants generate-user-lockfile`, but `[python].experimental_lockfile` "
            "is not set. Please set this option to the path where you'd like the lockfile for "
            "your code's dependencies to live."
        )
        return GenerateUserLockfileGoal(exit_code=1)

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        # NB: By looking at the dependencies, rather than the closure, we only generate for
        # requirements that are actually used in the project.
        for tgt in transitive_targets.dependencies
        if tgt.has_field(PythonRequirementsField)
    )

    if not reqs:
        logger.warning(
            "No third-party requirements found for the transitive closure, so a lockfile will not "
            "be generated."
        )
        return GenerateUserLockfileGoal(exit_code=0)

    result = await Get(
        PythonLockfile,
        PythonLockfileRequest(
            reqs.req_strings,
            # TODO(#12314): Use interpreter constraints from the transitive closure.
            InterpreterConstraints(python_setup.interpreter_constraints),
            resolve_name="not yet implemented",
            lockfile_dest=python_setup.lockfile,
            _description=(
                f"Generate lockfile for {pluralize(len(reqs.req_strings), 'requirement')}: "
                f"{', '.join(reqs.req_strings)}"
            ),
            # TODO(12382): Make this command actually accurate once we figure out the semantics
            #  for user lockfiles. This is currently misleading.
            _regenerate_command="./pants generate-user-lockfile ::",
        ),
    )
    workspace.write_digest(result.digest)
    logger.info(f"Wrote lockfile to {result.path}")

    return GenerateUserLockfileGoal(exit_code=0)
Example 26
async def setup_user_lockfile_requests(
    requested: _SpecifiedUserResolves, python_setup: PythonSetup
) -> _UserLockfileRequests:
    # First, associate all resolves with their consumers.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    resolves_to_roots = defaultdict(list)
    for tgt in all_build_targets:
        if not tgt.has_field(PythonResolveField):
            continue
        tgt[PythonResolveField].validate(python_setup)
        resolve = tgt[PythonResolveField].value
        if resolve is None:
            continue
        resolves_to_roots[resolve].append(tgt.address)

    # Expand the resolves for all specified.
    transitive_targets_per_resolve = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(resolves_to_roots[resolve]))
        for resolve in requested
    )
    pex_requirements_per_resolve = []
    interpreter_constraints_per_resolve = []
    for transitive_targets in transitive_targets_per_resolve:
        req_fields = []
        ic_fields = []
        for tgt in transitive_targets.closure:
            if tgt.has_field(PythonRequirementsField):
                req_fields.append(tgt[PythonRequirementsField])
            if tgt.has_field(InterpreterConstraintsField):
                ic_fields.append(tgt[InterpreterConstraintsField])
        pex_requirements_per_resolve.append(
            PexRequirements.create_from_requirement_fields(req_fields)
        )
        interpreter_constraints_per_resolve.append(
            InterpreterConstraints.create_from_compatibility_fields(ic_fields, python_setup)
        )

    requests = (
        PythonLockfileRequest(
            requirements.req_strings,
            interpreter_constraints,
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves_to_lockfiles[resolve],
        )
        for resolve, requirements, interpreter_constraints in zip(
            requested, pex_requirements_per_resolve, interpreter_constraints_per_resolve
        )
    )
    return _UserLockfileRequests(requests)
Example 27
async def setup_setuptools(setuptools: Setuptools) -> SetuptoolsSetup:
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    requirements_pex = await Get(
        Pex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(setuptools.interpreter_constraints),
        ),
    )
    return SetuptoolsSetup(
        requirements_pex=requirements_pex,
    )
Example 28
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    isort_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            main=isort.main,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest,
                           isort.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, isort_pex, config_files = await MultiGet(
        source_files_get, isort_pex_get, config_files_get)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files_snapshot.digest, config_files.snapshot.digest)))

    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_args(source_files=source_files,
                               isort=isort,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example 29
def get_distributions(rule_runner: RuleRunner, *, requirements: Iterable[str],
                      constraints: Iterable[str]) -> ExtractedPexDistributions:
    # NB: The constraints are important for determinism.
    rule_runner.set_options([
        "--backend-packages=pants.backend.python",
        "--python-setup-requirement-constraints=constraints.txt",
    ])
    rule_runner.create_file("constraints.txt", "\n".join(constraints))

    pex_request = PexRequest(
        output_filename="test.pex",
        requirements=PexRequirements(requirements),
        interpreter_constraints=PexInterpreterConstraints([">=3.6"]),
        internal_only=True,
    )
    built_pex = rule_runner.request(Pex, [pex_request])
    return rule_runner.request(ExtractedPexDistributions, [built_pex])
Example 30
async def setup_docformatter(setup_request: SetupRequest,
                             docformatter: Docformatter) -> Setup:
    docformatter_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="docformatter.pex",
            internal_only=True,
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.interpreter_constraints),
            main=docformatter.main,
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, docformatter_pex = await MultiGet(source_files_request,
                                                    docformatter_pex_request)

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    process = await Get(
        Process,
        VenvPexProcess(
            docformatter_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)