Example #1
async def create_ipython_repl_request(
    repl: IPythonRepl, ipython: IPython, pex_env: PexEnvironment
) -> ReplRequest:
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            Addresses(tgt.address for tgt in repl.targets), internal_only=True
        ),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)
    )

    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            entry_point=ipython.entry_point,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )

    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest, ipython_pex.digest)
        ),
    )

    args = [repl.in_chroot(ipython_pex.name)]
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
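A recurring shape in these rules is building `Get` requests without awaiting them, then resolving them together with `MultiGet` so the engine can run them concurrently. A minimal sketch of that pattern (the rule name and body here are illustrative, not from Pants):

async def fetch_concurrently(addresses: Addresses) -> Targets:
    # Constructing a Get does not execute it; work starts only when it is awaited.
    targets_get = Get(Targets, Addresses, addresses)
    transitive_get = Get(TransitiveTargets, Addresses, addresses)
    # A single MultiGet resolves both in parallel, unlike two sequential awaits.
    targets, _transitive = await MultiGet(targets_get, transitive_get)
    return targets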
Example #2
async def gather_coordinates_for_jvm_lockfile(
    request: GatherJvmCoordinatesRequest,
) -> ArtifactRequirements:
    # Separate `artifact_inputs` by whether the strings parse as an `Address` or not.
    requirements: set[ArtifactRequirement] = set()
    candidate_address_inputs: set[AddressInput] = set()
    bad_artifact_inputs = []
    for artifact_input in request.artifact_inputs:
        # Try parsing as a `Coordinate` first since otherwise `AddressInput.parse` will try to see if the
        # group name is a file on disk.
        if 2 <= artifact_input.count(":") <= 3:
            try:
                maybe_coord = Coordinate.from_coord_str(
                    artifact_input).as_requirement()
                requirements.add(maybe_coord)
                continue
            except Exception:
                pass

        try:
            address_input = AddressInput.parse(
                artifact_input,
                description_of_origin=f"the option `{request.option_name}`")
            candidate_address_inputs.add(address_input)
        except Exception:
            bad_artifact_inputs.append(artifact_input)

    if bad_artifact_inputs:
        raise ValueError(
            "The following values could not be parsed as an address nor as a JVM coordinate string. "
            f"The problematic inputs supplied to the `{request.option_name}` option were: "
            f"{', '.join(bad_artifact_inputs)}.")

    # Gather coordinates from the provided addresses.
    addresses = await MultiGet(
        Get(Address, AddressInput, ai) for ai in candidate_address_inputs)
    all_supplied_targets = await Get(Targets, Addresses(addresses))
    other_targets = []
    for tgt in all_supplied_targets:
        if JvmArtifactFieldSet.is_applicable(tgt):
            requirements.add(ArtifactRequirement.from_jvm_artifact_target(tgt))
        else:
            other_targets.append(tgt)

    if other_targets:
        raise ValueError(
            softwrap(f"""
                The following addresses reference targets that are not `jvm_artifact` targets.
                Please only supply the addresses of `jvm_artifact` for the `{request.option_name}`
                option. The problematic addresses are: {', '.join(str(tgt.address) for tgt in other_targets)}.
                """))

    return ArtifactRequirements(requirements)
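For intuition, a hypothetical `artifact_inputs` value and how the loop above routes each entry (a coordinate string contains two or three `:` separators; anything else falls through to `AddressInput.parse`):

artifact_inputs = (
    "org.scala-lang:scala-library:2.13.8",  # two colons: parsed as a Coordinate
    "3rdparty/jvm:scala-library",           # one colon: parsed as an AddressInput
)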
Example #3
async def dependencies(
    console: Console, addresses: Addresses, options: DependenciesOptions,
) -> Dependencies:
    if options.values.transitive:
        transitive_targets = await Get[TransitiveTargets](Addresses, addresses)
        targets = Targets(transitive_targets.closure - FrozenOrderedSet(transitive_targets.roots))
    else:
        target_roots = await Get[Targets](Addresses, addresses)
        targets = await Get[Targets](
            Addresses(
                itertools.chain.from_iterable(
                    tgt.get(DependenciesField).value or () for tgt in target_roots
                )
            )
        )

    include_3rdparty = options.values.type in [
        DependencyType.THIRD_PARTY,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    ]
    include_source = options.values.type in [
        DependencyType.SOURCE,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    ]

    address_strings = set()
    third_party_requirements: Set[str] = set()
    for tgt in targets:
        if include_source:
            address_strings.add(tgt.address.spec)
        if include_3rdparty:
            if tgt.has_field(PythonRequirementsField):
                third_party_requirements.update(
                    str(python_req.requirement) for python_req in tgt[PythonRequirementsField].value
                )
            if tgt.has_field(JarsField):
                third_party_requirements.update(
                    (
                        f"{jar.org}:{jar.name}:{jar.rev}"
                        if jar.rev is not None
                        else f"{jar.org}:{jar.name}"
                    )
                    for jar in tgt[JarsField].value
                )

    with options.line_oriented(console) as print_stdout:
        for address in sorted(address_strings):
            print_stdout(address)
        for requirement_string in sorted(third_party_requirements):
            print_stdout(requirement_string)

    return Dependencies(exit_code=0)
Example #4
 def assert_injected(deps_cls: Type[Dependencies], *,
                     injected: List[str]) -> None:
     provided_addr = Address.parse("//:provided")
     deps_field = deps_cls([provided_addr],
                           address=Address.parse("//:target"))
     result = self.request_single_product(
         Addresses,
         Params(DependenciesRequest(deps_field),
                create_options_bootstrapper()))
     assert result == Addresses(
         sorted([
             provided_addr, *(Address.parse(addr) for addr in injected)
         ]))
Example #5
 def __init__(
         self,
         addresses: Iterable[Address],
         *,
         internal_only: bool,
         interpreter_constraints: InterpreterConstraints = InterpreterConstraints(),
         sources: PythonSourceFiles = PythonSourceFiles.empty(),
 ) -> None:
     self.addresses = Addresses(addresses)
     self.internal_only = internal_only
     self.interpreter_constraints = interpreter_constraints
     self.sources = sources
Example #6
async def create_python_awslambda(
    field_set: PythonAwsLambdaFieldSet,
    lambdex_setup: LambdexSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CreatedAWSLambda:
    # Lambdas typically use the .zip suffix, so we use that instead of .pex.
    pex_filename = f"{field_set.address.target_name}.zip"
    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"manylinux2014_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # Set the pymalloc "m" ABI flag, which was removed in Python 3.8: https://bugs.python.org/issue36707
    if (py_major, py_minor) < (3, 8):
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            entry_point=None,
            output_filename=pex_filename,
            platforms=PexPlatforms([platform]),
        ))

    pex_result = await Get[TwoStepPex](TwoStepPexFromTargetsRequest,
                                       pex_request)
    input_digest = await Get[Digest](MergeDigests(
        (pex_result.pex.digest, lambdex_setup.requirements_pex.digest)))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    lambdex_args = ("build", "-e", field_set.handler.value, pex_filename)
    process = lambdex_setup.requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./lambdex.pex",
        pex_args=lambdex_args,
        input_digest=input_digest,
        output_files=(pex_filename, ),
        description=f"Setting up handler in {pex_filename}",
    )
    result = await Get[ProcessResult](Process, process)
    # Note that the AWS-facing handler function is always lambdex_handler.handler, which
    # is the wrapper injected by lambdex that manages invocation of the actual handler.
    return CreatedAWSLambda(
        digest=result.output_digest,
        name=pex_filename,
        runtime=field_set.runtime.value,
        handler="lambdex_handler.handler",
    )
Example #7
async def resolve_scala_plugins_for_target(
    request: ScalaPluginsForTargetRequest,
    all_scala_plugins: AllScalaPluginTargets,
    jvm: JvmSubsystem,
    scalac: Scalac,
) -> ScalaPluginTargetsForTarget:
    target = request.target
    resolve = request.resolve_name

    plugin_names = target.get(ScalaConsumedPluginNamesField).value
    if plugin_names is None:
        plugin_names_by_resolve = scalac.parsed_default_plugins()
        plugin_names = tuple(plugin_names_by_resolve.get(resolve, ()))

    candidate_plugins = []
    artifact_address_gets = []
    for plugin in all_scala_plugins:
        if _plugin_name(plugin) not in plugin_names:
            continue
        candidate_plugins.append(plugin)
        artifact_field = plugin[ScalacPluginArtifactField]
        address_input = AddressInput.parse(
            artifact_field.value,
            relative_to=target.address.spec_path,
            description_of_origin=(
                f"the `{artifact_field.alias}` field from the target {artifact_field.address}"
            ),
        )
        artifact_address_gets.append(Get(Address, AddressInput, address_input))

    artifact_addresses = await MultiGet(artifact_address_gets)
    candidate_artifacts = await Get(Targets, Addresses(artifact_addresses))

    plugins: dict[str, tuple[Target, Target]] = {}  # Maps plugin name to (plugin target, JVM artifact target)
    for plugin, artifact in zip(candidate_plugins, candidate_artifacts):
        if artifact[JvmResolveField].normalized_value(jvm) != resolve:
            continue

        plugins[_plugin_name(plugin)] = (plugin, artifact)

    for plugin_name in plugin_names:
        if plugin_name not in plugins:
            raise Exception(
                f"Could not find Scala plugin `{plugin_name}` in resolve `{resolve}` "
                f"for target {request.target}")

    plugin_targets, artifact_targets = zip(*plugins.values()) if plugins else ((), ())
    return ScalaPluginTargetsForTarget(Targets(plugin_targets), Targets(artifact_targets))
Example #8
async def run_ipython_repl(repl: IPythonRepl, ipython: IPython) -> ReplBinary:
    addresses = Addresses(tgt.address for tgt in repl.targets)
    two_step_pex = await Get[TwoStepPex](
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=addresses,
                output_filename="ipython-repl.pex",
                entry_point=ipython.get_entry_point(),
                additional_requirements=ipython.get_requirement_specs(),
            )
        )
    )
    repl_pex = two_step_pex.pex
    return ReplBinary(digest=repl_pex.digest, binary_name=repl_pex.output_filename)
Example #9
async def get_requirements(
    dep_owner: DependencyOwner, union_membership: UnionMembership
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets, Addresses([dep_owner.exported_target.target.address])
    )

    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    #  `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    #  stricter build graph hygiene, it makes sense to require that targets directly declare their
    #  true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    #  deps across exported target boundaries, it's not a big stretch to just insist that
    #  requirements must be direct deps.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in itertools.chain.from_iterable(direct_deps_tgts)
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}=={kwargs.version}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )

    return ExportedTargetRequirements(req_strs)
Example #10
async def transitive_targets_lite(request: TransitiveTargetsRequestLite) -> TransitiveTargets:
    roots_as_targets = await Get(Targets, Addresses(request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: Dict[Address, Tuple[Address, ...]] = {}
    while queued:
        direct_dependencies_addresses_per_tgt = await MultiGet(
            Get(Addresses, DependenciesRequestLite(tgt.get(Dependencies))) for tgt in queued
        )
        direct_dependencies_per_tgt = []
        for addresses_per_tgt in direct_dependencies_addresses_per_tgt:
            wrapped_tgts = await MultiGet(
                Get(WrappedTarget, Address, addr) for addr in addresses_per_tgt
            )
            direct_dependencies_per_tgt.append(
                tuple(wrapped_t.target for wrapped_t in wrapped_tgts)
            )

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps) for deps in direct_dependencies_per_tgt),
            )
        )

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies_per_tgt)
        ).difference(visited)
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # subtargets being used, so we need to use `roots_as_targets` to have this expansion.
    _detect_cycles(tuple(t.address for t in roots_as_targets), dependency_mapping)

    # Apply any transitive excludes (`!!` ignores).
    wrapped_transitive_excludes = await MultiGet(
        Get(
            WrappedTarget, AddressInput, AddressInput.parse(addr, relative_to=tgt.address.spec_path)
        )
        for tgt in (*roots_as_targets, *visited)
        for addr in tgt.get(Dependencies).unevaluated_transitive_excludes.values
    )
    transitive_excludes = FrozenOrderedSet(
        wrapped_t.target for wrapped_t in wrapped_transitive_excludes
    )

    return TransitiveTargets(
        tuple(roots_as_targets), FrozenOrderedSet(visited.difference(transitive_excludes))
    )
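Stripped of the engine plumbing, the loop above is an ordinary breadth-first expansion of the dependency graph. A minimal sketch of the same idea in plain Python, assuming `deps` is a dict mapping each address to its direct dependencies:

def transitive_closure(roots, deps):
    visited, queued = set(), set(roots)
    while queued:
        visited |= queued
        # Expand one level, dropping anything already seen so that cycles terminate.
        queued = {dep for addr in queued for dep in deps.get(addr, ())} - visited
    return visited  # The closure, here including the roots themselves.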
Example #11
File: graph.py Project: rhysyngsun/pants
async def resolve_unparsed_address_inputs(
        request: UnparsedAddressInputs,
        global_options: GlobalOptions) -> Addresses:
    addresses = await MultiGet(
        Get(
            Address,
            AddressInput,
            AddressInput.parse(
                v,
                relative_to=request.relative_to,
                subproject_roots=global_options.options.subproject_roots,
            ),
        ) for v in request.values)
    return Addresses(addresses)
Example #12
async def resolve_scala_plugins_for_target(
    request: ScalaPluginsForTargetRequest,
    all_scala_plugins: AllScalaPluginTargets,
    jvm: JvmSubsystem,
    scalac: Scalac,
) -> ScalaPluginTargetsForTarget:

    target = request.target
    resolve = request.resolve_name

    plugin_names = target.get(ScalaConsumedPluginNamesField).value
    if plugin_names is None:
        plugin_names_by_resolve = scalac.parsed_default_plugins()
        plugin_names = tuple(plugin_names_by_resolve.get(resolve, ()))

    candidate_plugins: list[Target] = []
    for plugin in all_scala_plugins:
        if _plugin_name(plugin) in plugin_names:
            candidate_plugins.append(plugin)

    artifact_address_inputs = (
        plugin[ScalacPluginArtifactField].value for plugin in candidate_plugins
    )

    artifact_addresses = await MultiGet(
        # `is not None` is solely to satisfy mypy; the artifact field is required.
        Get(Address, AddressInput, AddressInput.parse(ai))
        for ai in artifact_address_inputs
        if ai is not None
    )

    candidate_artifacts = await Get(Targets, Addresses(artifact_addresses))

    plugins: dict[str, tuple[Target, Target]] = {}  # Maps plugin name to (plugin target, JVM artifact target)
    for plugin, artifact in zip(candidate_plugins, candidate_artifacts):
        if artifact[JvmResolveField].normalized_value(jvm) != resolve:
            continue

        plugins[_plugin_name(plugin)] = (plugin, artifact)

    for plugin_name in plugin_names:
        if plugin_name not in plugins:
            raise Exception(
                f"Could not find Scala plugin `{plugin_name}` in resolve `{resolve}` "
                f"for target {request.target}"
            )

    plugin_targets, artifact_targets = zip(*plugins.values()) if plugins else ((), ())
    return ScalaPluginTargetsForTarget(Targets(plugin_targets), Targets(artifact_targets))
Example #13
File: repl.py Project: hephex/pants
async def create_python_repl_request(repl: PythonRepl,
                                     pex_env: PexEnvironment) -> ReplRequest:

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) so
    # that we can get the interpreter constraints for use in local_dists_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    local_dists_request = Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            Addresses(tgt.address for tgt in repl.targets),
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
        ),
    )

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    requirements_pex, local_dists, sources = await MultiGet(
        requirements_request, local_dists_request, sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, local_dists.pex.digest,
                      sources.source_files.snapshot.digest)),
    )

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                                        python=requirements_pex.python)

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
        "PEX_PATH":
        repl.in_chroot(local_dists.pex.name),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #14
    def test_transitive_targets(self) -> None:
        t1 = MockTarget({}, address=Address.parse(":t1"))
        t2 = MockTarget({Dependencies.alias: [t1.address]},
                        address=Address.parse(":t2"))
        d1 = MockTarget({Dependencies.alias: [t1.address]},
                        address=Address.parse(":d1"))
        d2 = MockTarget({Dependencies.alias: [t2.address]},
                        address=Address.parse(":d2"))
        d3 = MockTarget({}, address=Address.parse(":d3"))
        root = MockTarget(
            {Dependencies.alias: [d1.address, d2.address, d3.address]},
            address=Address.parse(":root"),
        )

        self.add_to_build_file(
            "",
            dedent("""\
                target(name='t1')
                target(name='t2', dependencies=[':t1'])
                target(name='d1', dependencies=[':t1'])
                target(name='d2', dependencies=[':t2'])
                target(name='d3')
                target(name='root', dependencies=[':d1', ':d2', ':d3'])
                """),
        )

        direct_deps = self.request_single_product(
            Targets,
            Params(DependenciesRequest(root[Dependencies]),
                   create_options_bootstrapper()))
        assert direct_deps == Targets([d1, d2, d3])

        transitive_target = self.request_single_product(
            TransitiveTarget,
            Params(WrappedTarget(root), create_options_bootstrapper()))
        assert transitive_target.root == root
        assert {
            dep_transitive_target.root
            for dep_transitive_target in transitive_target.dependencies
        } == {d1, d2, d3}

        transitive_targets = self.request_single_product(
            TransitiveTargets,
            Params(Addresses([root.address, d2.address]),
                   create_options_bootstrapper()),
        )
        assert transitive_targets.roots == (root, d2)
        assert transitive_targets.closure == FrozenOrderedSet(
            [root, d2, d1, d3, t2, t1])
Example #15
File: changed.py Project: wiwa/pants
async def find_owners(
    build_configuration: BuildConfiguration,
    address_mapper: AddressMapper,
    changed_request: ChangedRequest,
) -> ChangedAddresses:
    owners = await Get[Owners](OwnersRequest(sources=changed_request.sources))

    # If the ChangedRequest does not require dependees, then we're done.
    if changed_request.include_dependees == IncludeDependeesOption.NONE:
        return ChangedAddresses(owners.addresses)

    # Otherwise: find dependees.
    all_addresses = await Get[Addresses](AddressSpecs((DescendantAddresses(""),)))
    all_structs = [
        s.value for s in await MultiGet(Get[HydratedStruct](Address, a) for a in all_addresses)
    ]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(
        target_types_from_build_file_aliases(bfa), address_mapper, all_structs
    )
    if changed_request.include_dependees == IncludeDependeesOption.DIRECT:
        return ChangedAddresses(Addresses(graph.dependents_of_addresses(owners.addresses)))
    return ChangedAddresses(Addresses(graph.transitive_dependents_of_addresses(owners.addresses)))
Example #16
async def resolve_unparsed_address_inputs(
        request: UnparsedAddressInputs,
        subproject_roots: SubprojectRoots) -> Addresses:
    addresses = await MultiGet(
        Get(
            Address,
            AddressInput,
            AddressInput.parse(
                v,
                relative_to=request.relative_to,
                subproject_roots=subproject_roots,
                description_of_origin=request.description_of_origin,
            ),
        ) for v in request.values)
    return Addresses(addresses)
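A quick illustration of how `relative_to` affects parsing, with hypothetical inputs:

# AddressInput.parse(":util", relative_to="src/py")  -> the address src/py:util
# AddressInput.parse("3rdparty/python:ansicolors", relative_to="src/py")
#     -> 3rdparty/python:ansicolors (absolute specs ignore `relative_to`)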
Example #17
def test_resources(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD": "resources(name='root', sources=['*.txt'])",
        "one.txt": "",
        "two.txt": "",
        "3rdparty/jvm/default.lock": EMPTY_LOCKFILE,
    })

    # Building the generator target should exclude the individual files and result in a single jar
    # for the generator.
    rendered_classpath = rule_runner.request(
        RenderedClasspath,
        [Addresses([Address(spec_path="", target_name="root")])])
    assert rendered_classpath.content == {
        ".root.resources.jar": {
            "one.txt",
            "two.txt",
        }
    }

    # But requesting a single file should individually package it.
    rendered_classpath = rule_runner.request(
        RenderedClasspath,
        [
            Addresses([
                Address(spec_path="",
                        target_name="root",
                        relative_file_path="one.txt")
            ])
        ],
    )
    assert rendered_classpath.content == {
        ".one.txt.root.resources.jar": {
            "one.txt",
        }
    }
Example #18
 def assert_failed_cycle(
     self, *, root_target_name: str, subject_target_name: str, path_target_names: Tuple[str, ...]
 ) -> None:
     with self.assertRaises(ExecutionError) as e:
         self.request_single_product(
             TransitiveTargets,
             Params(
                 Addresses([Address("", target_name=root_target_name)]),
                 create_options_bootstrapper(),
             ),
         )
     (cycle_exception,) = e.exception.wrapped_exceptions
     assert isinstance(cycle_exception, CycleException)
     assert cycle_exception.subject == Address("", target_name=subject_target_name)
     assert cycle_exception.path == tuple(Address("", target_name=p) for p in path_target_names)
Example #19
async def generate_smalltalk_from_avro(
    request: GenerateSmalltalkFromAvroRequest,
) -> GeneratedSources:
    protocol_files = request.protocol_sources.files

    # Many codegen implementations will need to look up a protocol target's dependencies in their
    # rule. We add this here to ensure that this does not result in rule graph issues.
    _ = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))

    def generate_smalltalk(fp: str) -> FileContent:
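        # E.g. "src/avro/schemas/user.avpr" -> "src/smalltalk/schemas/user.st".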
        parent = str(PurePath(fp).parent).replace("src/avro", "src/smalltalk")
        file_name = f"{PurePath(fp).stem}.st"
        return FileContent(str(PurePath(parent, file_name)), b"Generated")

    result = await Get(Snapshot, CreateDigest([generate_smalltalk(fp) for fp in protocol_files]))
    return GeneratedSources(result)
Example #20
async def get_owned_dependencies(dependency_owner: DependencyOwner) -> OwnedDependencies:
    """Find the dependencies of dependency_owner that are owned by it.

    Includes dependency_owner itself.
    """
    transitive_targets = await Get[TransitiveTargets](
        Addresses([dependency_owner.exported_target.target.address])
    )
    ownable_targets = [tgt for tgt in transitive_targets.closure if is_ownable_target(tgt)]
    owners = await MultiGet(Get[ExportedTarget](OwnedDependency(tgt)) for tgt in ownable_targets)
    owned_dependencies = [
        tgt
        for owner, tgt in zip(owners, ownable_targets)
        if owner == dependency_owner.exported_target
    ]
    return OwnedDependencies(OwnedDependency(t) for t in owned_dependencies)
Example #21
 def __init__(
         self,
         addresses: Iterable[Address],
         *,
         internal_only: bool,
         hardcoded_interpreter_constraints: InterpreterConstraints | None = None,
         platforms: PexPlatforms = PexPlatforms(),
         complete_platforms: CompletePlatforms = CompletePlatforms(),
         additional_lockfile_args: tuple[str, ...] = (),
 ) -> None:
     self.addresses = Addresses(addresses)
     self.internal_only = internal_only
     self.hardcoded_interpreter_constraints = hardcoded_interpreter_constraints
     self.platforms = platforms
     self.complete_platforms = complete_platforms
     self.additional_lockfile_args = additional_lockfile_args
Example #22
async def find_changed_owners(request: ChangedRequest,
                              specs_filter: SpecsFilter) -> ChangedAddresses:
    no_dependees = request.dependees == DependeesOption.NONE
    owners = await Get(
        Owners,
        OwnersRequest(
            request.sources,
            # If `--changed-dependees` is used, we cannot eagerly filter out root targets. We
            # need to first find their dependees, and only then should we filter. See
            # https://github.com/pantsbuild/pants/issues/15544
            filter_by_global_options=no_dependees,
        ),
    )
    if no_dependees:
        return ChangedAddresses(owners)

    # See https://github.com/pantsbuild/pants/issues/15313. We filter out target generators because
    # they are not useful as aliases for their generated targets in the context of
    # `--changed-since`. Including them makes it look like all sibling targets from the same
    # target generator have also changed.
    #
    # However, we also must be careful to preserve if target generators are direct owners, which
    # happens when a generated file is deleted.
    owner_target_generators = FrozenOrderedSet(
        addr.maybe_convert_to_target_generator() for addr in owners
        if addr.is_generated_target)
    dependees = await Get(
        Dependees,
        DependeesRequest(
            owners,
            transitive=request.dependees == DependeesOption.TRANSITIVE,
            include_roots=False,
        ),
    )
    result = FrozenOrderedSet(owners) | (dependees - owner_target_generators)
    if specs_filter.is_specified:
        # Finally, we must now filter out the result to only include what matches our tags, as the
        # last step of https://github.com/pantsbuild/pants/issues/15544.
        #
        # Note that we use `UnexpandedTargets` rather than `Targets` or `FilteredTargets` so that
        # we preserve target generators.
        result_as_tgts = await Get(UnexpandedTargets, Addresses(result))
        result = FrozenOrderedSet(tgt.address for tgt in result_as_tgts
                                  if specs_filter.matches(tgt))

    return ChangedAddresses(result)
Example #23
def test_compile_mixed_cycle(rule_runner: RuleRunner) -> None:
    # Add an extra import to the Java file which will force a cycle between them.
    rule_runner.write_files(
        {
            "BUILD": "scala_sources(name='main')",
            "3rdparty/jvm/BUILD": DEFAULT_SCALA_LIBRARY_TARGET,
            "3rdparty/jvm/default.lock": DEFAULT_LOCKFILE,
            "Example.scala": scala_main_source(),
            "lib/BUILD": "java_sources()",
            "lib/C.java": java_lib_source(["org.pantsbuild.example.Main"]),
        }
    )

    main_address = Address(spec_path="", target_name="main")
    lib_address = Address(spec_path="lib")
    assert len(expect_single_expanded_coarsened_target(rule_runner, main_address).members) == 2
    rule_runner.request(Classpath, [Addresses([main_address, lib_address])])
Example #24
async def infer_python_init_dependencies(
    request: InferInitDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    if (
        not python_infer_subsystem.options.is_default("inits") and not python_infer_subsystem.inits
    ) or python_infer_subsystem.init_files is InitFilesInference.never:
        return InferredDependencies([])

    ignore_empty_files = (
        python_infer_subsystem.options.is_default("inits")
        and python_infer_subsystem.init_files is InitFilesInference.content_only
    )
    fp = request.sources_field.file_path
    assert fp is not None
    init_files = await Get(
        AncestorFiles,
        AncestorFilesRequest(
            input_files=(fp,),
            requested=("__init__.py", "__init__.pyi"),
            ignore_empty_files=ignore_empty_files,
        ),
    )
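    # E.g. for input file "src/app/util.py", this requests "__init__.py(i)" files in
    # "src", "src/app", etc., keeping only those that exist (illustrative example).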
    owners = await MultiGet(Get(Owners, OwnersRequest((f,))) for f in init_files.snapshot.files)

    original_tgt, owner_tgts = await MultiGet(
        Get(
            WrappedTarget,
            WrappedTargetRequest(
                request.sources_field.address, description_of_origin="<infallible>"
            ),
        ),
        Get(Targets, Addresses(itertools.chain.from_iterable(owners))),
    )
    resolve = original_tgt.target[PythonResolveField].normalized_value(python_setup)
    python_owners = [
        tgt.address
        for tgt in owner_tgts
        if (
            tgt.has_field(PythonSourceField)
            and tgt[PythonResolveField].normalized_value(python_setup) == resolve
        )
    ]
    return InferredDependencies(python_owners)
Example #25
async def handle_bsp_scalac_options_request(
    request: HandleScalacOptionsRequest,
    build_root: BuildRoot,
    workspace: Workspace,
) -> HandleScalacOptionsResult:
    bsp_target = await Get(BSPBuildTargetInternal, BuildTargetIdentifier,
                           request.bsp_target_id)
    targets = await Get(
        Targets,
        AddressSpecs,
        bsp_target.specs.address_specs,
    )
    coarsened_targets = await Get(CoarsenedTargets,
                                  Addresses(tgt.address for tgt in targets))
    resolve = await Get(CoursierResolveKey, CoarsenedTargets,
                        coarsened_targets)
    lockfile = await Get(CoursierResolvedLockfile, CoursierResolveKey, resolve)

    resolve_digest = await Get(
        Digest,
        CreateDigest([
            FileEntry(entry.file_name, entry.file_digest)
            for entry in lockfile.entries
        ]),
    )

    resolve_digest = await Get(
        Digest, AddPrefix(resolve_digest, f"jvm/resolves/{resolve.name}/lib"))

    workspace.write_digest(resolve_digest, path_prefix=".pants.d/bsp")

    classpath = [
        build_root.pathlib_path.joinpath(
            f".pants.d/bsp/jvm/resolves/{resolve.name}/lib/{entry.file_name}"
        ).as_uri()
        for entry in lockfile.entries
    ]

    return HandleScalacOptionsResult(
        ScalacOptionsItem(
            target=request.bsp_target_id,
            options=(),
            classpath=tuple(classpath),
            class_directory=build_root.pathlib_path.joinpath(
                f".pants.d/bsp/jvm/resolves/{resolve.name}/classes").as_uri(),
        ))
Example #26
    def assert_prefix_mapping(
        *,
        original: str,
        src: str,
        dest: str,
        expected: str,
    ) -> None:
        rule_runner.create_file(original)
        rule_runner.add_to_build_file(
            "",
            dedent(f"""\
                files(name="original", sources=[{repr(original)}])

                relocated_files(
                    name="relocated",
                    files_targets=[":original"],
                    src={repr(src)},
                    dest={repr(dest)},
                )
                """),
            overwrite=True,
        )
        tgt = rule_runner.get_target(Address("", target_name="relocated"))
        result = rule_runner.request(
            GeneratedSources,
            [RelocateFilesViaCodegenRequest(EMPTY_SNAPSHOT, tgt)])
        assert result.snapshot.files == (expected, )

        # We also ensure that when looking at the transitive dependencies of the `relocated_files`
        # target and then getting all the code of that closure, we only end up with the relocated
        # files. If we naively marked the original files targets as a typical `Dependencies` field,
        # we would hit this issue.
        transitive_targets = rule_runner.request(TransitiveTargets,
                                                 [Addresses([tgt.address])])
        all_sources = rule_runner.request(
            SourceFiles,
            [
                SourceFilesRequest(
                    (tgt.get(Sources) for tgt in transitive_targets.closure),
                    enable_codegen=True,
                    for_sources_types=(FilesSources, ),
                )
            ],
        )
        assert all_sources.snapshot.files == (expected, )
Example #27
    def get_expanded_specs(self) -> ExpandedSpecs:
        """Return a dict containing the canonicalized addresses of the specs for this run, and what
        files they expand to."""

        (unexpanded_addresses, ) = self._scheduler.product_request(
            Addresses, [Params(self._specs, self._options_bootstrapper)])

        expanded_targets = self._scheduler.product_request(
            Targets,
            [Params(Addresses([addr])) for addr in unexpanded_addresses])
        targets_dict: Dict[str, List[TargetInfo]] = {}
        for addr, targets in zip(unexpanded_addresses, expanded_targets):
            targets_dict[addr.spec] = [
                TargetInfo(
                    filename=(
                        tgt.address.filename if tgt.address.is_file_target else str(tgt.address)
                    )
                )
                for tgt in targets
            ]
        return ExpandedSpecs(targets=targets_dict)
Example #28
async def resolve_dependencies_lite(
    request: DependenciesRequestLite,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
    global_options: GlobalOptions,
) -> Addresses:
    provided = parse_dependencies_field(
        request.field,
        subproject_roots=global_options.options.subproject_roots,
        registered_target_types=registered_target_types.types,
        union_membership=union_membership,
    )
    literal_addresses = await MultiGet(Get(Address, AddressInput, ai) for ai in provided.addresses)
    ignored_addresses = set(
        await MultiGet(Get(Address, AddressInput, ai) for ai in provided.ignored_addresses)
    )

    # Inject any dependencies.
    inject_request_types = union_membership.get(InjectDependenciesRequest)
    injected = await MultiGet(
        Get(InjectedDependencies, InjectDependenciesRequest, inject_request_type(request.field))
        for inject_request_type in inject_request_types
        if isinstance(request.field, inject_request_type.inject_for)
    )

    # Inject dependencies on all the BUILD target's generated file targets.
    subtargets = await Get(
        Subtargets, Address, request.field.address.maybe_convert_to_build_target()
    )
    subtarget_addresses = tuple(
        t.address for t in subtargets.subtargets if t.address != request.field.address
    )

    result = {
        addr
        for addr in (
            *subtarget_addresses,
            *literal_addresses,
            *itertools.chain.from_iterable(injected),
        )
        if addr not in ignored_addresses
    }
    return Addresses(sorted(result))
Example #29
async def inject_docker_dependencies(
    request: InjectDockerDependencies, all_packageable_targets: AllPackageableTargets
) -> InjectedDependencies:
    """Inspects the Dockerfile for references to known packagable targets."""
    dockerfile_info = await Get(
        DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)
    )

    putative_image_addresses = set(
        await Get(
            Addresses,
            UnparsedAddressInputs(
                (v for v in dockerfile_info.from_image_build_args.to_dict().values() if v),
                owning_address=dockerfile_info.address,
                description_of_origin="TODO(#14468)",
            ),
        )
    )
    maybe_output_paths = set(dockerfile_info.copy_source_paths)

    # NB: There's no easy way of knowing the output path's default file ending as there could
    # be none or it could be dynamic. Instead of forcing clients to tell us, we just use all the
    # possible ones from the Dockerfile. In rare cases we over-infer, but it is relatively harmless.
    # NB: The suffix gets an `or None` because `pathlib` includes the ".", but `OutputPathField` doesn't
    # expect it (if you give it "", it'll leave a trailing ".").
    possible_file_endings = {PurePath(path).suffix[1:] or None for path in maybe_output_paths}
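    # E.g. paths {"dist/app.pex", "Dockerfile"} yield endings {"pex", None}.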
    inject_addresses = []
    for target in all_packageable_targets:
        if target.address in putative_image_addresses:
            inject_addresses.append(target.address)
            continue

        output_path_field = target.get(OutputPathField)
        possible_output_paths = {
            output_path_field.value_or_default(file_ending=file_ending)
            for file_ending in possible_file_endings
        }
        for output_path in possible_output_paths:
            if output_path in maybe_output_paths:
                inject_addresses.append(target.address)
                break

    return InjectedDependencies(Addresses(inject_addresses))
Example #30
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[Targets](AddressSpecs(candidate_specs))
    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, tgt.address) for tgt in candidate_targets
    )

    owners = Addresses(
        tgt.address
        for tgt, bfa in zip(candidate_targets, build_file_addresses)
        if bfa.rel_path in sources_set
        # NB: Deleted files can only be matched against the 'filespec' (i.e. `PathGlobs`) for a
        # target, which is why we use `any_matches_filespec`.
        or any_matches_filespec(sources_set, tgt.get(Sources).filespec)
    )
    return Owners(owners)