Example #1
async def find_owners(
    build_configuration: BuildConfiguration,
    address_mapper: AddressMapper,
    changed_request: ChangedRequest,
) -> ChangedAddresses:
    owners = await Get[Owners](OwnersRequest(sources=changed_request.sources))

    # If the ChangedRequest does not require dependees, then we're done.
    if changed_request.include_dependees == IncludeDependeesOption.NONE:
        return ChangedAddresses(owners.addresses)

    # Otherwise: find dependees.
    all_addresses = await Get[Addresses](AddressSpecs(
        (DescendantAddresses(""), )))
    all_structs = [
        s.value for s in await MultiGet(Get[HydratedStruct](Address, a)
                                        for a in all_addresses)
    ]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(
        target_types_from_build_file_aliases(bfa), address_mapper, all_structs)
    if changed_request.include_dependees == IncludeDependeesOption.DIRECT:
        return ChangedAddresses(
            Addresses(graph.dependents_of_addresses(owners.addresses)))
    return ChangedAddresses(
        Addresses(graph.transitive_dependents_of_addresses(owners.addresses)))
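The distinction between `dependents_of_addresses` and `transitive_dependents_of_addresses` above can be sketched with plain dicts standing in for `_DependentGraph` (illustrative names only, not Pants APIs):

deps = {"a": {"b"}, "b": {"c"}, "c": set()}  # a depends on b, b depends on c

def dependents_of(addr: str) -> set:
    # Direct dependees: targets that list `addr` as a dependency.
    return {t for t, ds in deps.items() if addr in ds}

def transitive_dependents_of(addr: str) -> set:
    # Walk dependees-of-dependees until no new targets appear.
    result: set = set()
    frontier = dependents_of(addr)
    while frontier:
        result |= frontier
        frontier = {d for f in frontier for d in dependents_of(f)} - result
    return result

assert dependents_of("c") == {"b"}
assert transitive_dependents_of("c") == {"a", "b"}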
Example #2
async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:
    """Inspects COPY instructions in the Dockerfile for references to known packagable targets."""
    dockerfile_info = await Get(
        DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)
    )

    # Parse all putative target addresses.
    putative_addresses = await Get(
        Addresses,
        UnparsedAddressInputs(
            dockerfile_info.putative_target_addresses,
            owning_address=dockerfile_info.address,
        ),
    )

    # Get the target for those addresses that are known.
    directories = {address.spec_path for address in putative_addresses}
    all_addresses = await Get(Addresses, AddressSpecs(map(MaybeEmptySiblingAddresses, directories)))
    targets = await Get(
        Targets, Addresses((address for address in putative_addresses if address in all_addresses))
    )

    # Only keep those targets that we can "package".
    package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets))
    referenced_targets = (
        field_sets[0].address for field_sets in package.collection if len(field_sets) > 0
    )
    return InjectedDependencies(Addresses(referenced_targets))
Example #3
def test_compile_multiple_source_files(rule_runner: RuleRunner) -> None:
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                coursier_lockfile(
                    name = 'lockfile',
                    requirements = [],
                    sources = [
                        "coursier_resolve.lockfile",
                    ],
                )

                java_sources(
                    name = 'lib',
                    dependencies = [
                        ':lockfile',
                    ]
                )
                """
            ),
            "coursier_resolve.lockfile": CoursierResolvedLockfile(entries=())
            .to_json()
            .decode("utf-8"),
            "ExampleLib.java": JAVA_LIB_SOURCE,
            "OtherLib.java": dedent(
                """\
                package org.pantsbuild.example.lib;

                public class OtherLib {
                    public static String hello() {
                        return "Hello!";
                    }
                }
                """
            ),
        }
    )

    expanded_targets = rule_runner.request(
        Targets, [Addresses([Address(spec_path="", target_name="lib")])]
    )
    assert sorted(t.address.spec for t in expanded_targets) == [
        "//ExampleLib.java:lib",
        "//OtherLib.java:lib",
    ]

    coarsened_targets = rule_runner.request(
        CoarsenedTargets, [Addresses([t.address for t in expanded_targets])]
    )
    assert len(coarsened_targets) == 1
    coarsened_target = coarsened_targets[0]
    assert len(coarsened_target.members) == 2
    request = CompileJavaSourceRequest(component=coarsened_target)

    compiled_classfiles = rule_runner.request(CompiledClassfiles, [request])
    classfile_digest_contents = rule_runner.request(DigestContents, [compiled_classfiles.digest])
    assert frozenset(content.path for content in classfile_digest_contents) == frozenset(
        ["org/pantsbuild/example/lib/ExampleLib.class", "org/pantsbuild/example/lib/OtherLib.class"]
    )
Example #4
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    if pylint.skip:
        return LintResults([], linter_name="Pylint")

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    plugin_targets_request = Get(TransitiveTargets,
                                 Addresses(plugin_target_addresses))
    linted_targets_request = Get(
        Targets,
        Addresses(field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = await MultiGet(plugin_targets_request,
                                                    linted_targets_request)

    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[PythonInterpreterCompatibility]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(PythonInterpreterCompatibility))

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(t[PythonInterpreterCompatibility]
                  for t in [tgt, *dependencies]
                  if t.has_field(PythonInterpreterCompatibility)),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        )
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)

    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda tgt_setup: tgt_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results, linter_name="Pylint")
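The batching comment above can be restated with plain data: each field set is keyed by its interpreter constraints so that, for example, all Python 2 targets share one Pylint run. A toy model (all target names and constraint strings assumed):

from collections import defaultdict

batches = defaultdict(set)
for target, constraints in [("app", "CPython>=3.7"), ("lib", "CPython>=3.7"),
                            ("legacy", "CPython==2.7.*")]:
    batches[constraints].add(target)

assert batches["CPython>=3.7"] == {"app", "lib"}
assert batches["CPython==2.7.*"] == {"legacy"}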
Example #5
def expect_single_expanded_coarsened_target(
        rule_runner: RuleRunner, address: Address) -> CoarsenedTarget:
    expanded_target = rule_runner.request(
        Targets, [Addresses([address])]).expect_single()
    coarsened_targets = rule_runner.request(
        CoarsenedTargets, [Addresses([expanded_target.address])])
    assert len(coarsened_targets) == 1
    return coarsened_targets[0]
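A hypothetical call site for this helper (the target address is assumed), in the same RuleRunner style as the compile tests elsewhere on this page:

def test_single_component(rule_runner: RuleRunner) -> None:
    # Assumes a BUILD file declaring e.g. java_sources(name='lib') at the root.
    ctgt = expect_single_expanded_coarsened_target(
        rule_runner, Address("", target_name="lib")
    )
    assert len(ctgt.members) >= 1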
Example #6
def test_compile_multiple_source_files(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                java_sources(
                    name = 'lib',

                )
                """),
        "3rdparty/jvm/default.lock":
        TestCoursierWrapper.new(entries=()).serialize(),
        "ExampleLib.java":
        JAVA_LIB_SOURCE,
        "OtherLib.java":
        dedent("""\
                package org.pantsbuild.example.lib;

                public class OtherLib {
                    public static String hello() {
                        return "Hello!";
                    }
                }
                """),
    })

    expanded_targets = rule_runner.request(
        Targets, [Addresses([Address(spec_path="", target_name="lib")])])
    assert sorted(t.address.spec for t in expanded_targets) == [
        "//ExampleLib.java:lib",
        "//OtherLib.java:lib",
    ]

    coarsened_targets = rule_runner.request(
        CoarsenedTargets, [Addresses([t.address for t in expanded_targets])])
    assert len(coarsened_targets) == 2
    assert all(len(ctgt.members) == 1 for ctgt in coarsened_targets)

    coarsened_targets_sorted = sorted(
        coarsened_targets, key=lambda ctgt: str(list(ctgt.members)[0].address))

    request0 = CompileJavaSourceRequest(component=coarsened_targets_sorted[0],
                                        resolve=make_resolve(rule_runner))
    classpath0 = rule_runner.request(RenderedClasspath, [request0])
    assert classpath0.content == {
        ".ExampleLib.java.lib.javac.jar": {
            "org/pantsbuild/example/lib/ExampleLib.class",
        }
    }

    request1 = CompileJavaSourceRequest(component=coarsened_targets_sorted[1],
                                        resolve=make_resolve(rule_runner))
    classpath1 = rule_runner.request(RenderedClasspath, [request1])
    assert classpath1.content == {
        ".OtherLib.java.lib.javac.jar": {
            "org/pantsbuild/example/lib/OtherLib.class",
        }
    }
Example #7
async def addresses_from_address_specs(
        address_specs: AddressSpecs, global_options: GlobalOptions,
        specs_filter: AddressSpecsFilter) -> Addresses:
    matched_addresses: OrderedSet[Address] = OrderedSet()
    filtering_disabled = address_specs.filter_by_global_options is False

    # First convert all `AddressLiteralSpec`s. Some of the resulting addresses may be generated
    # addresses. This will raise an exception if any of the addresses are not valid.
    literal_addresses = await MultiGet(
        Get(
            Address,
            AddressInput(spec.path_component, spec.target_component,
                         spec.generated_component),
        ) for spec in address_specs.literals)
    literal_target_adaptors = await MultiGet(
        Get(TargetAdaptor, Address, addr.maybe_convert_to_target_generator())
        for addr in literal_addresses)
    # We convert to targets for the side effect of validating that any generated targets actually
    # belong to their target generator.
    await Get(
        UnexpandedTargets,
        Addresses(addr for addr in literal_addresses
                  if addr.is_generated_target))
    for literal_spec, addr, target_adaptor in zip(address_specs.literals,
                                                  literal_addresses,
                                                  literal_target_adaptors):
        if filtering_disabled or specs_filter.matches(addr, target_adaptor):
            matched_addresses.add(addr)

    # Then, convert all `AddressGlobSpecs`. Resolve all BUILD files covered by the specs, then
    # group by directory.
    paths = await Get(
        Paths,
        PathGlobs,
        address_specs.to_path_globs(
            build_patterns=global_options.options.build_patterns,
            build_ignore_patterns=global_options.options.build_ignore,
        ),
    )
    dirnames = {os.path.dirname(f) for f in paths.files}
    address_families = await MultiGet(
        Get(AddressFamily, AddressFamilyDir(d)) for d in dirnames)
    address_family_by_directory = {af.namespace: af for af in address_families}

    for glob_spec in address_specs.globs:
        # These may raise ResolveError, depending on the type of spec.
        addr_families_for_spec = glob_spec.matching_address_families(
            address_family_by_directory)
        addr_target_pairs_for_spec = glob_spec.matching_addresses(
            addr_families_for_spec)
        matched_addresses.update(
            addr for (addr, tgt) in addr_target_pairs_for_spec
            # TODO(#11123): handle the edge case if a generated target's `tags` != its generator's.
            if filtering_disabled or specs_filter.matches(addr, tgt))

    return Addresses(sorted(matched_addresses))
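The glob-spec half of the rule hinges on grouping matched BUILD files by directory before resolving address families; the grouping step in isolation:

import os

paths = ["src/app/BUILD", "src/app/BUILD.tools", "src/lib/BUILD"]
assert {os.path.dirname(f) for f in paths} == {"src/app", "src/lib"}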
Example #8
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    if pylint.skip:
        return LintResults()

    plugin_targets_request = Get[TransitiveTargets](Addresses(
        Address.parse(plugin_addr) for plugin_addr in pylint.source_plugins))
    linted_targets_request = Get[Targets](Addresses(
        field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = cast(
        Tuple[TransitiveTargets, Targets],
        await MultiGet([plugin_targets_request, linted_targets_request]),
    )

    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[PythonInterpreterCompatibility]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(PythonInterpreterCompatibility))

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get[Targets](DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(t.get(PythonInterpreterCompatibility)
                  for t in [tgt, *dependencies]),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        ) or PexInterpreterConstraints(pylint.default_interpreter_constraints)
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)

    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda target_setup: target_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get[LintResult](PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results)
Example #9
    def test_normal_resolution(self) -> None:
        self.add_to_build_file("src/smalltalk", "smalltalk()")
        addr = Address.parse("src/smalltalk")
        deps = Addresses([Address.parse("//:dep1"), Address.parse("//:dep2")])
        deps_field = Dependencies(deps, address=addr)
        assert self.request_single_product(Addresses, DependenciesRequest(deps_field)) == deps

        # Also test that we handle no dependencies.
        empty_deps_field = Dependencies(None, address=addr)
        assert self.request_single_product(
            Addresses, DependenciesRequest(empty_deps_field)
        ) == Addresses([])
Example #10
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:

     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AscendantAddresses(target.address.spec_path)
    ancestor_tgts = await Get(Targets, AddressSpecs([ancestor_addrs]))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t for t in ancestor_tgts if t.has_field(PythonProvidesField)],
        key=lambda t: t.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get(TransitiveTargets, Addresses([exported_ancestor.address]))
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get(TransitiveTargets, Addresses([sibling.address]))
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                all_owners = [exported_ancestor] + sibling_owners
                raise AmbiguousOwnerError(
                    f"Found multiple sibling python_distribution targets that are the closest "
                    f"ancestor dependees of {target.address} and are therefore candidates to "
                    f"own it: {', '.join(o.address.spec for o in all_owners)}. Only a "
                    f"single such owner is allowed, to avoid ambiguity."
                )
            return ExportedTarget(owner)
    raise NoOwnerError(
        f"No python_distribution target found to own {target.address}. Note that "
        f"the owner must be in or above the owned target's directory, and must "
        f"depend on it (directly or indirectly)."
    )
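The "closest filesystem ancestor" search relies on the invariant called out in the comments: ancestor spec_paths are all prefixes of the target's spec_path, so a reverse sort visits the closest ancestor first. In isolation:

ancestor_spec_paths = ["", "src", "src/python", "src/python/project"]
assert sorted(ancestor_spec_paths, reverse=True) == [
    "src/python/project",
    "src/python",
    "src",
    "",
]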
Example #11
async def create_python_binary_run_request(
        field_set: PythonBinaryFieldSet,
        python_binary_defaults: PythonBinaryDefaults) -> RunRequest:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        #  we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources,
                                   HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ()))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files)
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )

    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([field_set.address]))

    output_filename = f"{field_set.address.target_name}.pex"
    pex_request = Get(
        Pex,
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            output_filename=output_filename,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            include_source_files=False,
        ),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True),
    )
    pex, sources = await MultiGet(pex_request, sources_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([pex.digest, sources.source_files.snapshot.digest]))
    return RunRequest(
        digest=merged_digest,
        binary_name=pex.output_filename,
        prefix_args=("-m", entry_point),
        env={"PEX_EXTRA_SYS_PATH": ":".join(sources.source_roots)},
    )
Example #12
async def get_exporting_owner(
        owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:

     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AscendantAddresses(target.address.spec_path)
    ancestor_tgts = await Get[Targets](AddressSpecs((ancestor_addrs, )))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t for t in ancestor_tgts if _is_exported(t)],
        key=lambda t: t.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get[TransitiveTargets](Addresses(
            [exported_ancestor.address]))
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get[TransitiveTargets](Addresses(
                    [sibling.address]))
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                raise AmbiguousOwnerError(
                    f"Exporting owners for {target.address.reference()} are "
                    f"ambiguous. Found {exported_ancestor.address.reference()} and "
                    f"{len(sibling_owners)} others: "
                    f'{", ".join(so.address.reference() for so in sibling_owners)}'
                )
            return ExportedTarget(owner)
    raise NoOwnerError(
        f"No exported target owner found for {target.address.reference()}")
Example #13
async def get_requirements(
        dep_owner: DependencyOwner,
        union_membership: UnionMembership) -> ExportedTargetRequirements:
    transitive_targets = await Get[TransitiveTargets](Addresses(
        [dep_owner.exported_target.target.address]))

    ownable_tgts = [
        tgt for tgt in transitive_targets.closure
        if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get[ExportedTarget](OwnedDependency(tgt))
                            for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us
         if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    #  `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    #  stricter build graph hygiene, it makes sense to require that targets directly declare their
    #  true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    #  deps across exported target boundaries, it's not a big stretch to just insist that
    #  requirements must be direct deps.
    direct_deps_addrs = sorted(
        set(
            itertools.chain.from_iterable(
                tgt.get(Dependencies).value or () for tgt in owned_by_us)))
    direct_deps_tgts = await Get[Targets](Addresses(direct_deps_addrs))
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField] for tgt in direct_deps_tgts
        if tgt.has_field(PythonRequirementsField))
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    exported_targets_we_depend_on = await MultiGet(
        Get[ExportedTarget](OwnedDependency(tgt)) for tgt in owned_by_others)
    req_strs.extend(et.provides.requirement
                    for et in set(exported_targets_we_depend_on))

    return ExportedTargetRequirements(req_strs)
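The owned_by_us / owned_by_others split above drives the whole requirement computation; a toy restatement with plain dicts (all names assumed):

owner_of = {"libA": "ET1", "libB": "ET1", "libC": "ET2"}  # dep -> exporting target
exported_target = "ET1"

owned_by_us = {dep for dep, owner in owner_of.items() if owner == exported_target}
owned_by_others = set(owner_of) - owned_by_us

assert owned_by_us == {"libA", "libB"}
assert owned_by_others == {"libC"}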
Example #14
def test_junit_test_dep(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                java_sources(
                    name = 'lib',

                )
                junit_tests(
                    name = 'tests',

                )
                """),
        "FooTest.java":
        dedent("""\
                package org.pantsbuild.example;

                import org.pantsbuild.example.C;

                public class FooTest {
                    public static void main(String[] args) throws Exception {
                        C c = new C();
                    }
                }
                """),
        "Foo.java":
        dedent("""\
                package org.pantsbuild.example;

                public class Foo {}

                class C {}
                """),
    })

    lib = rule_runner.get_target(
        Address("", target_name="lib", relative_file_path="Foo.java"))
    tests = rule_runner.get_target(
        Address("", target_name="tests", relative_file_path="FooTest.java"))

    # FooTest.java has an inferred dependency on Foo.java (which defines C).
    assert rule_runner.request(
        Addresses,
        [DependenciesRequest(tests[Dependencies])]) == Addresses([lib.address])

    # Foo.java does NOT have a dependency on FooTest.java, as it would if we just had
    # subtargets without inferred dependencies.
    assert rule_runner.request(
        Addresses, [DependenciesRequest(lib[Dependencies])]) == Addresses()
Example #15
def run_goal(
    targets: List[MockTarget],
    *,
    show_documented: bool = False,
    show_provides: bool = False,
    provides_columns: Optional[str] = None,
) -> Tuple[str, str]:
    console = MockConsole(use_colors=False)
    run_rule_with_mocks(
        list_targets,
        rule_args=[
            Addresses(tgt.address for tgt in targets),
            create_goal_subsystem(
                ListSubsystem,
                sep="\\n",
                output_file=None,
                documented=show_documented,
                provides=show_provides,
                provides_columns=provides_columns or "address,artifact_id",
            ),
            console,
        ],
        mock_gets=[
            MockGet(
                product_type=UnexpandedTargets,
                subject_type=Addresses,
                mock=lambda _: UnexpandedTargets(targets),
            )
        ],
    )
    return cast(str, console.stdout.getvalue()), cast(str, console.stderr.getvalue())
Example #16
async def kotlinc_check(
    request: KotlincCheckRequest,
    classpath_entry_request: ClasspathEntryRequestFactory,
) -> CheckResults:
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses(field_set.address for field_set in request.field_sets)
    )

    # NB: Each root can have an independent resolve, because there is no inherent relation
    # between them other than that they were on the commandline together.
    resolves = await MultiGet(
        Get(CoursierResolveKey, CoarsenedTargets([t])) for t in coarsened_targets
    )

    results = await MultiGet(
        Get(
            FallibleClasspathEntry,
            ClasspathEntryRequest,
            classpath_entry_request.for_targets(component=target, resolve=resolve),
        )
        for target, resolve in zip(coarsened_targets, resolves)
    )

    # NB: We don't pass stdout/stderr as it will have already been rendered as streaming.
    exit_code = next((result.exit_code for result in results if result.exit_code != 0), 0)
    return CheckResults([CheckResult(exit_code, "", "")], checker_name=request.name)
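The final fold uses `next` with a default to surface the first nonzero exit code, or 0 when every compile succeeded; the idiom standalone:

assert next((code for code in [0, 0, 3, 1] if code != 0), 0) == 3
assert next((code for code in [0, 0] if code != 0), 0) == 0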
Example #17
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target: HydratedTarget) -> bool:
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get("sources", None)
        return bool(target_sources) and any_matches_filespec(
            paths=sources_set, spec=target_sources.filespec
        )

    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, ht.adaptor.address) for ht in candidate_targets
    )
    owners = Addresses(
        ht.adaptor.address
        for ht, bfa in zip(candidate_targets, build_file_addresses)
        if LegacyAddressMapper.any_is_declaring_file(bfa, sources_set) or owns_any_source(ht)
    )
    return Owners(owners)
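A rough standalone analogue of the `any_matches_filespec` check, using `fnmatch` instead of Pants' matcher (so the glob semantics are only approximate):

from fnmatch import fnmatch

def any_matches(paths: set, globs: list) -> bool:
    return any(fnmatch(path, glob) for path in paths for glob in globs)

assert any_matches({"src/app/main.py"}, ["src/app/*.py"])
assert not any_matches({"src/app/main.py"}, ["src/lib/*.py"])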
Example #18
def assert_injected(deps_cls: Type[Dependencies], *, injected: List[str]) -> None:
    # NB: this is a nested helper defined inside a test method, so `self` here is
    # the enclosing test case.
    provided_addr = Address.parse("//:provided")
    deps_field = deps_cls([provided_addr], address=Address.parse("//:target"))
    result = self.request_single_product(Addresses, DependenciesRequest(deps_field))
    assert result == Addresses(
        sorted([provided_addr, *(Address.parse(addr) for addr in injected)])
    )
Example #19
async def get_requirements_pex(request: RequirementsPexRequest, setup: PythonSetup) -> PexRequest:
    if setup.run_against_entire_lockfile and request.internal_only:
        requirements = await Get(
            PexRequirements, _PexRequirementsRequest(Addresses(request.addresses))
        )
        opt_pex_request = await Get(
            OptionalPexRequest,
            _RepositoryPexRequest(
                addresses=sorted(request.addresses),
                requirements=requirements,
                internal_only=request.internal_only,
                hardcoded_interpreter_constraints=request.hardcoded_interpreter_constraints,
            ),
        )
        if opt_pex_request.maybe_pex_request is None:
            raise ValueError(
                "[python].run_against_entire_lockfile was set, but could not find a "
                "lockfile or constraints file for this target set. See "
                f"{doc_url('python-third-party-dependencies')} for details."
            )
        return opt_pex_request.maybe_pex_request

    pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest(
            addresses=sorted(request.addresses),
            output_filename="requirements.pex",
            internal_only=request.internal_only,
            include_source_files=False,
            hardcoded_interpreter_constraints=request.hardcoded_interpreter_constraints,
        ),
    )
    return pex_request
Example #20
async def addresses_from_raw_specs_with_only_file_owners(
        specs: RawSpecsWithOnlyFileOwners,
        owners_not_found_behavior: OwnersNotFoundBehavior) -> Addresses:
    """Find the owner(s) for each spec."""
    paths_per_include = await MultiGet(
        Get(Paths, PathGlobs, specs.path_globs_for_spec(spec))
        for spec in specs.all_specs())
    owners_per_include = await MultiGet(
        Get(
            Owners,
            OwnersRequest(
                paths.files,
                filter_by_global_options=specs.filter_by_global_options),
        ) for paths in paths_per_include)
    addresses: set[Address] = set()
    for spec, owners in zip(specs.all_specs(), owners_per_include):
        if (not specs.from_change_detection
                and owners_not_found_behavior != OwnersNotFoundBehavior.ignore
                and isinstance(spec, FileLiteralSpec) and not owners):
            _log_or_raise_unmatched_owners(
                [PurePath(str(spec))],
                owners_not_found_behavior,
                ignore_option="--owners-not-found-behavior=ignore",
            )
        addresses.update(owners)
    return Addresses(sorted(addresses))
Example #21
    def test_dependency_inference(self) -> None:
        self.add_to_build_file(
            "",
            dedent("""\
                smalltalk(name='inferred1')
                smalltalk(name='inferred2')
                smalltalk(name='inferred3')
                smalltalk(name='provided')
                """),
        )
        self.create_file("demo/f1.st", "//:inferred1\n//:inferred2\n")
        self.create_file("demo/f2.st", "//:inferred3\n")
        self.add_to_build_file(
            "demo",
            "smalltalk(sources=['*.st'], dependencies=['//:provided'])")

        deps_field = Dependencies([Address.parse("//:provided")],
                                  address=Address.parse("demo"))
        result = self.request_single_product(
            Addresses,
            Params(
                DependenciesRequest(deps_field),
                create_options_bootstrapper(args=["--dependency-inference"]),
            ),
        )
        assert result == Addresses(
            sorted(
                Address.parse(addr) for addr in [
                    "//:inferred1", "//:inferred2", "//:inferred3",
                    "//:provided"
                ]))
Example #22
async def _jvm_bsp_resources(
    request: BSPResourcesRequest,
    build_root: BuildRoot,
) -> BSPResourcesResult:
    """Generically handles a BSPResourcesRequest (subclass).

    This is a `@rule_helper` rather than a `@rule` for the same reason as `_jvm_bsp_compile`.
    """
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses([fs.address for fs in request.field_sets]))

    source_files = await Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            [tgt.get(SourcesField) for tgt in coarsened_targets.closure()],
            for_sources_types=(ResourceSourceField, ),
            enable_codegen=True,
        ),
    )

    rel_resources_dir = _jvm_resources_directory(
        request.bsp_target.bsp_target_id)
    output_digest = await Get(
        Digest,
        AddPrefix(source_files.snapshot.digest, rel_resources_dir),
    )

    return BSPResourcesResult(
        resources=(
            # NB: IntelliJ requires that directory URIs end in slashes.
            build_root.pathlib_path.joinpath(".pants.d/bsp", rel_resources_dir).as_uri()
            + "/",
        ),
        output_digest=output_digest,
    )
Example #23
async def create_python_binary(
        config: PythonBinaryConfiguration) -> CreatedBinary:
    entry_point: Optional[str]
    if config.entry_point.value is not None:
        entry_point = config.entry_point.value
    else:
        source_files = await Get[SourceFiles](AllSourceFilesRequest(
            [config.sources], strip_source_roots=True))
        # NB: `PythonBinarySources` enforces that we have 0-1 sources.
        if len(source_files.files) == 1:
            module_name = source_files.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                module_name)
        else:
            entry_point = None

    output_filename = f"{config.address.target_name}.pex"
    two_step_pex = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([config.address]),
            entry_point=entry_point,
            platforms=PexPlatforms.create_from_platforms_field(
                config.platforms),
            output_filename=output_filename,
            additional_args=config.generate_additional_args(),
        )))
    pex = two_step_pex.pex
    return CreatedBinary(digest=pex.directory_digest,
                         binary_name=pex.output_filename)
Example #24
async def addresses_from_filesystem_specs(
        filesystem_specs: FilesystemSpecs,
        owners_not_found_behavior: OwnersNotFoundBehavior) -> Addresses:
    """Find the owner(s) for each FilesystemSpec."""
    paths_per_include = await MultiGet(
        Get(
            Paths,
            PathGlobs,
            filesystem_specs.path_globs_for_spec(
                spec,
                owners_not_found_behavior.to_glob_match_error_behavior()),
        ) for spec in filesystem_specs.file_includes)
    owners_per_include = await MultiGet(
        Get(Owners, OwnersRequest(sources=paths.files))
        for paths in paths_per_include)
    addresses: set[Address] = set()
    for spec, owners in zip(filesystem_specs.file_includes,
                            owners_per_include):
        if (owners_not_found_behavior != OwnersNotFoundBehavior.ignore
                and isinstance(spec, FileLiteralSpec) and not owners):
            _log_or_raise_unmatched_owners(
                [PurePath(str(spec))],
                owners_not_found_behavior,
                ignore_option="--owners-not-found-behavior=ignore",
            )
        addresses.update(owners)
    return Addresses(sorted(addresses))
Example #25
def test_third_party_dep_inference_with_provides(
        rule_runner: RuleRunner) -> None:
    rule_runner.set_options(
        [
            "--java-infer-third-party-import-mapping={'org.joda.time.**':'joda-time:joda-time', 'org.joda.time.DateTime':'joda-time:joda-time-2'}",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                jvm_artifact(
                    name = "joda-time_joda-time",
                    group = "joda-time",
                    artifact = "joda-time",
                    version = "2.10.10",
                )

                java_sources(
                    name = 'lib',
                    experimental_provides_types = ['org.joda.time.MefripulousDateTime', ],
                )
                """),
        "PrintDate.java":
        dedent("""\
                package org.pantsbuild.example;

                import org.joda.time.DateTime;
                import org.joda.time.MefripulousDateTime;

                public class PrintDate {
                    public static void main(String[] args) {
                        DateTime dt = new DateTime();
                        System.out.println(dt.toString());
                        new MefripulousDateTime().mefripulate();
                    }
                }
                """),
        "MefripulousDateTime.java":
        dedent("""\
                package org.joda.time;

                public class MefripulousDateTime {
                    public void mefripulate() {
                        DateTime dt = new LocalDateTime();
                        System.out.println(dt.toString());
                    }
                }
                """),
    })

    lib1 = rule_runner.get_target(
        Address("", target_name="lib", relative_file_path="PrintDate.java"))
    assert rule_runner.request(
        Addresses, [DependenciesRequest(lib1[Dependencies])]) == Addresses([
            Address("", target_name="joda-time_joda-time"),
            Address("",
                    target_name="lib",
                    relative_file_path="MefripulousDateTime.java"),
        ])
Example #26
def test_compile_mixed(rule_runner: RuleRunner) -> None:
    rule_runner.write_files(
        {
            "BUILD": "scala_sources(name='main')",
            "3rdparty/jvm/BUILD": DEFAULT_SCALA_LIBRARY_TARGET,
            "3rdparty/jvm/default.lock": DEFAULT_LOCKFILE,
            "Example.scala": scala_main_source(),
            "lib/BUILD": "java_sources()",
            "lib/C.java": java_lib_source(),
        }
    )
    rendered_classpath = rule_runner.request(
        RenderedClasspath, [Addresses([Address(spec_path="", target_name="main")])]
    )

    assert rendered_classpath.content[".Example.scala.main.scalac.jar"] == {
        "META-INF/MANIFEST.MF",
        "org/pantsbuild/example/Main$.class",
        "org/pantsbuild/example/Main.class",
    }
    assert rendered_classpath.content["lib.C.java.javac.jar"] == {
        "org/pantsbuild/example/lib/C.class",
    }
    assert any(
        key.startswith("org.scala-lang_scala-library_") for key in rendered_classpath.content.keys()
    )
    assert len(rendered_classpath.content.keys()) == 3
Example #27
async def addresses_from_filesystem_specs(
        filesystem_specs: FilesystemSpecs,
        global_options: GlobalOptions) -> Addresses:
    """Find the owner(s) for each FilesystemSpec.

    Every returned address will be a generated subtarget, meaning that each address will have
    exactly one file in its `sources` field.
    """
    owners_not_found_behavior = global_options.options.owners_not_found_behavior
    paths_per_include = await MultiGet(
        Get(
            Paths,
            PathGlobs,
            filesystem_specs.path_globs_for_spec(
                spec,
                owners_not_found_behavior.to_glob_match_error_behavior()),
        ) for spec in filesystem_specs.includes)
    owners_per_include = await MultiGet(
        Get(Owners, OwnersRequest(sources=paths.files))
        for paths in paths_per_include)
    addresses: Set[Address] = set()
    for spec, owners in zip(filesystem_specs.includes, owners_per_include):
        if (owners_not_found_behavior != OwnersNotFoundBehavior.ignore
                and isinstance(spec, FilesystemLiteralSpec) and not owners):
            _log_or_raise_unmatched_owners(
                [PurePath(str(spec))],
                global_options.options.owners_not_found_behavior,
                ignore_option="--owners-not-found-behavior=ignore",
            )
        addresses.update(owners)
    return Addresses(sorted(addresses))
Example #28
def run_goal(targets: list[MockTarget],
             *,
             show_documented: bool = False) -> tuple[str, str]:
    with mock_console(create_options_bootstrapper()) as (console,
                                                         stdio_reader):
        run_rule_with_mocks(
            list_targets,
            rule_args=[
                Addresses(tgt.address for tgt in targets),
                create_goal_subsystem(
                    ListSubsystem,
                    sep="\\n",
                    output_file=None,
                    documented=show_documented,
                ),
                console,
            ],
            mock_gets=[
                MockGet(
                    output_type=UnexpandedTargets,
                    input_type=Addresses,
                    mock=lambda _: UnexpandedTargets(targets),
                )
            ],
        )
        return stdio_reader.get_stdout(), stdio_reader.get_stderr()
Example #29
async def handle_bsp_scalac_options_request(
    request: HandleScalacOptionsRequest,
    build_root: BuildRoot,
    workspace: Workspace,
) -> HandleScalacOptionsResult:
    targets = await Get(Targets, BuildTargetIdentifier, request.bsp_target_id)
    thirdparty_modules = await Get(
        ThirdpartyModules,
        ThirdpartyModulesRequest(Addresses(tgt.address for tgt in targets)))
    resolve = thirdparty_modules.resolve

    resolve_digest = await Get(
        Digest,
        AddPrefix(thirdparty_modules.merged_digest,
                  f"jvm/resolves/{resolve.name}/lib"))

    workspace.write_digest(resolve_digest, path_prefix=".pants.d/bsp")

    classpath = tuple(
        build_root.pathlib_path.joinpath(
            f".pants.d/bsp/jvm/resolves/{resolve.name}/lib/{filename}"
        ).as_uri()
        for cp_entry in thirdparty_modules.entries.values()
        for filename in cp_entry.filenames
    )

    return HandleScalacOptionsResult(
        ScalacOptionsItem(
            target=request.bsp_target_id,
            options=(),
            classpath=classpath,
            class_directory=build_root.pathlib_path.joinpath(
                f".pants.d/bsp/{jvm_classes_directory(request.bsp_target_id)}"
            ).as_uri(),
        ))
Example #30
    def test_transitive_targets_tolerates_subtarget_cycles(self) -> None:
        """For generated subtargets, we should tolerate cycles between targets.

        This only works with generated subtargets, so we use explicit file dependencies in this
        test.
        """
        self.create_files("", ["dep.txt", "t1.txt", "t2.txt"])
        self.add_to_build_file(
            "",
            dedent(
                """\
                target(name='dep', sources=['dep.txt'])
                target(name='t1', sources=['t1.txt'], dependencies=['dep.txt:dep', 't2.txt:t2'])
                target(name='t2', sources=['t2.txt'], dependencies=['t1.txt:t1'])
                """
            ),
        )
        result = self.request_single_product(
            TransitiveTargets,
            Params(Addresses([Address("", target_name="t2")]), create_options_bootstrapper()),
        )
        assert len(result.roots) == 1
        assert result.roots[0].address == Address("", relative_file_path="t2.txt", target_name="t2")
        assert [tgt.address for tgt in result.dependencies] == [
            Address("", relative_file_path="t1.txt", target_name="t1"),
            Address("", relative_file_path="dep.txt", target_name="dep"),
            Address("", relative_file_path="t2.txt", target_name="t2"),
        ]
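The file-level cycle the test constructs (t1.txt:t1 and t2.txt:t2 depend on each other) is harmless to a transitive walk so long as visited nodes are tracked; a minimal sketch of such a walk:

deps = {
    "t2.txt:t2": ["t1.txt:t1"],
    "t1.txt:t1": ["dep.txt:dep", "t2.txt:t2"],
    "dep.txt:dep": [],
}
seen: set = set()
frontier = ["t2.txt:t2"]
while frontier:
    node = frontier.pop()
    if node in seen:
        continue
    seen.add(node)
    frontier.extend(deps[node])
assert seen == {"t2.txt:t2", "t1.txt:t1", "dep.txt:dep"}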