Example #1
def test_parse_dependencies_field() -> None:
    """Ensure that we correctly handle `!` ignores.

    We leave the rest of the parsing to AddressInput and Address.
    """
    result = parse_dependencies_field(
        ["a/b/c", "!a/b/c", "f.txt", "!f.txt"], spec_path="demo/subdir", subproject_roots=[],
    )
    expected_addresses = {AddressInput("a/b/c"), AddressInput("f.txt")}
    assert set(result.addresses) == expected_addresses
    assert set(result.ignored_addresses) == expected_addresses
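As a quick companion to the test above, the `!` ignore convention it exercises can be sketched in plain Python (this is an illustration, not the Pants implementation; `split_ignores` is a hypothetical helper):

def split_ignores(raw_values):
    # Entries prefixed with "!" are ignores; everything else is an include.
    includes, ignores = [], []
    for value in raw_values:
        if value.startswith("!"):
            ignores.append(value[1:])
        else:
            includes.append(value)
    return includes, ignores

assert split_ignores(["a/b/c", "!a/b/c", "f.txt", "!f.txt"]) == (
    ["a/b/c", "f.txt"],
    ["a/b/c", "f.txt"],
)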
Example #2
async def relocate_files(request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    # Unlike normal codegen, we operate on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#10915): using `await Get(Addresses, UnparsedAddressInputs)` causes a graph failure.
    original_files_targets = await MultiGet(
        Get(
            WrappedTarget,
            AddressInput,
            AddressInput.parse(v, relative_to=request.protocol_target.address.spec_path),
        )
        for v in (
            request.protocol_target.get(RelocatedFilesOriginalTargets)
            .to_unparsed_address_inputs()
            .values
        )
    )
    original_files_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(wrapped_tgt.target.get(Sources)))
        for wrapped_tgt in original_files_targets
    )
    snapshot = await Get(
        Snapshot, MergeDigests(sources.snapshot.digest for sources in original_files_sources)
    )

    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await Get(Snapshot, RemovePrefix(snapshot.digest, src_val))
    if dest_val:
        snapshot = await Get(Snapshot, AddPrefix(snapshot.digest, dest_val))
    return GeneratedSources(snapshot)
Example #3
def test_resolve_address() -> None:
    rule_runner = RuleRunner(rules=[QueryRule(Address, (AddressInput, ))])

    def assert_is_expected(address_input: AddressInput,
                           expected: Address) -> None:
        assert rule_runner.request(Address, [address_input]) == expected

    rule_runner.create_file("a/b/c.txt")
    assert_is_expected(
        AddressInput("a/b/c.txt"),
        Address("a/b", target_name=None, relative_file_path="c.txt"))
    assert_is_expected(
        AddressInput("a/b"),
        Address("a/b", target_name=None, relative_file_path=None))

    assert_is_expected(AddressInput("a/b", target_component="c"),
                       Address("a/b", target_name="c"))
    assert_is_expected(
        AddressInput("a/b/c.txt", target_component="c"),
        Address("a/b", relative_file_path="c.txt", target_name="c"),
    )

    # Top-level addresses will not have a path_component, unless they are file addresses.
    rule_runner.create_file("f.txt")
    assert_is_expected(
        AddressInput("f.txt", target_component="original"),
        Address("", relative_file_path="f.txt", target_name="original"),
    )
    assert_is_expected(AddressInput("", target_component="t"),
                       Address("", target_name="t"))

    with pytest.raises(ExecutionError) as exc:
        rule_runner.request(Address, [AddressInput("a/b/fake")])
    assert "'a/b/fake' does not exist on disk" in str(exc.value)
Example #4
    def test_resolve_address(self) -> None:
        def assert_is_expected(address_input: AddressInput,
                               expected: Address) -> None:
            assert self.request_single_product(Address,
                                               address_input) == expected

        self.create_file("a/b/c.txt")
        assert_is_expected(
            AddressInput("a/b/c.txt"),
            Address("a/b", target_name=None, relative_file_path="c.txt"))
        assert_is_expected(
            AddressInput("a/b"),
            Address("a/b", target_name=None, relative_file_path=None))

        assert_is_expected(AddressInput("a/b", target_component="c"),
                           Address("a/b", target_name="c"))
        assert_is_expected(
            AddressInput("a/b/c.txt", target_component="c"),
            Address("a/b", relative_file_path="c.txt", target_name="c"),
        )

        # Top-level addresses will not have a path_component, unless they are file addresses.
        self.create_file("f.txt")
        assert_is_expected(
            AddressInput("f.txt", target_component="original"),
            Address("", relative_file_path="f.txt", target_name="original"),
        )
        assert_is_expected(AddressInput("", target_component="t"),
                           Address("", target_name="t"))

        with pytest.raises(ExecutionError) as exc:
            self.request_single_product(Address, AddressInput("a/b/fake"))
        assert "'a/b/fake' does not exist on disk" in str(exc.value)
Example #5
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    *,
    build_root: Optional[str] = None,
) -> Specs:
    """Determine the specs for a given Pants run."""
    build_root = build_root or get_buildroot()
    specs = SpecsParser(build_root).parse_specs(options.specs)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one.")

    if not changed_options.provided:
        return specs

    git = get_git()
    if not git:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(git)),
        dependees=changed_options.dependees,
    )
    (changed_addresses, ) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)])
    logger.debug("changed addresses: %s", changed_addresses)

    address_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(address.spec)
        address_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                # NB: AddressInput.target_component may be None, but AddressLiteralSpec expects a
                # string.
                target_component=address_input.target_component
                or address.target_name,
            ))
    return Specs(AddressSpecs(address_specs, filter_by_global_options=True),
                 FilesystemSpecs([]))
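The loop above relies only on `AddressInput.parse` splitting a spec string into a path component and an optional target component. A hedged sketch of that assumption, with hypothetical spec values:

ai = AddressInput.parse("src/proj/dir:tgt")
assert ai.path_component == "src/proj/dir"
assert ai.target_component == "tgt"
# A spec with no explicit name leaves target_component as None, which is why the loop
# above falls back to `address.target_name`.
assert AddressInput.parse("src/proj/dir").target_component is None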
Example #6
async def addresses_from_address_specs(
        address_specs: AddressSpecs, global_options: GlobalOptions,
        specs_filter: AddressSpecsFilter) -> Addresses:
    matched_addresses: OrderedSet[Address] = OrderedSet()
    filtering_disabled = address_specs.filter_by_global_options is False

    # First convert all `AddressLiteralSpec`s. Some of the resulting addresses may be generated
    # addresses. This will raise an exception if any of the addresses are not valid.
    literal_addresses = await MultiGet(
        Get(
            Address,
            AddressInput(spec.path_component, spec.target_component,
                         spec.generated_component),
        ) for spec in address_specs.literals)
    literal_target_adaptors = await MultiGet(
        Get(TargetAdaptor, Address, addr.maybe_convert_to_target_generator())
        for addr in literal_addresses)
    # We convert to targets for the side effect of validating that any generated targets actually
    # belong to their target generator.
    await Get(
        UnexpandedTargets,
        Addresses(addr for addr in literal_addresses
                  if addr.is_generated_target))
    for literal_spec, addr, target_adaptor in zip(address_specs.literals,
                                                  literal_addresses,
                                                  literal_target_adaptors):
        if filtering_disabled or specs_filter.matches(addr, target_adaptor):
            matched_addresses.add(addr)

    # Then, convert all `AddressGlobSpecs`. Resolve all BUILD files covered by the specs, then
    # group by directory.
    paths = await Get(
        Paths,
        PathGlobs,
        address_specs.to_path_globs(
            build_patterns=global_options.options.build_patterns,
            build_ignore_patterns=global_options.options.build_ignore,
        ),
    )
    dirnames = {os.path.dirname(f) for f in paths.files}
    address_families = await MultiGet(
        Get(AddressFamily, AddressFamilyDir(d)) for d in dirnames)
    address_family_by_directory = {af.namespace: af for af in address_families}

    for glob_spec in address_specs.globs:
        # These may raise ResolveError, depending on the type of spec.
        addr_families_for_spec = glob_spec.matching_address_families(
            address_family_by_directory)
        addr_target_pairs_for_spec = glob_spec.matching_addresses(
            addr_families_for_spec)
        matched_addresses.update(
            addr for (addr, tgt) in addr_target_pairs_for_spec
            # TODO(#11123): handle the edge case if a generated target's `tags` != its generator's.
            if filtering_disabled or specs_filter.matches(addr, tgt))

    return Addresses(sorted(matched_addresses))
Example #7
async def resolve_address(address_input: AddressInput) -> Address:
    # Determine the type of the path_component of the input.
    if address_input.path_component:
        snapshot = await Get(Snapshot,
                             PathGlobs(globs=(address_input.path_component, )))
        is_file, is_dir = bool(snapshot.files), bool(snapshot.dirs)
    else:
        # It is an address in the root directory.
        is_file, is_dir = False, True

    if is_file:
        return address_input.file_to_address()
    elif is_dir:
        return address_input.dir_to_address()
    else:
        raise ResolveError(
            f"The file or directory '{address_input.path_component}' does not exist on disk in the "
            f"workspace: cannot resolve '{address_input.target_component}' relative to it."
        )
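Example #3 above exercises this rule end to end; condensed, and assuming the same rule registrations as that test, the request looks roughly like this:

rule_runner = RuleRunner(rules=[QueryRule(Address, (AddressInput,))])
rule_runner.create_file("a/b/c.txt")
assert rule_runner.request(Address, [AddressInput("a/b/c.txt")]) == Address(
    "a/b", target_name=None, relative_file_path="c.txt"
)
assert rule_runner.request(Address, [AddressInput("a/b")]) == Address(
    "a/b", target_name=None, relative_file_path=None
)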
Example #8
async def resolve_unparsed_address_inputs(
        request: UnparsedAddressInputs,
        subproject_roots: SubprojectRoots) -> Addresses:
    addresses = await MultiGet(
        Get(
            Address,
            AddressInput,
            AddressInput.parse(v,
                               relative_to=request.relative_to,
                               subproject_roots=subproject_roots),
        ) for v in request.values)
    return Addresses(addresses)
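The `relative_to` argument is what lets a bare `:name` entry resolve against the requesting target's directory. A hedged sketch of that assumption, with hypothetical values:

ai = AddressInput.parse(":util", relative_to="src/proj/dir")
assert ai.path_component == "src/proj/dir"  # assumption: the relative spec adopts this directory
assert ai.target_component == "util"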
Example #9
async def infer_smalltalk_dependencies(request: InferSmalltalkDependencies) -> InferredDependencies:
    # To demo an inference rule, we simply treat each `sources` file as containing a list of
    # addresses, one per line.
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    digest_contents = await Get(DigestContents, Digest, hydrated_sources.snapshot.digest)
    all_lines = itertools.chain.from_iterable(
        file_content.content.decode().splitlines() for file_content in digest_contents
    )
    resolved = await MultiGet(
        Get(Address, AddressInput, AddressInput.parse(line)) for line in all_lines
    )
    return InferredDependencies(resolved)
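This demo rule assumes one address spec per line in each hydrated source file. A minimal sketch of the parsing step, with hypothetical file contents:

content = b"src/smalltalk/util\nsrc/smalltalk/io:streams\n"  # hypothetical file contents
inputs = [AddressInput.parse(line) for line in content.decode().splitlines()]
assert [ai.path_component for ai in inputs] == ["src/smalltalk/util", "src/smalltalk/io"]
assert inputs[1].target_component == "streams"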
Example #10
async def resolve_address(address_input: AddressInput) -> Address:
    # Determine the type of the path_component of the input.
    if address_input.path_component:
        paths = await Get(Paths,
                          PathGlobs(globs=(address_input.path_component, )))
        is_file, is_dir = bool(paths.files), bool(paths.dirs)
    else:
        # It is an address in the root directory.
        is_file, is_dir = False, True

    if is_file:
        return address_input.file_to_address()
    elif is_dir:
        return address_input.dir_to_address()
    else:
        spec = address_input.path_component
        if address_input.target_component:
            spec += f":{address_input.target_component}"
        raise ResolveError(
            f"The file or directory '{address_input.path_component}' does not exist on disk in the "
            f"workspace, so the address '{spec}' cannot be resolved.")
Example #11
async def transitive_targets_lite(request: TransitiveTargetsRequestLite) -> TransitiveTargets:
    roots_as_targets = await Get(Targets, Addresses(request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: Dict[Address, Tuple[Address, ...]] = {}
    while queued:
        direct_dependencies_addresses_per_tgt = await MultiGet(
            Get(Addresses, DependenciesRequestLite(tgt.get(Dependencies))) for tgt in queued
        )
        direct_dependencies_per_tgt = []
        for addresses_per_tgt in direct_dependencies_addresses_per_tgt:
            wrapped_tgts = await MultiGet(
                Get(WrappedTarget, Address, addr) for addr in addresses_per_tgt
            )
            direct_dependencies_per_tgt.append(
                tuple(wrapped_t.target for wrapped_t in wrapped_tgts)
            )

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps) for deps in direct_dependencies_per_tgt),
            )
        )

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies_per_tgt)
        ).difference(visited)
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # subtargets being used, so we need to use `roots_as_targets` to have this expansion.
    _detect_cycles(tuple(t.address for t in roots_as_targets), dependency_mapping)

    # Apply any transitive excludes (`!!` ignores).
    wrapped_transitive_excludes = await MultiGet(
        Get(
            WrappedTarget, AddressInput, AddressInput.parse(addr, relative_to=tgt.address.spec_path)
        )
        for tgt in (*roots_as_targets, *visited)
        for addr in tgt.get(Dependencies).unevaluated_transitive_excludes.values
    )
    transitive_excludes = FrozenOrderedSet(
        wrapped_t.target for wrapped_t in wrapped_transitive_excludes
    )

    return TransitiveTargets(
        tuple(roots_as_targets), FrozenOrderedSet(visited.difference(transitive_excludes))
    )
Example #12
async def resolve_unparsed_address_inputs(
        request: UnparsedAddressInputs,
        global_options: GlobalOptions) -> Addresses:
    addresses = await MultiGet(
        Get(
            Address,
            AddressInput,
            AddressInput.parse(
                v,
                relative_to=request.relative_to,
                subproject_roots=global_options.options.subproject_roots,
            ),
        ) for v in request.values)
    return Addresses(addresses)
Example #13
async def determine_main_pkg_for_go_binary(
    request: GoBinaryMainPackageRequest, ) -> GoBinaryMainPackage:
    addr = request.field.address
    if request.field.value:
        wrapped_specified_tgt = await Get(
            WrappedTarget,
            AddressInput,
            AddressInput.parse(request.field.value,
                               relative_to=addr.spec_path),
        )
        if not wrapped_specified_tgt.target.has_field(
                GoFirstPartyPackageSourcesField):
            raise InvalidFieldException(
                f"The {repr(GoBinaryMainPackageField.alias)} field in target {addr} must point to "
                "a `go_first_party_package` target, but was the address for a "
                f"`{wrapped_specified_tgt.target.alias}` target.\n\n"
                "Hint: you should normally not specify this field so that Pants will find the "
                "`go_first_party_package` target for you. (Pants generates "
                "`go_first_party_package` targets based on the `go_mod` target)."
            )
        return GoBinaryMainPackage(wrapped_specified_tgt.target.address)

    candidate_targets = await Get(
        Targets, AddressSpecs([SiblingAddresses(addr.spec_path)]))
    relevant_pkg_targets = [
        tgt for tgt in candidate_targets
        if tgt.has_field(GoFirstPartyPackageSourcesField)
        and tgt.residence_dir == addr.spec_path
    ]
    if len(relevant_pkg_targets) == 1:
        return GoBinaryMainPackage(relevant_pkg_targets[0].address)

    wrapped_tgt = await Get(WrappedTarget, Address, addr)
    alias = wrapped_tgt.target.alias
    if not relevant_pkg_targets:
        raise ResolveError(
            f"The `{alias}` target {addr} requires that there is a `go_first_party_package` "
            f"target for its directory {addr.spec_path}, but none were found.\n\n"
            "Have you added a `go_mod` target (which will generate `go_first_party_package` "
            "targets)?")
    raise ResolveError(
        f"There are multiple `go_first_party_package` targets for the same directory of the "
        f"`{alias}` target {addr}: {addr.spec_path}. It is ambiguous what to use as the `main` "
        "package.\n\n"
        f"To fix, please either set the `main` field for `{addr} or remove these "
        "`go_first_party_package` targets so that only one remains: "
        f"{sorted(tgt.address.spec for tgt in relevant_pkg_targets)}")
Example #14
def maybe_address(val: str, renames: MacroRenames, *,
                  relative_to: str | None) -> Address | None:
    # All macros generate targets with a `name`, so we know they must have `:`. We know they
    # also can't have `#` because they're not generated targets syntax.
    if ":" not in val or "#" in val:
        return None

    try:
        # We assume that all addresses are normal addresses, rather than file addresses, as
        # we know that none of the generated targets will be file addresses. That is, we can
        # ignore file addresses.
        addr = AddressInput.parse(val,
                                  relative_to=relative_to).dir_to_address()
    except (AddressParseException, InvalidAddress):
        return None

    return addr if addr in renames.generated else None
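The fast-path guard at the top of `maybe_address` can be illustrated on its own (`looks_like_macro_address` is a hypothetical stand-in for that check; the address values are made up):

def looks_like_macro_address(val: str) -> bool:
    # Mirrors the guard above: macro-generated targets always carry an explicit `:name`
    # and never use generated-target (`#`) syntax.
    return ":" in val and "#" not in val

assert looks_like_macro_address("src/proj:old_macro")
assert not looks_like_macro_address("src/proj")
assert not looks_like_macro_address("src/proj:tgt#generated")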
Example #15
async def _determine_literal_addresses_from_raw_specs(
        literal_specs: tuple[AddressLiteralSpec, ...], *,
        description_of_origin: str) -> tuple[WrappedTarget, ...]:
    literal_addresses = await MultiGet(
        Get(
            Address,
            AddressInput(
                spec.path_component,
                spec.target_component,
                generated_component=spec.generated_component,
                parameters=spec.parameters,
                description_of_origin=description_of_origin,
            ),
        ) for spec in literal_specs)

    # We replace references to parametrized target templates with all their created targets. For
    # example:
    #  - dir:tgt -> (dir:tgt@k=v1, dir:tgt@k=v2)
    #  - dir:tgt@k=v -> (dir:tgt@k=v,another=a, dir:tgt@k=v,another=b), but not anything
    #       where @k=v is not true.
    literal_parametrizations = await MultiGet(
        Get(
            _TargetParametrizations,
            _TargetParametrizationsRequest(
                address.maybe_convert_to_target_generator(),
                description_of_origin=description_of_origin,
            ),
        ) for address in literal_addresses)

    # Note that if the address is not in the _TargetParametrizations, we must fall back to that
    # address's value. This will allow us to error that the address is invalid.
    all_candidate_addresses = itertools.chain.from_iterable(
        list(params.get_all_superset_targets(address)) or [address] for
        address, params in zip(literal_addresses, literal_parametrizations))

    # We eagerly call the `WrappedTarget` rule because it will validate that every final address
    # actually exists, such as with generated target addresses.
    return await MultiGet(
        Get(
            WrappedTarget,
            WrappedTargetRequest(addr,
                                 description_of_origin=description_of_origin))
        for addr in all_candidate_addresses)
Example #16
async def determine_main_pkg_for_go_binary(
    request: GoBinaryMainPackageRequest,
) -> GoBinaryMainPackage:
    addr = request.field.address
    if request.field.value:
        description_of_origin = (
            f"the `{request.field.alias}` field from the target {request.field.address}"
        )
        specified_address = await Get(
            Address,
            AddressInput,
            AddressInput.parse(
                request.field.value,
                relative_to=addr.spec_path,
                description_of_origin=description_of_origin,
            ),
        )
        wrapped_specified_tgt = await Get(
            WrappedTarget,
            WrappedTargetRequest(specified_address, description_of_origin=description_of_origin),
        )
        if not wrapped_specified_tgt.target.has_field(GoPackageSourcesField):
            raise InvalidFieldException(
                f"The {repr(GoBinaryMainPackageField.alias)} field in target {addr} must point to "
                "a `go_package` target, but was the address for a "
                f"`{wrapped_specified_tgt.target.alias}` target.\n\n"
                "Hint: you should normally not specify this field so that Pants will find the "
                "`go_package` target for you."
            )
        return GoBinaryMainPackage(wrapped_specified_tgt.target.address)

    candidate_targets = await Get(
        Targets,
        RawSpecs(
            dir_globs=(DirGlobSpec(addr.spec_path),),
            description_of_origin="the `go_binary` dependency inference rule",
        ),
    )
    relevant_pkg_targets = [
        tgt
        for tgt in candidate_targets
        if tgt.has_field(GoPackageSourcesField) and tgt.residence_dir == addr.spec_path
    ]
    if len(relevant_pkg_targets) == 1:
        return GoBinaryMainPackage(relevant_pkg_targets[0].address)

    if not relevant_pkg_targets:
        raise ResolveError(
            f"The target {addr} requires that there is a `go_package` "
            f"target defined in its directory {addr.spec_path}, but none were found.\n\n"
            "To fix, add a target like `go_package()` or `go_package(name='pkg')` to the BUILD "
            f"file in {addr.spec_path}."
        )
    raise ResolveError(
        f"There are multiple `go_package` targets for the same directory of the "
        f"target {addr}: {addr.spec_path}. It is ambiguous what to use as the `main` "
        "package.\n\n"
        f"To fix, please either set the `main` field for `{addr} or remove these "
        "`go_package` targets so that only one remains: "
        f"{sorted(tgt.address.spec for tgt in relevant_pkg_targets)}"
    )
Example #17
async def addresses_from_address_specs(
    address_specs: AddressSpecs,
    build_file_options: BuildFileOptions,
    specs_filter: AddressSpecsFilter,
) -> Addresses:
    matched_addresses: OrderedSet[Address] = OrderedSet()
    filtering_disabled = address_specs.filter_by_global_options is False

    # Resolve all `AddressLiteralSpec`s. Will error on invalid addresses.
    literal_wrapped_targets = await MultiGet(
        Get(
            WrappedTarget,
            AddressInput(spec.path_component, spec.target_component,
                         spec.generated_component),
        ) for spec in address_specs.literals)
    matched_addresses.update(
        wrapped_tgt.target.address for wrapped_tgt in literal_wrapped_targets
        if filtering_disabled or specs_filter.matches(wrapped_tgt.target))
    if not address_specs.globs:
        return Addresses(matched_addresses)

    # Resolve all `AddressGlobSpecs`.
    build_file_paths = await Get(
        Paths,
        PathGlobs,
        address_specs.to_build_file_path_globs(
            build_patterns=build_file_options.patterns,
            build_ignore_patterns=build_file_options.ignores,
        ),
    )
    dirnames = {os.path.dirname(f) for f in build_file_paths.files}
    address_families = await MultiGet(
        Get(AddressFamily, AddressFamilyDir(d)) for d in dirnames)
    base_addresses = Addresses(
        itertools.chain.from_iterable(
            address_family.addresses_to_target_adaptors
            for address_family in address_families))

    target_parametrizations_list = await MultiGet(
        Get(_TargetParametrizations, Address, base_address)
        for base_address in base_addresses)
    residence_dir_to_targets = defaultdict(list)
    for target_parametrizations in target_parametrizations_list:
        for tgt in target_parametrizations.all:
            residence_dir_to_targets[tgt.residence_dir].append(tgt)

    matched_globs = set()
    for glob_spec in address_specs.globs:
        for residence_dir in residence_dir_to_targets:
            if not glob_spec.matches(residence_dir):
                continue
            matched_globs.add(glob_spec)
            matched_addresses.update(
                tgt.address for tgt in residence_dir_to_targets[residence_dir]
                if filtering_disabled or specs_filter.matches(tgt))

    unmatched_globs = [
        glob for glob in address_specs.globs
        if glob not in matched_globs and glob.error_if_no_matches
    ]
    if unmatched_globs:
        glob_description = (
            f"the address glob `{unmatched_globs[0]}`"
            if len(unmatched_globs) == 1 else
            f"these address globs: {sorted(str(glob) for glob in unmatched_globs)}"
        )
        raise ResolveError(
            f"No targets found for {glob_description}\n\n"
            f"Do targets exist in those directories? Maybe run `{bin_name()} tailor` to generate "
            f"BUILD files? See {doc_url('targets')} about targets and BUILD files."
        )

    return Addresses(sorted(matched_addresses))
Example #18
async def addresses_with_origins_from_address_specs(
    address_specs: AddressSpecs, global_options: GlobalOptions, specs_filter: AddressSpecsFilter
) -> AddressesWithOrigins:
    """Given an AddressMapper and list of AddressSpecs, return matching AddressesWithOrigins.

    :raises: :class:`ResolveError` if the provided specs fail to match targets, and those spec
        types expect to have matched something.
    """
    matched_addresses: OrderedSet[Address] = OrderedSet()
    addr_to_origin: Dict[Address, AddressSpec] = {}
    filtering_disabled = address_specs.filter_by_global_options is False

    # First convert all `AddressLiteralSpec`s. Some of the resulting addresses may be file
    # addresses. This will raise an exception if any of the addresses are not valid.
    literal_addresses = await MultiGet(
        Get(Address, AddressInput(spec.path_component, spec.target_component))
        for spec in address_specs.literals
    )
    literal_target_adaptors = await MultiGet(
        Get(TargetAdaptor, Address, addr.maybe_convert_to_base_target())
        for addr in literal_addresses
    )
    # We convert to targets for the side effect of validating that any file addresses actually
    # belong to the specified base targets.
    await Get(
        UnexpandedTargets, Addresses(addr for addr in literal_addresses if not addr.is_base_target)
    )
    for literal_spec, addr, target_adaptor in zip(
        address_specs.literals, literal_addresses, literal_target_adaptors
    ):
        addr_to_origin[addr] = literal_spec
        if filtering_disabled or specs_filter.matches(addr, target_adaptor):
            matched_addresses.add(addr)

    # Then, convert all `AddressGlobSpecs`. Snapshot all BUILD files covered by the specs, then
    # group by directory.
    snapshot = await Get(
        Snapshot,
        PathGlobs,
        address_specs.to_path_globs(
            build_patterns=global_options.options.build_patterns,
            build_ignore_patterns=global_options.options.build_ignore,
        ),
    )
    dirnames = {os.path.dirname(f) for f in snapshot.files}
    address_families = await MultiGet(Get(AddressFamily, Dir(d)) for d in dirnames)
    address_family_by_directory = {af.namespace: af for af in address_families}

    for glob_spec in address_specs.globs:
        # These may raise ResolveError, depending on the type of spec.
        addr_families_for_spec = glob_spec.matching_address_families(address_family_by_directory)
        addr_target_pairs_for_spec = glob_spec.matching_addresses(addr_families_for_spec)

        for addr, _ in addr_target_pairs_for_spec:
            # A target might be covered by multiple specs, so we take the most specific one.
            addr_to_origin[addr] = AddressSpecs.more_specific(addr_to_origin.get(addr), glob_spec)

        matched_addresses.update(
            addr
            for (addr, tgt) in addr_target_pairs_for_spec
            if filtering_disabled or specs_filter.matches(addr, tgt)
        )

    return AddressesWithOrigins(
        AddressWithOrigin(address=addr, origin=addr_to_origin[addr]) for addr in matched_addresses
    )
Example #19
async def resolve_dependencies(request: DependenciesRequest,
                               union_membership: UnionMembership,
                               global_options: GlobalOptions) -> Addresses:
    explicitly_provided = await Get(ExplicitlyProvidedDependencies,
                                    DependenciesRequest, request)

    # Inject any dependencies. This is determined by the `request.field` class. For example, if
    # there is a rule to inject for FortranDependencies, then FortranDependencies and any subclass
    # of FortranDependencies will use that rule.
    inject_request_types = union_membership.get(InjectDependenciesRequest)
    injected = await MultiGet(
        Get(InjectedDependencies, InjectDependenciesRequest,
            inject_request_type(request.field))
        for inject_request_type in inject_request_types
        if isinstance(request.field, inject_request_type.inject_for))

    inference_request_types = union_membership.get(InferDependenciesRequest)
    inferred: Tuple[InferredDependencies, ...] = ()
    if inference_request_types:
        # Dependency inference is solely determined by the `Sources` field for a Target, so we
        # re-resolve the original target to inspect its `Sources` field, if any.
        wrapped_tgt = await Get(WrappedTarget, Address, request.field.address)
        sources_field = wrapped_tgt.target.get(Sources)
        relevant_inference_request_types = [
            inference_request_type
            for inference_request_type in inference_request_types
            if isinstance(sources_field, inference_request_type.infer_from)
        ]
        inferred = await MultiGet(
            Get(
                InferredDependencies,
                InferDependenciesRequest,
                inference_request_type(sources_field),
            ) for inference_request_type in relevant_inference_request_types)

    # If this is a BUILD target, or no dependency inference implementation can infer dependencies on
    # a file address's sibling files, then we inject dependencies on all the BUILD target's
    # generated subtargets.
    subtarget_addresses: Tuple[Address, ...] = ()
    no_sibling_file_deps_inferrable = not inferred or all(
        inferred_deps.sibling_dependencies_inferrable is False
        for inferred_deps in inferred)
    if not request.field.address.is_file_target or no_sibling_file_deps_inferrable:
        subtargets = await Get(
            Subtargets, Address,
            request.field.address.maybe_convert_to_build_target())
        subtarget_addresses = tuple(t.address for t in subtargets.subtargets
                                    if t.address != request.field.address)

    # If the target has `SpecialCasedDependencies`, such as the `archive` target having
    # `files` and `packages` fields, then we possibly include those too. We don't want to always
    # include those dependencies because they should often be excluded from the result due to
    # being handled elsewhere in the calling code.
    special_cased: Tuple[Address, ...] = ()
    if request.include_special_cased_deps:
        wrapped_tgt = await Get(WrappedTarget, Address, request.field.address)
        # Unlike normal, we don't use `tgt.get()` because there may be >1 subclass of
        # SpecialCasedDependencies.
        special_cased_fields = tuple(
            field for field in wrapped_tgt.target.field_values.values()
            if isinstance(field, SpecialCasedDependencies))
        # We can't use the normal `Get(Addresses, UnparsedAddressInputs)` due to a graph cycle.
        special_cased = await MultiGet(
            Get(
                Address,
                AddressInput,
                AddressInput.parse(
                    addr,
                    relative_to=request.field.address.spec_path,
                    subproject_roots=global_options.options.subproject_roots,
                ),
            ) for special_cased_field in special_cased_fields
            for addr in special_cased_field.to_unparsed_address_inputs().values
        )

    result = {
        addr
        for addr in (
            *subtarget_addresses,
            *explicitly_provided.includes,
            *itertools.chain.from_iterable(injected),
            *itertools.chain.from_iterable(inferred),
            *special_cased,
        ) if addr not in explicitly_provided.ignores
    }
    return Addresses(sorted(result))
Example #20
async def resolve_dependencies(
    request: DependenciesRequest,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Addresses:
    wrapped_tgt, explicitly_provided = await MultiGet(
        Get(WrappedTarget, Address, request.field.address),
        Get(ExplicitlyProvidedDependencies, DependenciesRequest, request),
    )
    tgt = wrapped_tgt.target

    # Inject any dependencies (based on `Dependencies` field rather than `Sources` field).
    inject_request_types = union_membership.get(InjectDependenciesRequest)
    injected = await MultiGet(
        Get(InjectedDependencies, InjectDependenciesRequest,
            inject_request_type(request.field))
        for inject_request_type in inject_request_types
        if isinstance(request.field, inject_request_type.inject_for))

    # Infer any dependencies (based on `Sources` field).
    inference_request_types = union_membership.get(InferDependenciesRequest)
    inferred: Tuple[InferredDependencies, ...] = ()
    if inference_request_types:
        sources_field = tgt.get(Sources)
        relevant_inference_request_types = [
            inference_request_type
            for inference_request_type in inference_request_types
            if isinstance(sources_field, inference_request_type.infer_from)
        ]
        inferred = await MultiGet(
            Get(
                InferredDependencies,
                InferDependenciesRequest,
                inference_request_type(sources_field),
            ) for inference_request_type in relevant_inference_request_types)

    # If it's a target generator, inject dependencies on all of its generated targets.
    generated_addresses: tuple[Address, ...] = ()
    if target_types_to_generate_requests.is_generator(
            tgt) and not tgt.address.is_generated_target:
        generate_request = target_types_to_generate_requests[type(tgt)]
        generated_targets = await Get(GeneratedTargets, GenerateTargetsRequest,
                                      generate_request(tgt))
        generated_addresses = tuple(generated_targets.keys())

    # If the target has `SpecialCasedDependencies`, such as the `archive` target having
    # `files` and `packages` fields, then we possibly include those too. We don't want to always
    # include those dependencies because they should often be excluded from the result due to
    # being handled elsewhere in the calling code.
    special_cased: Tuple[Address, ...] = ()
    if request.include_special_cased_deps:
        # Unlike normal, we don't use `tgt.get()` because there may be >1 subclass of
        # SpecialCasedDependencies.
        special_cased_fields = tuple(
            field for field in tgt.field_values.values()
            if isinstance(field, SpecialCasedDependencies))
        # We can't use the normal `Get(Addresses, UnparsedAddressInputs)` due to a graph cycle.
        special_cased = await MultiGet(
            Get(
                Address,
                AddressInput,
                AddressInput.parse(
                    addr,
                    relative_to=tgt.address.spec_path,
                    subproject_roots=global_options.options.subproject_roots,
                ),
            ) for special_cased_field in special_cased_fields
            for addr in special_cased_field.to_unparsed_address_inputs().values
        )

    result = {
        addr
        for addr in (
            *generated_addresses,
            *explicitly_provided.includes,
            *itertools.chain.from_iterable(injected),
            *itertools.chain.from_iterable(inferred),
            *special_cased,
        ) if addr not in explicitly_provided.ignores
    }
    return Addresses(sorted(result))
Example #21
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
    exported_target = request.exported_target

    owned_deps = await Get(OwnedDependencies, DependencyOwner(exported_target))
    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([exported_target.target.address]))
    # files() targets aren't owned by a single exported target - they aren't code, so
    # we allow them to be in multiple dists. This is helpful for, e.g., embedding
    # a standard license file in a dist.
    files_targets = (tgt for tgt in transitive_targets.closure
                     if tgt.has_field(FilesSources))
    targets = Targets(
        itertools.chain((od.target for od in owned_deps), files_targets))
    sources = await Get(SetupPySources,
                        SetupPySourcesRequest(targets, py2=request.py2))
    requirements = await Get(ExportedTargetRequirements,
                             DependencyOwner(exported_target))

    # Nest the sources under the src/ prefix.
    src_digest = await Get(Digest, AddPrefix(sources.digest,
                                             CHROOT_SOURCE_ROOT))

    target = exported_target.target
    provides = exported_target.provides

    # Generate the kwargs to the setup() call.
    setup_kwargs = provides.setup_py_keywords.copy()
    setup_kwargs.update({
        "package_dir": {
            "": CHROOT_SOURCE_ROOT
        },
        "packages": sources.packages,
        "namespace_packages": sources.namespace_packages,
        "package_data": dict(sources.package_data),
        "install_requires": tuple(requirements),
    })
    key_to_binary_spec = provides.binaries
    keys = list(key_to_binary_spec.keys())
    addresses = await MultiGet(
        Get(
            Address,
            AddressInput,
            AddressInput.parse(key_to_binary_spec[key],
                               relative_to=target.address.spec_path),
        ) for key in keys)
    binaries = await Get(Targets, Addresses(addresses))
    for key, binary in zip(keys, binaries):
        binary_entry_point = binary.get(PythonEntryPoint).value
        if not binary_entry_point:
            raise InvalidEntryPoint(
                f"The binary {key} exported by {target.address} is not a valid entry point."
            )
        entry_points = setup_kwargs["entry_points"] = setup_kwargs.get(
            "entry_points", {})
        console_scripts = entry_points["console_scripts"] = entry_points.get(
            "console_scripts", [])
        console_scripts.append(f"{key}={binary_entry_point}")

    # Generate the setup script.
    setup_py_content = SETUP_BOILERPLATE.format(
        target_address_spec=target.address.spec,
        setup_kwargs_str=distutils_repr(setup_kwargs),
    ).encode()
    extra_files_digest = await Get(
        Digest,
        CreateDigest([
            FileContent("setup.py", setup_py_content),
            FileContent(
                "MANIFEST.in",
                "include *.py".encode()),  # Make sure setup.py is included.
        ]),
    )

    chroot_digest = await Get(Digest,
                              MergeDigests((src_digest, extra_files_digest)))
    return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs,
                                                   sort_keys=True))
Example #22
async def inject_dependencies(_: InjectProtobufDependencies,
                              protoc: Protoc) -> InjectedDependencies:
    addresses = await MultiGet(
        Get(Address, AddressInput, AddressInput.parse(addr))
        for addr in protoc.runtime_targets)
    return InjectedDependencies(addresses)
Example #23
async def resolve_dependencies(
    request: DependenciesRequest,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    union_membership: UnionMembership,
    subproject_roots: SubprojectRoots,
) -> Addresses:
    wrapped_tgt, explicitly_provided = await MultiGet(
        Get(
            WrappedTarget,
            WrappedTargetRequest(request.field.address,
                                 description_of_origin="<infallible>"),
        ),
        Get(ExplicitlyProvidedDependencies, DependenciesRequest, request),
    )
    tgt = wrapped_tgt.target

    # Inject any dependencies (based on `Dependencies` field rather than `SourcesField`).
    inject_request_types = union_membership.get(InjectDependenciesRequest)
    injected = await MultiGet(
        Get(InjectedDependencies, InjectDependenciesRequest,
            inject_request_type(request.field))
        for inject_request_type in inject_request_types
        if isinstance(request.field, inject_request_type.inject_for))

    # Infer any dependencies (based on `SourcesField` field).
    inference_request_types = union_membership.get(InferDependenciesRequest)
    inferred: tuple[InferredDependencies, ...] = ()
    if inference_request_types:
        sources_field = tgt.get(SourcesField)
        relevant_inference_request_types = [
            inference_request_type
            for inference_request_type in inference_request_types
            if isinstance(sources_field, inference_request_type.infer_from)
        ]
        inferred = await MultiGet(
            Get(
                InferredDependencies,
                InferDependenciesRequest,
                inference_request_type(sources_field),
            ) for inference_request_type in relevant_inference_request_types)

    # If it's a target generator, inject dependencies on all of its generated targets.
    generated_addresses: tuple[Address, ...] = ()
    if target_types_to_generate_requests.is_generator(
            tgt) and not tgt.address.is_generated_target:
        parametrizations = await Get(
            _TargetParametrizations,
            _TargetParametrizationsRequest(
                tgt.address.maybe_convert_to_target_generator(),
                description_of_origin=
                (f"the target generator {tgt.address.maybe_convert_to_target_generator()}"
                 ),
            ),
        )
        generated_addresses = tuple(
            parametrizations.generated_for(tgt.address).keys())

    # If the target is parametrized, see whether any explicitly provided dependencies are also
    # parametrized, but with partial/no parameters. If so, fill them in.
    explicitly_provided_includes: Iterable[
        Address] = explicitly_provided.includes
    if request.field.address.is_parametrized and explicitly_provided_includes:
        explicit_dependency_parametrizations = await MultiGet(
            Get(
                _TargetParametrizations,
                _TargetParametrizationsRequest(
                    address.maybe_convert_to_target_generator(),
                    description_of_origin=
                    (f"the `{request.field.alias}` field of the target {tgt.address}"
                     ),
                ),
            ) for address in explicitly_provided_includes)

        explicitly_provided_includes = [
            parametrizations.get_subset(address, tgt).address
            for address, parametrizations in zip(
                explicitly_provided_includes,
                explicit_dependency_parametrizations)
        ]

    # If the target has `SpecialCasedDependencies`, such as the `archive` target having
    # `files` and `packages` fields, then we possibly include those too. We don't want to always
    # include those dependencies because they should often be excluded from the result due to
    # being handled elsewhere in the calling code.
    special_cased: tuple[Address, ...] = ()
    if request.include_special_cased_deps:
        # Unlike normal, we don't use `tgt.get()` because there may be >1 subclass of
        # SpecialCasedDependencies.
        special_cased_fields = tuple(
            field for field in tgt.field_values.values()
            if isinstance(field, SpecialCasedDependencies))
        # We can't use the normal `Get(Addresses, UnparsedAddressInputs)` due to a graph cycle.
        special_cased = await MultiGet(
            Get(
                Address,
                AddressInput,
                AddressInput.parse(
                    addr,
                    relative_to=tgt.address.spec_path,
                    subproject_roots=subproject_roots,
                    description_of_origin=
                    (f"the `{special_cased_field.alias}` field from the target {tgt.address}"
                     ),
                ),
            ) for special_cased_field in special_cased_fields
            for addr in special_cased_field.to_unparsed_address_inputs().values
        )

    result = Addresses(
        sorted({
            addr
            for addr in (
                *generated_addresses,
                *explicitly_provided_includes,
                *itertools.chain.from_iterable(injected),
                *itertools.chain.from_iterable(inferred),
                *special_cased,
            ) if addr not in explicitly_provided.ignores
        }))

    # Validate dependencies.
    _ = await MultiGet(
        Get(
            ValidatedDependencies,
            ValidateDependenciesRequest,
            vd_request_type(vd_request_type.field_set_type.create(tgt),
                            result),  # type: ignore[misc]
        ) for vd_request_type in union_membership.get(
            ValidateDependenciesRequest) if
        vd_request_type.field_set_type.is_applicable(tgt)  # type: ignore[misc]
    )

    return result
Example #24
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
) -> Specs:
    """Determine the specs for a given Pants run."""
    global_options = options.for_global_scope()
    unmatched_cli_globs = global_options.unmatched_cli_globs.to_glob_match_error_behavior(
    )
    convert_dir_literal_to_address_literal = (
        global_options.use_deprecated_directory_cli_args_semantics)
    if global_options.is_default(
            "use_deprecated_directory_cli_args_semantics"):
        warn_or_error(
            "2.14.0.dev0",
            "`use_deprecated_directory_cli_args_semantics` defaulting to True",
            softwrap(f"""
                Currently, a directory argument like `{bin_name()} test dir` is shorthand for the
                target `dir:dir`, i.e. the target that leaves off `name=`.

                In Pants 2.14, by default, a directory argument will instead match all
                targets/files in the directory.

                To opt into the new and more intuitive semantics early, set
                `use_deprecated_directory_cli_args_semantics = false` in the `[GLOBAL]` section in
                `pants.toml`. Otherwise, set to `true` to silence this warning.
                """),
        )
    specs = SpecsParser().parse_specs(
        options.specs,
        description_of_origin="CLI arguments",
        unmatched_glob_behavior=unmatched_cli_globs,
        convert_dir_literal_to_address_literal=
        convert_dir_literal_to_address_literal,
    )

    changed_options = ChangedOptions.from_options(options.for_scope("changed"))
    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        specs_description = specs.arguments_provided_description()
        assert specs_description is not None
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. You can "
            f"only use `{changed_name}` or use normal arguments.")

    if not changed_options.provided:
        return specs

    (git_binary, ) = session.product_request(GitBinary,
                                             [Params(GitBinaryRequest())])
    (maybe_git_worktree, ) = session.product_request(
        MaybeGitWorktree, [Params(GitWorktreeRequest(), git_binary)])
    if not maybe_git_worktree.git_worktree:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )

    changed_files = tuple(
        changed_options.changed_files(maybe_git_worktree.git_worktree))
    file_literal_specs = tuple(FileLiteralSpec(f) for f in changed_files)

    changed_request = ChangedRequest(changed_files, changed_options.dependees)
    (changed_addresses, ) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)])
    logger.debug("changed addresses: %s", changed_addresses)

    address_literal_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(
            address.spec, description_of_origin="`--changed-since`")
        address_literal_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                target_component=address_input.target_component,
                generated_component=address_input.generated_component,
                parameters=address_input.parameters,
            ))

    return Specs(
        includes=RawSpecs(
            # We need both address_literals and file_literals to cover all our edge cases, including
            # target-aware vs. target-less goals, e.g. `list` vs `count-loc`.
            address_literals=tuple(address_literal_specs),
            file_literals=file_literal_specs,
            unmatched_glob_behavior=unmatched_cli_globs,
            filter_by_global_options=True,
            from_change_detection=True,
            description_of_origin="`--changed-since`",
        ),
        ignores=RawSpecs(description_of_origin="`--changed-since`"),
    )