Example #1
    def test_fails_on_nonexistent_specs(self) -> None:
        """Test that address specs referring to nonexistent targets raise a ResolveError."""
        address_family = AddressFamily('root',
                                       {'a': ('root/BUILD', TargetAdaptor())})
        address_specs = AddressSpecs(
            [SingleAddress('root', 'b'),
             SingleAddress('root', 'a')])

        expected_rx_str = re.escape(
            """"b" was not found in namespace "root". Did you mean one of:
  :a""")
        with self.assertRaisesRegex(ResolveError, expected_rx_str):
            self._resolve_build_file_addresses(address_specs, address_family,
                                               self._snapshot(),
                                               self._address_mapper())

        # Ensure that we still catch nonexistent targets later on in the list of command-line
        # address specs.
        address_specs = AddressSpecs(
            [SingleAddress('root', 'a'),
             SingleAddress('root', 'b')])
        with self.assertRaisesRegex(ResolveError, expected_rx_str):
            self._resolve_build_file_addresses(address_specs, address_family,
                                               self._snapshot(),
                                               self._address_mapper())
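
Note: `assertRaisesRegex` treats its second argument as a regular expression, so the expected message above is passed through `re.escape` to neutralize any metacharacters and match it as literal text. A minimal, self-contained sketch of the same pattern (the `ResolveError` and `resolve` below are stand-ins, not Pants code):

import re
import unittest


class ResolveError(Exception):
    """Stand-in for the real ResolveError, for illustration only."""


def resolve(name: str) -> None:
    raise ResolveError(
        f'"{name}" was not found in namespace "root". Did you mean one of:\n  :a')


class EscapeDemoTest(unittest.TestCase):
    def test_message_is_matched_literally(self) -> None:
        # re.escape makes the multi-line message safe to use as a pattern.
        expected_rx_str = re.escape(
            '"b" was not found in namespace "root". Did you mean one of:\n  :a')
        with self.assertRaisesRegex(ResolveError, expected_rx_str):
            resolve("b")


if __name__ == "__main__":
    unittest.main()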
Example #2
async def find_owners(owners_request: OwnersRequest) -> Owners:
    # Determine which of the sources are live and which are deleted.
    sources_set_snapshot = await Get(Snapshot,
                                     PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_set_snapshot.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        # For live files, we use expanded Targets, which have file level precision but which are
        # only created for existing files. For deleted files we use UnexpandedTargets, which have
        # the original declared glob.
        candidate_targets: Iterable[Target]
        if live:
            if not live_dirs:
                continue
            sources_set = live_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in live_dirs)
            candidate_targets = await Get(Targets,
                                          AddressSpecs(candidate_specs))
        else:
            if not deleted_dirs:
                continue
            sources_set = deleted_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in deleted_dirs)
            candidate_targets = await Get(UnexpandedTargets,
                                          AddressSpecs(candidate_specs))

        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_targets)

        for candidate_tgt, bfa in zip(candidate_targets, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
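
Note: the opening step, partitioning the requested sources into live and deleted sets and collecting each group's directories, is the core of the walk-up strategy. A stdlib-only sketch, with a plain membership check standing in for the engine's `Snapshot` (names are illustrative):

import os

def partition_sources(sources, existing_files):
    """Split `sources` into (live, deleted), preserving input order.

    `existing_files` plays the role of the Snapshot in the rule above:
    only paths the engine could still find are considered live.
    """
    live = [s for s in sources if s in existing_files]
    deleted = [s for s in sources if s not in existing_files]
    # The directories of each group seed the upward walk for candidate targets.
    live_dirs = {os.path.dirname(s) for s in live}
    deleted_dirs = {os.path.dirname(s) for s in deleted}
    return live, deleted, live_dirs, deleted_dirs

live, deleted, live_dirs, _ = partition_sources(
    ["src/app/main.py", "src/app/old.py"], existing_files={"src/app/main.py"})
assert deleted == ["src/app/old.py"] and live_dirs == {"src/app"}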
Example #3
async def find_owners(build_configuration: BuildConfiguration,
                      address_mapper: AddressMapper,
                      owners_request: OwnersRequest) -> BuildFileAddresses:
    sources_set = OrderedSet(owners_request.sources)
    dirs_set = OrderedSet(dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](
        AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target):
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (i.e., `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get('sources', None)
        if target_sources and any_matches_filespec(
                paths=sources_set, spec=target_sources.filespec):
            return True
        return False

    direct_owners = tuple(
        ht.adaptor.address for ht in candidate_targets
        if LegacyAddressMapper.any_is_declaring_file(
            ht.adaptor.address, sources_set) or owns_any_source(ht))

    # If the OwnersRequest does not require dependees, then we're done.
    if owners_request.include_dependees == IncludeDependeesOption.NONE:
        return BuildFileAddresses(direct_owners)

    # Otherwise: find dependees.
    all_addresses = await Get[BuildFileAddresses](AddressSpecs(
        (DescendantAddresses(''), )))
    all_structs = [
        s.value
        for s in await MultiGet(Get[HydratedStruct](Address, a.to_address())
                                for a in all_addresses)
    ]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(
        target_types_from_build_file_aliases(bfa), address_mapper, all_structs)
    if owners_request.include_dependees == IncludeDependeesOption.DIRECT:
        return BuildFileAddresses(
            tuple(graph.dependents_of_addresses(direct_owners)))
    assert owners_request.include_dependees == IncludeDependeesOption.TRANSITIVE
    return BuildFileAddresses(
        tuple(graph.transitive_dependents_of_addresses(direct_owners)))
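
Note: the dependee expansion at the end amounts to querying a reverse-dependency graph: DIRECT is one hop along reversed edges, TRANSITIVE is the closure of that relation. A self-contained sketch of the distinction (a toy graph, not Pants' `_DependentGraph`):

from collections import defaultdict

def dependents_graph(deps):
    """Invert a target -> dependencies mapping into target -> dependents."""
    rdeps = defaultdict(set)
    for tgt, tgt_deps in deps.items():
        for dep in tgt_deps:
            rdeps[dep].add(tgt)
    return rdeps

def transitive_dependents(rdeps, roots):
    """Walk the reversed edges until a fixed point is reached."""
    seen, frontier = set(), set(roots)
    while frontier:
        nxt = set().union(*(rdeps[t] for t in frontier)) - seen
        seen |= nxt
        frontier = nxt
    return seen

deps = {"app": {"lib"}, "lib": {"core"}, "core": set()}
rdeps = dependents_graph(deps)
assert rdeps["core"] == {"lib"}  # direct dependents: one hop
assert transitive_dependents(rdeps, {"core"}) == {"lib", "app"}  # full closure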
Example #4
async def addresses_with_origins_from_address_specs(
        address_mapper: AddressMapper,
        address_specs: AddressSpecs) -> AddressesWithOrigins:
    """Given an AddressMapper and list of AddressSpecs, return matching AddressesWithOrigins.

    :raises: :class:`ResolveError` if there were no matching AddressFamilies or no targets
        were matched.
    """
    # Snapshot all BUILD files covered by the AddressSpecs, then group by directory.
    snapshot = await Get(
        Snapshot,
        PathGlobs,
        address_specs.to_path_globs(
            build_patterns=address_mapper.build_patterns,
            build_ignore_patterns=address_mapper.build_ignore_patterns,
        ),
    )
    dirnames = {os.path.dirname(f) for f in snapshot.files}
    address_families = await MultiGet(
        Get(AddressFamily, Dir(d)) for d in dirnames)
    address_family_by_directory = {af.namespace: af for af in address_families}

    matched_addresses: OrderedSet[Address] = OrderedSet()
    addr_to_origin: Dict[Address, AddressSpec] = {}

    for address_spec in address_specs:
        # These may raise ResolveError, depending on the type of spec.
        addr_families_for_spec = address_spec.matching_address_families(
            address_family_by_directory)
        addr_target_pairs_for_spec = address_spec.matching_addresses(
            addr_families_for_spec)

        if isinstance(address_spec,
                      SingleAddress) and not addr_target_pairs_for_spec:
            addr_family = assert_single_element(addr_families_for_spec)
            raise _did_you_mean_exception(addr_family, address_spec.name)

        for addr, _ in addr_target_pairs_for_spec:
            # A target might be covered by multiple specs, so we take the most specific one.
            addr_to_origin[addr] = AddressSpecs.more_specific(
                addr_to_origin.get(addr), address_spec)

        matched_addresses.update(
            addr for (addr, tgt) in addr_target_pairs_for_spec
            if (address_specs.filter_by_global_options is False
                or address_mapper.matches_filter_options(addr, tgt)))

    return AddressesWithOrigins(
        AddressWithOrigin(address=addr, origin=addr_to_origin[addr])
        for addr in matched_addresses)
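
Note: when several specs match the same address, the loop above keeps only the most specific origin. A minimal sketch of that precedence under an assumed specificity ranking (single address beats sibling glob beats recursive glob); the ranking is illustrative, not the exact `AddressSpecs.more_specific` logic:

# Lower rank = more specific. Hypothetical ordering for illustration.
_SPECIFICITY = {"single_address": 0, "sibling": 1, "descendant": 2}

def more_specific(current, candidate):
    """Keep whichever spec is more specific; `current` may be None."""
    if current is None:
        return candidate
    return min(current, candidate, key=lambda s: _SPECIFICITY[s[0]])

addr_to_origin = {}
for spec, addrs in [(("descendant", "src::"), ["src/app:app"]),
                    (("single_address", "src/app:app"), ["src/app:app"])]:
    for addr in addrs:
        addr_to_origin[addr] = more_specific(addr_to_origin.get(addr), spec)

assert addr_to_origin["src/app:app"][0] == "single_address"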
Example #5
async def map_protobuf_to_python_modules(
    _: PythonProtobufMappingMarker,
) -> FirstPartyPythonMappingImpl:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets
    )

    modules_to_addresses: Dict[str, Tuple[Address]] = {}
    modules_with_multiple_owners: Set[str] = set()

    def add_module(module: str, tgt: Target) -> None:
        if module in modules_to_addresses:
            modules_with_multiple_owners.add(module)
        else:
            modules_to_addresses[module] = (tgt.address,)

    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            # NB: We don't consider the MyPy plugin, which generates `_pb2.pyi`. The stubs end up
            # sharing the same module as the implementation `_pb2.py`. Because both generated files
            # come from the same original Protobuf target, we're covered.
            add_module(proto_path_to_py_module(stripped_f, suffix="_pb2"), tgt)
            if tgt.get(ProtobufGrpcToggle).value:
                add_module(proto_path_to_py_module(stripped_f, suffix="_pb2_grpc"), tgt)

    # Remove modules with ambiguous owners.
    for ambiguous_module in modules_with_multiple_owners:
        modules_to_addresses.pop(ambiguous_module)

    return FirstPartyPythonMappingImpl(modules_to_addresses)
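
Note: the first-owner-wins-then-drop-ambiguities shape used here recurs in several of the mappers below (Examples #10, #13, #15, #17, #23). Reduced to its core with stdlib types only:

def build_unambiguous_mapping(pairs):
    """Map each key to its single owner, discarding keys claimed twice.

    `pairs` is an iterable of (key, owner). Mirrors the mapping rules
    above: the first owner is kept provisionally, and any key later seen
    with a second owner is dropped entirely at the end.
    """
    mapping = {}
    ambiguous = set()
    for key, owner in pairs:
        if key in mapping:
            ambiguous.add(key)
        else:
            mapping[key] = owner
    for key in ambiguous:
        mapping.pop(key)
    return mapping

assert build_unambiguous_mapping(
    [("foo_pb2", "a:a"), ("bar_pb2", "b:b"), ("foo_pb2", "c:c")]
) == {"bar_pb2": "b:b"}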
Example #6
    def inject_addresses_closure(self, addresses):
        addresses = set(addresses) - set(self._target_by_address.keys())
        if not addresses:
            return
        dependencies = (SingleAddress(directory=a.spec_path, name=a.target_name) for a in addresses)
        for _ in self._inject_address_specs(AddressSpecs(dependencies)):
            pass
Example #7
async def rename_conflicting_targets(
        ptgts: PutativeTargets) -> UniquelyNamedPutativeTargets:
    """Ensure that no target addresses collide."""
    all_existing_tgts = await Get(
        UnexpandedTargets, AddressSpecs([MaybeEmptyDescendantAddresses("")]))
    existing_addrs: Set[str] = {tgt.address.spec for tgt in all_existing_tgts}
    uniquely_named_putative_targets: List[PutativeTarget] = []
    for ptgt in ptgts:
        if not ptgt.addressable:
            # Non-addressable PutativeTargets never have collision issues.
            uniquely_named_putative_targets.append(ptgt)
            continue

        idx = 0
        possibly_renamed_ptgt = ptgt
        # Targets in root-level BUILD files must be named explicitly.
        if possibly_renamed_ptgt.path == "" and possibly_renamed_ptgt.kwargs.get(
                "name") is None:
            possibly_renamed_ptgt = possibly_renamed_ptgt.rename("root")
        # Eliminate any address collisions.
        while possibly_renamed_ptgt.address.spec in existing_addrs:
            possibly_renamed_ptgt = ptgt.rename(f"{ptgt.name}{idx}")
            idx += 1
        uniquely_named_putative_targets.append(possibly_renamed_ptgt)
        existing_addrs.add(possibly_renamed_ptgt.address.spec)

    return UniquelyNamedPutativeTargets(
        PutativeTargets(uniquely_named_putative_targets))
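
Note: the collision loop retries with an increasing integer suffix derived from the original name, so renames never stack. The same scheme with plain strings in place of `PutativeTarget` (a hypothetical helper, for illustration):

def uniquify(name, taken):
    """Return `name`, or `name0`, `name1`, ... if it collides.

    Mirrors the loop above: each retry renames from the *original*
    name with the next index, so suffixes never accumulate.
    """
    candidate, idx = name, 0
    while candidate in taken:
        candidate = f"{name}{idx}"
        idx += 1
    taken.add(candidate)
    return candidate

taken = {"utils", "utils0"}
assert uniquify("utils", taken) == "utils1"
assert uniquify("models", taken) == "models"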
Example #8
    def parse_specs(
        cls,
        raw_specs: Iterable[str],
        build_root: Optional[str] = None,
        exclude_patterns: Optional[Iterable[str]] = None,
        tags: Optional[Iterable[str]] = None,
    ) -> Specs:
        """Parse raw string specs into a Specs object."""
        build_root = build_root or get_buildroot()
        spec_parser = CmdLineSpecParser(build_root)

        address_specs: OrderedSet[AddressSpec] = OrderedSet()
        filesystem_specs: OrderedSet[FilesystemSpec] = OrderedSet()
        for spec_str in raw_specs:
            parsed_spec = spec_parser.parse_spec(spec_str)
            if isinstance(parsed_spec, AddressSpec):
                address_specs.add(parsed_spec)
            else:
                filesystem_specs.add(parsed_spec)

        address_specs_collection = AddressSpecs(
            dependencies=address_specs,
            exclude_patterns=exclude_patterns if exclude_patterns else tuple(),
            tags=tags,
        )
        filesystem_specs_collection = FilesystemSpecs(filesystem_specs)
        return Specs(
            address_specs=address_specs_collection,
            filesystem_specs=filesystem_specs_collection,
        )
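
Note: `parse_specs` makes a single pass over the raw strings, routing each parsed spec into one of two ordered, deduplicating collections by type. The same partition with stand-in classes and a deliberately crude parser (the real parsing lives in `CmdLineSpecParser`):

class AddressSpec: ...
class FilesystemSpec: ...

def parse_spec(raw: str):
    # Crude stand-in heuristic: a dot in the final path component
    # suggests a file. The real parser is far more careful.
    return FilesystemSpec() if "." in raw.rsplit("/", 1)[-1] else AddressSpec()

def partition_specs(raw_specs):
    address_specs, filesystem_specs = [], []
    for raw in raw_specs:
        parsed = parse_spec(raw)
        (address_specs if isinstance(parsed, AddressSpec)
         else filesystem_specs).append(parsed)
    return address_specs, filesystem_specs

addrs, files = partition_specs(["src/app:app", "src/app/main.py"])
assert len(addrs) == 1 and len(files) == 1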
Example #9
async def setup_pytest_lockfile(
        _: PytestLockfileSentinel, pytest: PyTest,
        python_setup: PythonSetup) -> PythonLockfileRequest:
    if not pytest.uses_lockfile:
        return PythonLockfileRequest.from_tool(pytest)

    # Even though we run each python_tests target in isolation, we need a single lockfile that
    # works with them all (and their transitive deps).
    #
    # This first computes the constraints for each individual `python_tests` target
    # (which will AND across each target in the closure). Then, it ORs all unique resulting
    # interpreter constraints. The net effect is that every possible Python interpreter used will
    # be covered.
    all_build_targets = await Get(UnexpandedTargets,
                                  AddressSpecs([DescendantAddresses("")]))
    transitive_targets_per_test = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in all_build_targets if PythonTestFieldSet.is_applicable(tgt))
    unique_constraints = {
        InterpreterConstraints.create_from_targets(transitive_targets.closure,
                                                   python_setup)
        for transitive_targets in transitive_targets_per_test
    }
    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(unique_constraints))
    return PythonLockfileRequest.from_tool(
        pytest, constraints
        or InterpreterConstraints(python_setup.interpreter_constraints))
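
Note: the comment describes ANDing constraints within each test's transitive closure, then ORing the per-test results. Modeling a constraint as the set of interpreter versions it admits makes that arithmetic concrete (a toy model; the real `InterpreterConstraints` operate on requirement specifiers):

from functools import reduce

# Toy model: each constraint is the set of interpreter versions it admits.
closures = [
    [{"3.7", "3.8"}, {"3.8", "3.9"}],  # test A's closure: AND -> {"3.8"}
    [{"3.9"}],                         # test B's closure: AND -> {"3.9"}
]

# AND across each closure (intersection), then OR across tests (union).
per_test = [reduce(set.intersection, closure) for closure in closures]
lockfile_constraints = set().union(*per_test)

assert lockfile_constraints == {"3.8", "3.9"}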
Example #10
async def map_first_party_modules_to_addresses(
) -> FirstPartyModuleToAddressMapping:
    all_expanded_targets = await Get(Targets,
                                     AddressSpecs([DescendantAddresses("")]))
    candidate_targets = tuple(tgt for tgt in all_expanded_targets
                              if tgt.has_field(PythonSources))
    stripped_sources_per_explicit_target = await MultiGet(
        Get(StrippedSourceFiles, SourceFilesRequest([tgt[PythonSources]]))
        for tgt in candidate_targets)

    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt, stripped_sources in zip(candidate_targets,
                                     stripped_sources_per_explicit_target):
        for stripped_f in stripped_sources.snapshot.files:
            module = PythonModule.create_from_stripped_path(
                PurePath(stripped_f)).module
            if module in modules_to_addresses:
                modules_with_multiple_owners.add(module)
            else:
                modules_to_addresses[module] = tgt.address

    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return FirstPartyModuleToAddressMapping(
        FrozenDict(sorted(modules_to_addresses.items())))
Example #11
async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:
    """Inspects COPY instructions in the Dockerfile for references to known packagable targets."""
    dockerfile_info = await Get(
        DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)
    )

    # Parse all putative target addresses.
    putative_addresses = await Get(
        Addresses,
        UnparsedAddressInputs(
            dockerfile_info.putative_target_addresses,
            owning_address=dockerfile_info.address,
        ),
    )

    # Get the target for those addresses that are known.
    directories = {address.spec_path for address in putative_addresses}
    all_addresses = await Get(Addresses, AddressSpecs(map(MaybeEmptySiblingAddresses, directories)))
    targets = await Get(
        Targets, Addresses((address for address in putative_addresses if address in all_addresses))
    )

    # Only keep those targets that we can "package".
    package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets))
    referenced_targets = (
        field_sets[0].address for field_sets in package.collection if len(field_sets) > 0
    )
    return InjectedDependencies(Addresses(referenced_targets))
Example #12
async def setup_mypy_lockfile(
    _: MyPyLockfileSentinel,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> PythonLockfileRequest:
    if not mypy.uses_lockfile:
        return PythonLockfileRequest.from_tool(mypy)

    constraints = mypy.interpreter_constraints
    if mypy.options.is_default("interpreter_constraints"):
        all_build_targets = await Get(UnexpandedTargets,
                                      AddressSpecs([DescendantAddresses("")]))
        all_transitive_targets = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in all_build_targets if MyPyFieldSet.is_applicable(tgt))
        unique_constraints = {
            InterpreterConstraints.create_from_targets(
                transitive_targets.closure, python_setup)
            for transitive_targets in all_transitive_targets
        }
        code_constraints = InterpreterConstraints(
            itertools.chain.from_iterable(unique_constraints))
        if code_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe):
            constraints = code_constraints

    return PythonLockfileRequest.from_tool(
        mypy,
        constraints,
        extra_requirements=first_party_plugins.requirement_strings)
Example #13
async def map_third_party_modules_to_addresses(
) -> ThirdPartyPythonModuleMapping:
    all_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: dict[str, Address] = {}
    modules_with_multiple_owners: DefaultDict[str,
                                              set[Address]] = defaultdict(set)
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        module_map = tgt.get(ModuleMappingField).value
        for python_req in tgt[PythonRequirementsField].value:
            modules = module_map.get(
                python_req.project_name,
                [python_req.project_name.lower().replace("-", "_")],
            )
            for module in modules:
                if module in modules_to_addresses:
                    modules_with_multiple_owners[module].update(
                        {modules_to_addresses[module], tgt.address})
                else:
                    modules_to_addresses[module] = tgt.address
    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return ThirdPartyPythonModuleMapping(
        mapping=FrozenDict(sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_owners.items())),
    )
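
Note: unlike the drop-only mappers, this one also records who the competing owners were. The delta over the sketch after Example #5 is a `defaultdict` accumulating every claimant of a contested key:

from collections import defaultdict

def build_mapping_with_ambiguities(pairs):
    """Like the earlier sketch, but ambiguous keys keep their full owner set."""
    mapping = {}
    ambiguous = defaultdict(set)
    for key, owner in pairs:
        if key in mapping:
            # Record both the incumbent and the challenger.
            ambiguous[key].update({mapping[key], owner})
        else:
            mapping[key] = owner
    for key in ambiguous:
        mapping.pop(key)
    return mapping, {k: tuple(sorted(v)) for k, v in sorted(ambiguous.items())}

mapping, ambiguous = build_mapping_with_ambiguities(
    [("requests", "3rdparty:a"), ("requests", "3rdparty:b")])
assert mapping == {}
assert ambiguous == {"requests": ("3rdparty:a", "3rdparty:b")}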
Example #14
async def setup_ipython_lockfile(
    _: IPythonLockfileSentinel, ipython: IPython, python_setup: PythonSetup
) -> PythonLockfileRequest:
    if not ipython.uses_lockfile:
        return PythonLockfileRequest.from_tool(ipython)

    # IPython is often run against the whole repo (`./pants repl ::`), but it is possible to run
    # on subsets of the codebase with disjoint interpreter constraints, such as
    # `./pants repl py2::` and then `./pants repl py3::`. Still, even with those subsets possible,
    # we need a single lockfile that works with all possible Python interpreters in use.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            [tgt[InterpreterConstraintsField]], python_setup
        )
        for tgt in all_build_targets
        if tgt.has_field(InterpreterConstraintsField)
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return PythonLockfileRequest.from_tool(
        ipython, constraints or InterpreterConstraints(python_setup.interpreter_constraints)
    )
Example #15
async def map_third_party_modules_to_addresses(
) -> ThirdPartyModuleToAddressMapping:
    all_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        module_map = tgt.get(ModuleMappingField).value or {}  # type: ignore[var-annotated]
        for python_req in tgt[PythonRequirementsField].value:
            modules = module_map.get(
                python_req.project_name,
                [python_req.project_name.lower().replace("-", "_")],
            )
            for module in modules:
                if module in modules_to_addresses:
                    modules_with_multiple_owners.add(module)
                else:
                    modules_to_addresses[module] = tgt.address
    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return ThirdPartyModuleToAddressMapping(
        FrozenDict(sorted(modules_to_addresses.items())))
Example #16
def test_get_target_data(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "foo/BUILD":
        dedent("""\
            target(name="bar", dependencies=[":baz"])

            files(name="baz", sources=["*.txt"])
            """),
        "foo/a.txt":
        "",
        "foo/b.txt":
        "",
    })
    tds = rule_runner.request(TargetDatas,
                              [AddressSpecs([DescendantAddresses("foo")])])
    assert tds == TargetDatas([
        TargetData(
            GenericTarget({"dependencies": [":baz"]},
                          Address("foo", target_name="bar")),
            None,
            ("foo:baz", ),
        ),
        TargetData(
            FilesGeneratorTarget({"sources": ["*.txt"]},
                                 Address("foo", target_name="baz")),
            ("foo/a.txt", "foo/b.txt"),
            tuple(),
        ),
    ])
Example #17
async def map_shell_files() -> ShellMapping:
    all_expanded_targets = await Get(Targets,
                                     AddressSpecs([DescendantAddresses("")]))
    shell_tgts = tuple(tgt for tgt in all_expanded_targets
                       if tgt.has_field(ShellSources))
    sources_per_target = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ShellSources]))
        for tgt in shell_tgts)

    files_to_addresses: dict[str, Address] = {}
    files_with_multiple_owners: DefaultDict[str,
                                            set[Address]] = defaultdict(set)
    for tgt, sources in zip(shell_tgts, sources_per_target):
        for f in sources.files:
            if f in files_to_addresses:
                files_with_multiple_owners[f].update(
                    {files_to_addresses[f], tgt.address})
            else:
                files_to_addresses[f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_f in files_with_multiple_owners:
        files_to_addresses.pop(ambiguous_f)

    return ShellMapping(
        mapping=FrozenDict(sorted(files_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(files_with_multiple_owners.items())),
    )
Example #18
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(
        os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](
        AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target: HydratedTarget) -> bool:
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (i.e., `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get("sources", None)
        return bool(target_sources and any_matches_filespec(
            paths=sources_set, spec=target_sources.filespec))

    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, ht.adaptor.address)
        for ht in candidate_targets)
    owners = Addresses(
        ht.adaptor.address
        for ht, bfa in zip(candidate_targets, build_file_addresses)
        if LegacyAddressMapper.any_is_declaring_file(bfa, sources_set)
        or owns_any_source(ht))
    return Owners(owners)
Example #19
async def find_nearest_go_module(
        request: FindNearestGoModuleRequest) -> ResolvedOwningGoModule:
    spec_path = request.spec_path
    candidate_targets = await Get(
        UnexpandedTargets,
        AddressSpecs([
            AscendantAddresses(spec_path),
            MaybeEmptySiblingAddresses(spec_path)
        ]),
    )
    go_module_targets = [
        tgt for tgt in candidate_targets if tgt.has_field(GoModuleSources)
    ]

    # Sort by address.spec_path in descending order so the nearest go_module target is sorted first.
    sorted_go_module_targets = sorted(go_module_targets,
                                      key=lambda tgt: tgt.address.spec_path,
                                      reverse=True)
    if sorted_go_module_targets:
        nearest_go_module_target = sorted_go_module_targets[0]
        return ResolvedOwningGoModule(
            module_address=nearest_go_module_target.address)
    else:
        # TODO: Consider eventually requiring all go_package's to associate with a go_module.
        return ResolvedOwningGoModule(module_address=None)
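
Note: sorting by `spec_path` in reverse and taking the first element works because every candidate here is an ancestor directory of the request's path, so the longest (equivalently, lexicographically last) candidate is the nearest one. The same selection, stdlib-only with hypothetical names:

def nearest_owner(spec_path, module_dirs):
    """Pick the deepest directory in `module_dirs` containing `spec_path`.

    Every surviving candidate is an ancestor of (or equal to) `spec_path`,
    so the longest string is the deepest, i.e. the nearest owner.
    """
    candidates = [d for d in module_dirs
                  if d == "" or spec_path == d or spec_path.startswith(d + "/")]
    return max(candidates, key=len, default=None)

assert nearest_owner("src/go/app/pkg", {"", "src/go", "src/go/app"}) == "src/go/app"
assert nearest_owner("docs", {"src/go"}) is None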
Example #20
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    hydrated_sources = await Get(HydratedSources,
                                 HydrateSourcesRequest(request.sources_field))

    paths = OrderedSet(filename for filename in hydrated_sources.snapshot.files
                       if filename.endswith(".tf"))
    result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=hydrated_sources.snapshot.digest,
            paths=tuple(paths),
        ),
    )
    candidate_spec_paths = [
        line for line in result.stdout.decode("utf-8").split("\n") if line
    ]

    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidate_targets = await Get(
        Targets,
        AddressSpecs([
            MaybeEmptySiblingAddresses(path) for path in candidate_spec_paths
        ]))
    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1 terraform_module
    # per directory.
    terraform_module_addresses = [
        tgt.address for tgt in candidate_targets
        if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(terraform_module_addresses)
Example #21
def resolve_address_specs(
    rule_runner: RuleRunner,
    specs: Iterable[AddressSpec],
) -> Set[Address]:
    result = rule_runner.request(
        Addresses, [AddressSpecs(specs, filter_by_global_options=True)])
    return set(result)
Example #22
async def find_owners(
    build_configuration: BuildConfiguration,
    address_mapper: AddressMapper,
    changed_request: ChangedRequest,
) -> ChangedAddresses:
    owners = await Get[Owners](OwnersRequest(sources=changed_request.sources))

    # If the ChangedRequest does not require dependees, then we're done.
    if changed_request.include_dependees == IncludeDependeesOption.NONE:
        return ChangedAddresses(owners.addresses)

    # Otherwise: find dependees.
    all_addresses = await Get[Addresses](AddressSpecs(
        (DescendantAddresses(""), )))
    all_structs = [
        s.value for s in await MultiGet(Get[HydratedStruct](Address, a)
                                        for a in all_addresses)
    ]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(
        target_types_from_build_file_aliases(bfa), address_mapper, all_structs)
    if changed_request.include_dependees == IncludeDependeesOption.DIRECT:
        return ChangedAddresses(
            Addresses(graph.dependents_of_addresses(owners.addresses)))
    return ChangedAddresses(
        Addresses(graph.transitive_dependents_of_addresses(owners.addresses)))
Example #23
async def map_protobuf_files() -> ProtobufMapping:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets
    )

    stripped_files_to_addresses: dict[str, Address] = {}
    stripped_files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            if stripped_f in stripped_files_to_addresses:
                stripped_files_with_multiple_owners[stripped_f].update(
                    {stripped_files_to_addresses[stripped_f], tgt.address}
                )
            else:
                stripped_files_to_addresses[stripped_f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_stripped_f in stripped_files_with_multiple_owners:
        stripped_files_to_addresses.pop(ambiguous_stripped_f)

    return ProtobufMapping(
        mapping=FrozenDict(sorted(stripped_files_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v))) for k, v in sorted(stripped_files_with_multiple_owners.items())
        ),
    )
Example #24
    def _internal_scan_specs(self,
                             specs,
                             fail_fast=True,
                             missing_is_fatal=True):
        # TODO: This should really use `product_request`, but on the other hand, we need to
        # deprecate the entire `AddressMapper` interface anyway. See #4769.
        request = self._scheduler.execution_request(
            [BuildFileAddresses], [AddressSpecs(tuple(specs))])
        returns, throws = self._scheduler.execute(request)

        if throws:
            _, state = throws[0]
            if isinstance(state.exc, (AddressLookupError, ResolveError)):
                if missing_is_fatal:
                    raise self.BuildFileScanError(
                        "AddressSpec `{}` does not match any targets.\n{}".
                        format(self._specs_string(specs), str(state.exc)))
                else:
                    # NB: ignore Throws containing ResolveErrors because they are due to missing targets / files
                    return set()
            else:
                raise self.BuildFileScanError(str(state.exc))

        _, state = returns[0]
        if missing_is_fatal and not state.value.dependencies:
            raise self.BuildFileScanError(
                "AddressSpec `{}` does not match any targets.".format(
                    self._specs_string(specs)))

        return set(state.value.dependencies)
Example #25
    def _init_engine(self, local_store_dir: Optional[str] = None) -> None:
        if self._scheduler is not None:
            return

        options_bootstrapper = OptionsBootstrapper.create(
            args=["--pants-config-files=[]"])
        local_store_dir = (
            local_store_dir
            or options_bootstrapper.bootstrap_options.for_global_scope().local_store_dir)

        # NB: This uses the long form of initialization because it needs to directly specify
        # `cls.alias_groups` rather than having them be provided by bootstrap options.
        graph_session = EngineInitializer.setup_legacy_graph_extended(
            pants_ignore_patterns=[],
            local_store_dir=local_store_dir,
            build_file_imports_behavior=BuildFileImportsBehavior.error,
            native=init_native(),
            options_bootstrapper=options_bootstrapper,
            build_root=self.build_root,
            build_configuration=self.build_config(),
            build_ignore_patterns=None,
        ).new_session(zipkin_trace_v2=False, build_id="buildid_for_test")
        self._scheduler = graph_session.scheduler_session
        self._build_graph, self._address_mapper = graph_session.create_build_graph(
            Specs(address_specs=AddressSpecs([]),
                  filesystem_specs=FilesystemSpecs([])),
            self._build_root(),
        )
Example #26
async def restrict_conflicting_sources(
        ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    source_paths = await Get(
        Paths,
        PathGlobs(
            Sources.prefix_glob_with_dirpath(ptgt.path, glob)
            for glob in ptgt.owned_sources),
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    possible_owners = await Get(
        UnexpandedTargets,
        AddressSpecs(AscendantAddresses(d) for d in source_dirs))
    possible_owners_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(t.get(Sources)))
        for t in possible_owners)
    conflicting_targets = []
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec
                                   for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources")
                                      or [])  # type: ignore[arg-type]
        orig_sources_str = (f"[{explicit_srcs_str}]" if explicit_srcs_str else
                            f"the default for {ptgt.type_alias}")
        ptgt = ptgt.restrict_sources().add_comments([
            f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"
        ] + [f"#   - {caddr}" for caddr in conflicting_addrs])
    return DisjointSourcePutativeTarget(ptgt)
Example #27
    def inject_address_specs_closure(
        self,
        address_specs: Iterable[AddressSpec],
        fail_fast=None,
    ) -> Iterator[BuildFileAddress]:
        # Request loading of these address specs.
        for address in self._inject_address_specs(AddressSpecs(address_specs)):
            yield address
Example #28
    def create(
        cls,
        options_bootstrapper: OptionsBootstrapper,
        options: Options,
        session: SchedulerSession,
        build_root: Optional[str] = None,
    ) -> Specs:
        specs = cls.parse_specs(raw_specs=options.specs, build_root=build_root)
        changed_options = ChangedOptions.from_options(
            options.for_scope("changed"))

        logger.debug("specs are: %s", specs)
        logger.debug("changed_options are: %s", changed_options)

        if specs.provided and changed_options.provided:
            changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
            if specs.filesystem_specs and specs.address_specs:
                specs_description = "target and file arguments"
            elif specs.filesystem_specs:
                specs_description = "file arguments"
            else:
                specs_description = "target arguments"
            raise InvalidSpecConstraint(
                f"You used `{changed_name}` at the same time as using {specs_description}. Please "
                "use only one.")

        if not changed_options.provided:
            return specs

        scm = get_scm()
        if not scm:
            raise InvalidSpecConstraint(
                "The `--changed-*` options are not available without a recognized SCM (usually "
                "Git).")
        changed_request = ChangedRequest(
            sources=tuple(changed_options.changed_files(scm=scm)),
            dependees=changed_options.dependees,
        )
        (changed_addresses, ) = session.product_request(
            ChangedAddresses, [Params(changed_request, options_bootstrapper)])
        logger.debug("changed addresses: %s", changed_addresses)

        address_specs = []
        filesystem_specs = []
        for address in cast(ChangedAddresses, changed_addresses):
            if not address.is_base_target:
                # TODO: Should adjust Specs parsing to support parsing the disambiguated file
                # Address, which would bypass re-discovering owners.
                filesystem_specs.append(FilesystemLiteralSpec(
                    address.filename))
            else:
                address_specs.append(
                    SingleAddress(address.spec_path, address.target_name))

        return Specs(
            AddressSpecs(address_specs, filter_by_global_options=True),
            FilesystemSpecs(filesystem_specs),
        )
Example #29
    def __init__(
        self,
        root_dir: str,
        options_bootstrapper: OptionsBootstrapper,
        options: Options,
        build_config: BuildConfiguration,
        run_tracker: RunTracker,
        reporting: Reporting,
        graph_session: LegacyGraphSession,
        specs: Specs,
        exiter=sys.exit,
    ) -> None:
        """
        :param root_dir: The root directory of the pants workspace (aka the "build root").
        :param options: The global, pre-initialized Options instance.
        :param build_config: A pre-initialized BuildConfiguration instance.
        :param run_tracker: The global, pre-initialized/running RunTracker instance.
        :param reporting: The global, pre-initialized Reporting instance.
        :param graph_session: The graph session for this run.
        :param specs: The specs for this run, i.e. either the address or filesystem specs.
        :param func exiter: A function that accepts an exit code value and exits. (for tests, Optional)
        """
        self._root_dir = root_dir
        self._options_bootstrapper = options_bootstrapper
        self._options = options
        self._build_config = build_config
        self._run_tracker = run_tracker
        self._reporting = reporting
        self._graph_session = graph_session
        self._specs = specs
        self._exiter = exiter

        self._global_options = options.for_global_scope()
        self._fail_fast = self._global_options.fail_fast
        self._explain = self._global_options.explain
        self._kill_nailguns = self._global_options.kill_nailguns

        # V1 tasks do not understand FilesystemSpecs, so we eagerly convert them into AddressSpecs.
        if self._specs.filesystem_specs.dependencies:
            (owned_addresses,
             ) = self._graph_session.scheduler_session.product_request(
                 Addresses, [
                     Params(self._specs.filesystem_specs,
                            self._options_bootstrapper)
                 ])
            updated_address_specs = AddressSpecs(
                dependencies=tuple(
                    SingleAddress(a.spec_path, a.target_name)
                    for a in owned_addresses),
                tags=self._specs.address_specs.matcher.tags,
                exclude_patterns=self._specs.address_specs.matcher.exclude_patterns,
            )
            self._specs = Specs(
                address_specs=updated_address_specs,
                filesystem_specs=FilesystemSpecs([]),
            )
Example #30
async def determine_all_owned_sources() -> AllOwnedSources:
    all_tgts = await Get(UnexpandedTargets,
                         AddressSpecs([MaybeEmptyDescendantAddresses("")]))
    all_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt.get(Sources)))
        for tgt in all_tgts)
    return AllOwnedSources(
        itertools.chain.from_iterable(sources_paths.files
                                      for sources_paths in all_sources_paths))