Example #1
    def scan_addresses(self, root=None):
        if root:
            try:
                base_path = fast_relpath(root, self._build_root)
            except ValueError as e:
                raise self.InvalidRootError(e)
        else:
            base_path = ''

        addresses = set()
        for address in self._graph.inject_specs_closure(
            [DescendantAddresses(base_path)]):
            addresses.add(address)
        return addresses
Example #2
async def setup_pylint_lockfile(
    _: PylintLockfileSentinel,
    first_party_plugins: PylintFirstPartyPlugins,
    pylint: Pylint,
    python_setup: PythonSetup,
) -> PythonLockfileRequest:
    if not pylint.uses_lockfile:
        return PythonLockfileRequest.from_tool(pylint)

    # While Pylint will run in partitions, we need a single lockfile that works with every
    # partition. We must also consider any 3rd-party requirements used by 1st-party plugins.
    #
    # This first computes the constraints for each individual target, including its direct
    # dependencies (which will AND across each target in the closure). Then, it ORs all unique
    # resulting interpreter constraints. The net effect is that every possible Python interpreter
    # used will be covered.
    all_build_targets = await Get(UnexpandedTargets,
                                  AddressSpecs([DescendantAddresses("")]))
    relevant_targets = tuple(tgt for tgt in all_build_targets
                             if PylintFieldSet.is_applicable(tgt))
    direct_deps_per_target = await MultiGet(
        Get(UnexpandedTargets, DependenciesRequest(tgt.get(Dependencies)))
        for tgt in relevant_targets)

    unique_constraints = set()
    for tgt, direct_deps in zip(relevant_targets, direct_deps_per_target):
        constraints_fields = (t[InterpreterConstraintsField]
                              for t in (tgt, *direct_deps)
                              if t.has_field(InterpreterConstraintsField))
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                (*constraints_fields,
                 *first_party_plugins.interpreter_constraints_fields),
                python_setup,
            ))
    if not unique_constraints:
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            ))

    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(unique_constraints))
    return PythonLockfileRequest.from_tool(
        pylint,
        constraints
        or InterpreterConstraints(python_setup.interpreter_constraints),
        extra_requirements=first_party_plugins.requirement_strings,
    )
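The AND-then-OR merging described in the comment above can be illustrated with plain version strings. Below is a minimal sketch of the dedupe-then-flatten step, using hypothetical constraint tuples rather than the real `InterpreterConstraints` API:

import itertools

# Each tuple ANDs a target's own constraints with its direct deps' constraints.
per_target_constraints = [
    ("CPython>=3.6", "CPython<3.9"),  # hypothetical target A and its deps
    ("CPython>=3.7",),                # hypothetical target B
    ("CPython>=3.6", "CPython<3.9"),  # hypothetical target C, same as A
]
# Deduplicate the ANDed tuples, then OR them together by flattening, mirroring
# `InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))`.
unique_constraints = set(per_target_constraints)
merged = sorted(set(itertools.chain.from_iterable(unique_constraints)))
print(merged)  # ['CPython<3.9', 'CPython>=3.6', 'CPython>=3.7']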
Example #3
    def test_walk_descendants(self):
        self.assertEqual(
            {
                self.addr('//:root'): Struct(name='root', type_alias='struct'),
                self.addr('a/b:b'): self.a_b_target,
                self.addr('a/d:d'): Target(name='d', type_alias='target'),
                self.addr('a/d/e:e'): Target(name='e', type_alias='target'),
                self.addr('a/d/e:e-prime'): Struct(name='e-prime', type_alias='struct'),
            }, self.resolve_multi(DescendantAddresses('')))
Example #4
async def setup_user_lockfile_requests(
    requested: _SpecifiedUserResolves, python_setup: PythonSetup
) -> _UserLockfileRequests:
    # First, associate all resolves with their consumers.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    resolves_to_roots = defaultdict(list)
    for tgt in all_build_targets:
        if not tgt.has_field(PythonResolveField):
            continue
        tgt[PythonResolveField].validate(python_setup)
        resolve = tgt[PythonResolveField].value
        if resolve is None:
            continue
        resolves_to_roots[resolve].append(tgt.address)

    # Expand each of the specified resolves into its transitive closure.
    transitive_targets_per_resolve = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(resolves_to_roots[resolve]))
        for resolve in requested
    )
    pex_requirements_per_resolve = []
    interpreter_constraints_per_resolve = []
    for transitive_targets in transitive_targets_per_resolve:
        req_fields = []
        ic_fields = []
        for tgt in transitive_targets.closure:
            if tgt.has_field(PythonRequirementsField):
                req_fields.append(tgt[PythonRequirementsField])
            if tgt.has_field(InterpreterConstraintsField):
                ic_fields.append(tgt[InterpreterConstraintsField])
        pex_requirements_per_resolve.append(
            PexRequirements.create_from_requirement_fields(req_fields)
        )
        interpreter_constraints_per_resolve.append(
            InterpreterConstraints.create_from_compatibility_fields(ic_fields, python_setup)
        )

    requests = (
        PythonLockfileRequest(
            requirements.req_strings,
            interpreter_constraints,
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves_to_lockfiles[resolve],
        )
        for resolve, requirements, interpreter_constraints in zip(
            requested, pex_requirements_per_resolve, interpreter_constraints_per_resolve
        )
    )
    return _UserLockfileRequests(requests)
Example #5
    def scan_build_files(self, base_path):
        request = self._scheduler.execution_request(
            [BuildFilesCollection], [DescendantAddresses(base_path)])

        result = self._scheduler.execute(request)
        if result.error:
            raise result.error

        build_files_set = set()
        for _, state in result.root_products:
            for build_files in state.value.dependencies:
                build_files_set.update(
                    f.path for f in build_files.files_content.dependencies)

        return build_files_set
Example #6
  def scan_build_files(self, base_path):
    subject = DescendantAddresses(base_path)
    selector = SelectDependencies(BuildFiles, BuildDirs, field_types=(Dir,))
    request = self._scheduler.selection_request([(selector, subject)])

    result = self._engine.execute(request)
    if result.error:
      raise result.error

    build_files_set = set()
    for state in result.root_products.values():
      for build_files in state.value:
        build_files_set.update(f.path for f in build_files.files_content.dependencies)

    return build_files_set
Example #7
def assert_poetry_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pyproject_toml: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    pyproject_toml_relpath: str = "pyproject.toml",
) -> None:
    rule_runner.write_files({"BUILD": build_file_entry, pyproject_toml_relpath: pyproject_toml})
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)
Example #8
def assert_python_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    requirements_txt: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    requirements_txt_relpath: str = "requirements.txt",
) -> None:
    rule_runner.write_files({"BUILD": build_file_entry, requirements_txt_relpath: requirements_txt})
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)
Example #9
  def all_jar_libs(self):
    # NOTE: We always operate over 3rdparty::. This is somewhat arbitrary and could instead
    # live in configuration, but for now it is so universal that I will leave it hard-coded.
    # Note that since we don't mess with the actual target roots here, there should be no
    # side effects downstream from injecting more targets in the build graph (they will
    # either be unconnected nodes of the target root graph or they will have been pulled
    # in anyway).
    if self._all_jar_libs is None:
      build_graph = self.context.build_graph
      third_party_libs = set()
      for address in self.context.address_mapper.scan_specs([DescendantAddresses('3rdparty')]):
        build_graph.inject_address_closure(address)
        third_party_libs.add(build_graph.get_target(address))
      self._all_jar_libs = set(t for t in third_party_libs if isinstance(t, JarLibrary))
    return self._all_jar_libs
Example #10
def test_address_specs_more_specific() -> None:
    literal_addr = AddressLiteralSpec(path_component="foo/bar",
                                      target_component="baz")
    sibling_addresses = SiblingAddresses(directory="foo/bar")
    ascendant_addresses = AscendantAddresses(directory="foo/bar")
    descendant_addresses = DescendantAddresses(directory="foo/bar")

    assert literal_addr == AddressSpecs.more_specific(literal_addr, None)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      sibling_addresses)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      ascendant_addresses)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      descendant_addresses)
    assert literal_addr == AddressSpecs.more_specific(None, literal_addr)
    assert literal_addr == AddressSpecs.more_specific(sibling_addresses,
                                                      literal_addr)
    assert literal_addr == AddressSpecs.more_specific(ascendant_addresses,
                                                      literal_addr)
    assert literal_addr == AddressSpecs.more_specific(descendant_addresses,
                                                      literal_addr)

    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, None)
    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, ascendant_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, descendant_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        None, sibling_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        ascendant_addresses, sibling_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        descendant_addresses, sibling_addresses)

    assert ascendant_addresses == AddressSpecs.more_specific(
        ascendant_addresses, None)
    assert ascendant_addresses == AddressSpecs.more_specific(
        ascendant_addresses, descendant_addresses)
    assert ascendant_addresses == AddressSpecs.more_specific(
        None, ascendant_addresses)
    assert ascendant_addresses == AddressSpecs.more_specific(
        descendant_addresses, ascendant_addresses)

    assert descendant_addresses == AddressSpecs.more_specific(
        descendant_addresses, None)
    assert descendant_addresses == AddressSpecs.more_specific(
        None, descendant_addresses)
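The precedence this test pins down is total: `AddressLiteralSpec` beats `SiblingAddresses`, which beats `AscendantAddresses`, which beats `DescendantAddresses`, and `None` always loses. A minimal rank-based sketch consistent with these assertions (not the actual `AddressSpecs.more_specific` implementation):

# Hypothetical precedence table; lower rank means more specific.
_PRECEDENCE = {
    "AddressLiteralSpec": 0,
    "SiblingAddresses": 1,
    "AscendantAddresses": 2,
    "DescendantAddresses": 3,
}

def more_specific(spec1, spec2):
    # None is the least specific value of all.
    if spec1 is None:
        return spec2
    if spec2 is None:
        return spec1
    if _PRECEDENCE[type(spec1).__name__] <= _PRECEDENCE[type(spec2).__name__]:
        return spec1
    return spec2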
Example #11
    def parse_spec(self, spec: str) -> AddressSpec | FilesystemSpec:
        """Parse the given spec into an `AddressSpec` or `FilesystemSpec` object.

        :raises: CmdLineSpecParser.BadSpecError if the address selector could not be parsed.
        """
        if spec.endswith("::"):
            spec_path = spec[:-len("::")]
            return DescendantAddresses(
                directory=self._normalize_spec_path(spec_path))
        if spec.endswith(":"):
            spec_path = spec[:-len(":")]
            return SiblingAddresses(
                directory=self._normalize_spec_path(spec_path))
        if ":" in spec or "#" in spec:
            tgt_parts = spec.split(":", maxsplit=1)
            path_component = tgt_parts[0]
            if len(tgt_parts) == 1:
                target_component = None
                generated_parts = path_component.split("#", maxsplit=1)
                if len(generated_parts) == 1:
                    generated_component = None
                else:
                    path_component, generated_component = generated_parts
            else:
                generated_parts = tgt_parts[1].split("#", maxsplit=1)
                if len(generated_parts) == 1:
                    target_component = generated_parts[0]
                    generated_component = None
                else:
                    target_component, generated_component = generated_parts
            return AddressLiteralSpec(
                path_component=self._normalize_spec_path(path_component),
                target_component=target_component,
                generated_component=generated_component,
            )
        if spec.startswith("!"):
            return FileIgnoreSpec(spec[1:])
        if "*" in spec:
            return FileGlobSpec(spec)
        if PurePath(spec).suffix:
            return FileLiteralSpec(self._normalize_spec_path(spec))
        spec_path = self._normalize_spec_path(spec)
        if spec_path == ".":
            return DirLiteralSpec("")
        # Some paths that look like dirs can actually be files without extensions.
        if Path(self._root_dir, spec_path).is_file():
            return FileLiteralSpec(spec_path)
        return DirLiteralSpec(spec_path)
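A condensed view of the dispatch order above, as a self-contained classifier that returns labels instead of spec objects (the filesystem check for extensionless files is elided):

from pathlib import PurePath

def classify_spec(spec: str) -> str:
    # Mirrors the branch order of parse_spec above; labels only, for illustration.
    if spec.endswith("::"):
        return "DescendantAddresses"  # e.g. `src/python::`
    if spec.endswith(":"):
        return "SiblingAddresses"     # e.g. `src/python:`
    if ":" in spec or "#" in spec:
        return "AddressLiteralSpec"   # e.g. `src/python:lib`, `demo#gen`
    if spec.startswith("!"):
        return "FileIgnoreSpec"       # e.g. `!ignored.py`
    if "*" in spec:
        return "FileGlobSpec"         # e.g. `src/**/*.py`
    if PurePath(spec).suffix:
        return "FileLiteralSpec"      # e.g. `src/python/app.py`
    return "DirLiteralSpec"           # e.g. `src/python` (modulo the is_file() check)

assert classify_spec("::") == "DescendantAddresses"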
Example #12
def find_owners(build_configuration, address_mapper, owners_request):
  sources_set = OrderedSet(owners_request.sources)
  dirs_set = OrderedSet(dirname(source) for source in sources_set)

  # Walk up the buildroot looking for targets that would conceivably claim changed sources.
  candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
  candidate_targets = yield Get(HydratedTargets, Specs(candidate_specs))

  # Match the source globs against the expanded candidate targets.
  def owns_any_source(legacy_target):
    """Given a `HydratedTarget` instance, check if it owns the given source file."""
    target_kwargs = legacy_target.adaptor.kwargs()

    # Handle `sources`-declaring targets.
    # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
    # so we don't actually call `fileset.matches` here.
    # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
    #  1) having two implementations isn't great
    #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
    target_sources = target_kwargs.get('sources', None)
    if target_sources and any_matches_filespec(sources_set, target_sources.filespec):
      return True

    return False

  direct_owners = tuple(ht.adaptor.address
                        for ht in candidate_targets
                        if LegacyAddressMapper.any_is_declaring_file(ht.adaptor.address, sources_set) or
                           owns_any_source(ht))

  # If the OwnersRequest does not require dependees, then we're done.
  if owners_request.include_dependees == 'none':
    yield BuildFileAddresses(direct_owners)
  else:
    # Otherwise: find dependees.
    all_addresses = yield Get(BuildFileAddresses, Specs((DescendantAddresses(''),)))
    all_structs = yield [Get(HydratedStruct, Address, a.to_address()) for a in all_addresses]
    all_structs = [s.value for s in all_structs]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(target_types_from_build_file_aliases(bfa),
                                          address_mapper,
                                          all_structs)
    if owners_request.include_dependees == 'direct':
      yield BuildFileAddresses(tuple(graph.dependents_of_addresses(direct_owners)))
    else:
      assert owners_request.include_dependees == 'transitive'
      yield BuildFileAddresses(tuple(graph.transitive_dependents_of_addresses(direct_owners)))
Example #13
async def map_third_party_modules_to_addresses(
) -> ThirdPartyModuleToAddressMapping:
    all_targets = await Get[Targets](AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        for python_req in tgt[PythonRequirementsField].value:
            for module in python_req.modules:
                # NB: If >1 targets have the same module, we do not record the module, to avoid
                # ambiguity. (With three or more owners, though, this pop/re-add leaves the last
                # owner recorded; see the sketch after this example.)
                if module in modules_to_addresses:
                    modules_to_addresses.pop(module)
                else:
                    modules_to_addresses[module] = tgt.address
    return ThirdPartyModuleToAddressMapping(FrozenDict(modules_to_addresses))
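One subtlety in this pop-on-collision approach: with three or more owners of the same module, the first pop discards the module, but the next owner re-adds it. A tiny illustration with strings standing in for addresses (later revisions, e.g. Example #25, track ambiguous modules in a separate set instead):

# Three hypothetical targets all providing module "m".
modules_to_addresses = {}
for address in ("tgt_a", "tgt_b", "tgt_c"):
    if "m" in modules_to_addresses:
        modules_to_addresses.pop("m")
    else:
        modules_to_addresses["m"] = address
print(modules_to_addresses)  # {'m': 'tgt_c'}: the ambiguity goes unnoticed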
Example #14
async def dependees_goal(specified_addresses: Addresses,
                         options: DependeesOptions,
                         console: Console) -> Dependees:
    # Get every target in the project so that we can iterate over them to find their dependencies.
    all_targets = await Get[Targets](AddressSpecs([DescendantAddresses("")]))
    dependencies_per_target = await MultiGet(
        Get[Addresses](DependenciesRequest(tgt.get(Dependencies)))
        for tgt in all_targets)

    address_to_dependees = defaultdict(set)
    for tgt, dependencies in zip(all_targets, dependencies_per_target):
        for dependency in dependencies:
            address_to_dependees[dependency].add(tgt.address)

    # JSON should output each distinct specified target with its dependees, unlike the `text`
    # format, which flattens everything into a single set.
    if options.values.output_format == DependeesOutputFormat.json:
        json_result = {}
        for specified_address in specified_addresses:
            dependees = calculate_dependees(
                address_to_dependees, [specified_address],
                transitive=options.values.transitive)
            if options.values.closed:
                dependees.add(specified_address)
            json_result[specified_address.spec] = sorted(addr.spec
                                                         for addr in dependees)
        with options.line_oriented(console) as print_stdout:
            print_stdout(
                json.dumps(json_result,
                           indent=4,
                           separators=(",", ": "),
                           sort_keys=True))
        return Dependees(exit_code=0)

    # Filter `address_to_dependees` based on the specified addresses, and flatten it into a
    # single set.
    result_addresses = calculate_dependees(
        address_to_dependees,
        specified_addresses,
        transitive=options.values.transitive)
    if options.values.closed:
        result_addresses |= set(specified_addresses)

    with options.line_oriented(console) as print_stdout:
        for address in sorted(result_addresses):
            print_stdout(address)
    return Dependees(exit_code=0)
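`calculate_dependees` is not shown here. A plausible standalone sketch, assuming the `address_to_dependees` mapping built above and excluding the roots themselves (the `closed` option adds them back):

from collections import deque

def calculate_dependees(address_to_dependees, roots, *, transitive: bool):
    if not transitive:
        direct = set()
        for root in roots:
            direct |= address_to_dependees.get(root, set())
        return direct
    # Breadth-first walk over the reversed dependency edges.
    result, queue = set(), deque(roots)
    while queue:
        address = queue.popleft()
        for dependee in address_to_dependees.get(address, set()):
            if dependee not in result:
                result.add(dependee)
                queue.append(dependee)
    return result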
Example #15
    def parse_spec(self, spec: str) -> AddressSpec | FilesystemSpec:
        """Parse the given spec into an `AddressSpec` or `FilesystemSpec` object.

        :raises: CmdLineSpecParser.BadSpecError if the address selector could not be parsed.
        """
        if spec.endswith("::"):
            spec_path = spec[:-len("::")]
            return DescendantAddresses(
                directory=self._normalize_spec_path(spec_path))
        if spec.endswith(":"):
            spec_path = spec[:-len(":")]
            return SiblingAddresses(
                directory=self._normalize_spec_path(spec_path))
        if ":" in spec or "#" in spec:
            tgt_parts = spec.split(":", maxsplit=1)
            path_component = tgt_parts[0]
            if len(tgt_parts) == 1:
                target_component = None
                generated_parts = path_component.split("#", maxsplit=1)
                if len(generated_parts) == 1:
                    generated_component = None
                else:
                    path_component, generated_component = generated_parts
            else:
                generated_parts = tgt_parts[1].split("#", maxsplit=1)
                if len(generated_parts) == 1:
                    target_component = generated_parts[0]
                    generated_component = None
                else:
                    target_component, generated_component = generated_parts
            return AddressLiteralSpec(
                path_component=self._normalize_spec_path(path_component),
                target_component=target_component,
                generated_component=generated_component,
            )
        if spec.startswith("!"):
            return FilesystemIgnoreSpec(spec[1:])
        if "*" in spec:
            return FilesystemGlobSpec(spec)
        if PurePath(spec).suffix:
            return FilesystemLiteralSpec(self._normalize_spec_path(spec))
        spec_path = self._normalize_spec_path(spec)
        if Path(self._root_dir, spec_path).is_file():
            return FilesystemLiteralSpec(spec_path)
        # Else we apply address shorthand, i.e. `src/python/pants/util` ->
        # `src/python/pants/util:util`
        return AddressLiteralSpec(spec_path, None, None)
Example #16
def assert_pipenv_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pipfile_lock: dict,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    pipfile_lock_relpath: str = "Pipfile.lock",
) -> None:
    rule_runner.add_to_build_file("", f"{build_file_entry}\n")
    rule_runner.create_file(pipfile_lock_relpath, dumps(pipfile_lock))
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )

    assert {expected_file_dep, *expected_targets} == set(targets)
Example #17
  def parse_address_spec(self, spec: str) -> AddressSpec:
    """Parse the given spec into an `AddressSpec` object.

    :raises: CmdLineSpecParser.BadSpecError if the address selector could not be parsed.
    """

    if spec.endswith('::'):
      spec_path = spec[:-len('::')]
      return DescendantAddresses(self._normalize_spec_path(spec_path))
    elif spec.endswith(':'):
      spec_path = spec[:-len(':')]
      return SiblingAddresses(self._normalize_spec_path(spec_path))
    else:
      spec_parts = spec.rsplit(':', 1)
      spec_path = self._normalize_spec_path(spec_parts[0])
      name = spec_parts[1] if len(spec_parts) > 1 else os.path.basename(spec_path)
      return SingleAddress(spec_path, name)
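The final branch applies target-name shorthand: a bare path defaults to a target named after its last directory component. For example, using the standard library directly, since `SingleAddress` here just stores the pair:

import os

spec_path = "src/python/pants/util"
name = os.path.basename(spec_path)
assert name == "util"  # i.e. SingleAddress("src/python/pants/util", "util")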
Example #18
async def map_first_party_python_targets_to_modules(
    _: FirstPartyPythonTargetsMappingMarker, ) -> FirstPartyPythonMappingImpl:
    all_expanded_targets = await Get(Targets,
                                     AddressSpecs([DescendantAddresses("")]))
    python_targets = tuple(tgt for tgt in all_expanded_targets
                           if tgt.has_field(PythonSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[PythonSources]))
        for tgt in python_targets)

    modules_to_addresses: DefaultDict[str, list[Address]] = defaultdict(list)
    modules_with_multiple_implementations: DefaultDict[
        str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(python_targets,
                                     stripped_sources_per_target):
        for stripped_f in stripped_sources:
            module = PythonModule.create_from_stripped_path(
                PurePath(stripped_f)).module
            if module in modules_to_addresses:
                # We check if one of the targets is an implementation (.py file) and the other is
                # a type stub (.pyi file), which we allow. Otherwise, we have ambiguity.
                prior = modules_to_addresses[module]
                prior_is_type_stub = (len(prior) == 1
                                      and prior[0].filename.endswith(".pyi"))
                current_is_type_stub = tgt.address.filename.endswith(".pyi")
                if prior_is_type_stub ^ current_is_type_stub:
                    modules_to_addresses[module].append(tgt.address)
                else:
                    modules_with_multiple_implementations[module].update(
                        {*modules_to_addresses[module], tgt.address})
            else:
                modules_to_addresses[module].append(tgt.address)

    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_implementations:
        modules_to_addresses.pop(module)

    return FirstPartyPythonMappingImpl(
        mapping=FrozenDict((k, tuple(sorted(v)))
                           for k, v in sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_implementations.items())),
    )
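The XOR check above encodes the one-implementation-plus-one-stub rule: a module may be owned by a `.py` file and a `.pyi` file together, but two owners of the same kind are ambiguous. A condensed, self-contained restatement using file names in place of addresses:

def stub_pairing_ok(existing: list[str], candidate: str) -> bool:
    # True when the new owner pairs an implementation with a type stub.
    prior_is_type_stub = len(existing) == 1 and existing[0].endswith(".pyi")
    current_is_type_stub = candidate.endswith(".pyi")
    return prior_is_type_stub ^ current_is_type_stub

assert stub_pairing_ok(["util.py"], "util.pyi")       # impl + stub: allowed
assert not stub_pairing_ok(["util.py"], "util.py")    # two impls: ambiguous
assert not stub_pairing_ok(["util.pyi"], "util.pyi")  # two stubs: ambiguous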
Example #19
def test_get_target_data(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "foo/BUILD":
        dedent("""\
            target(name="bar", dependencies=[":baz"])

            files(name="baz", sources=["*.txt"])
            """),
        "foo/a.txt":
        "",
        "foo/b.txt":
        "",
    })
    tds = rule_runner.request(TargetDatas,
                              [AddressSpecs([DescendantAddresses("foo")])])
    assert list(tds) == [
        TargetData(
            GenericTarget({"dependencies": [":baz"]},
                          Address("foo", target_name="bar")),
            None,
            ("foo/a.txt:baz", "foo/b.txt:baz"),
        ),
        TargetData(
            FilesGeneratorTarget({"sources": ["*.txt"]},
                                 Address("foo", target_name="baz")),
            ("foo/a.txt", "foo/b.txt"),
            ("foo/a.txt:baz", "foo/b.txt:baz"),
        ),
        TargetData(
            FileTarget({"source": "a.txt"},
                       Address("foo",
                               relative_file_path="a.txt",
                               target_name="baz")),
            ("foo/a.txt", ),
            (),
        ),
        TargetData(
            FileTarget({"source": "b.txt"},
                       Address("foo",
                               relative_file_path="b.txt",
                               target_name="baz")),
            ("foo/b.txt", ),
            (),
        ),
    ]
Example #20
async def setup_black_lockfile(
        _: BlackLockfileSentinel, black: Black,
        python_setup: PythonSetup) -> PythonLockfileRequest:
    if not black.uses_lockfile:
        return PythonLockfileRequest.from_tool(black)

    constraints = black.interpreter_constraints
    if black.options.is_default("interpreter_constraints"):
        all_build_targets = await Get(UnexpandedTargets,
                                      AddressSpecs([DescendantAddresses("")]))
        code_constraints = InterpreterConstraints.create_from_targets(
            (tgt for tgt in all_build_targets
             if not tgt.get(SkipBlackField).value), python_setup)
        if code_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe):
            constraints = code_constraints

    return PythonLockfileRequest.from_tool(black, constraints)
Example #21
def test_address_specs_deduplication(
        address_specs_rule_runner: RuleRunner) -> None:
    """When multiple specs cover the same address, we should deduplicate to one single Address."""
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file("demo",
                                                "mock_tgt(sources=['f.txt'])")
    # We also include a file address to ensure that it is included in the result.
    specs = [
        AddressLiteralSpec("demo", "demo"),
        AddressLiteralSpec("demo/f.txt", "demo"),
        SiblingAddresses("demo"),
        DescendantAddresses("demo"),
        AscendantAddresses("demo"),
    ]
    assert resolve_address_specs(address_specs_rule_runner, specs) == {
        Address("demo"),
        Address("demo", relative_file_path="f.txt"),
    }
Example #22
async def map_protobuf_to_python_modules(
    _: PythonProtobufMappingMarker, ) -> FirstPartyPythonMappingImpl:
    all_expanded_targets = await Get(Targets,
                                     AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets
                             if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets)

    # NB: There should be only one address per module, else it's ambiguous.
    modules_to_addresses: dict[str, tuple[Address]] = {}
    modules_with_multiple_owners: DefaultDict[str,
                                              set[Address]] = defaultdict(set)

    def add_module(module: str, tgt: Target) -> None:
        if module in modules_to_addresses:
            modules_with_multiple_owners[module].update(
                {*modules_to_addresses[module], tgt.address})
        else:
            modules_to_addresses[module] = (tgt.address, )

    for tgt, stripped_sources in zip(protobuf_targets,
                                     stripped_sources_per_target):
        for stripped_f in stripped_sources:
            # NB: We don't consider the MyPy plugin, which generates `_pb2.pyi`. The stubs end up
            # sharing the same module as the implementation `_pb2.py`. Because both generated files
            # come from the same original Protobuf target, we're covered.
            add_module(proto_path_to_py_module(stripped_f, suffix="_pb2"), tgt)
            if tgt.get(ProtobufGrpcToggle).value:
                add_module(
                    proto_path_to_py_module(stripped_f, suffix="_pb2_grpc"),
                    tgt)

    # Remove modules with ambiguous owners.
    for ambiguous_module in modules_with_multiple_owners:
        modules_to_addresses.pop(ambiguous_module)

    return FirstPartyPythonMappingImpl(
        mapping=FrozenDict(sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_owners.items())),
    )
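`proto_path_to_py_module` is taken as given above. A minimal sketch consistent with protoc's naming scheme, where `foo/bar.proto` generates module `foo.bar_pb2` (an assumption; the real helper may handle more edge cases):

from pathlib import PurePath

def proto_path_to_py_module(stripped_path: str, *, suffix: str) -> str:
    # foo/bar.proto -> foo.bar + suffix
    return PurePath(stripped_path).with_suffix("").as_posix().replace("/", ".") + suffix

assert proto_path_to_py_module("foo/bar.proto", suffix="_pb2") == "foo.bar_pb2"
assert proto_path_to_py_module("foo/bar.proto", suffix="_pb2_grpc") == "foo.bar_pb2_grpc"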
Example #23
    def parse_spec(self, spec):
        """Parse the given spec into a `specs.Spec` object.

    :param spec: a single spec string.
    :return: a single specs.Specs object.
    :raises: CmdLineSpecParser.BadSpecError if the address selector could not be parsed.
    """

        if spec.endswith('::'):
            spec_path = spec[:-len('::')]
            return DescendantAddresses(self._normalize_spec_path(spec_path))
        elif spec.endswith(':'):
            spec_path = spec[:-len(':')]
            return SiblingAddresses(self._normalize_spec_path(spec_path))
        else:
            spec_parts = spec.rsplit(':', 1)
            return SingleAddress(
                self._normalize_spec_path(spec_parts[0]),
                spec_parts[1] if len(spec_parts) > 1 else None)
Example #24
def assert_python_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    requirements_txt: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    requirements_txt_relpath: str = "requirements.txt",
) -> None:
    rule_runner.add_to_build_file("", f"{build_file_entry}\n")
    rule_runner.create_file(requirements_txt_relpath, requirements_txt)
    targets = rule_runner.request_product(
        Targets,
        [
            Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([])),
            create_options_bootstrapper(),
        ],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)
Example #25
async def map_third_party_modules_to_addresses(
) -> ThirdPartyModuleToAddressMapping:
    all_targets = await Get[Targets](AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        for python_req in tgt[PythonRequirementsField].value:
            for module in python_req.modules:
                if module in modules_to_addresses:
                    modules_with_multiple_owners.add(module)
                else:
                    modules_to_addresses[module] = tgt.address
    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return ThirdPartyModuleToAddressMapping(
        FrozenDict(sorted(modules_to_addresses.items())))
Example #26
def test_address_specs_deduplication(address_specs_rule_runner: RuleRunner) -> None:
    """When multiple specs cover the same address, we should deduplicate to one single Address."""
    address_specs_rule_runner.write_files(
        {"demo/f.txt": "", "demo/BUILD": "generator(sources=['f.txt'])"}
    )
    specs = [
        AddressLiteralSpec("demo"),
        SiblingAddresses("demo"),
        DescendantAddresses("demo"),
        AscendantAddresses("demo"),
        # We also include targets generated from `demo` to ensure that the final result has both
        # the generator and its generated targets.
        AddressLiteralSpec("demo", None, "f.txt"),
        AddressLiteralSpec("demo/f.txt"),
    ]
    assert resolve_address_specs(address_specs_rule_runner, specs) == {
        Address("demo"),
        Address("demo", generated_name="f.txt"),
        Address("demo", relative_file_path="f.txt"),
    }
Example #27
  def test_globbed_non_test_target(self):
    bfaddr = BuildFileAddress(None, 'bin', 'some/dir')
    target_adaptor = PythonBinaryAdaptor(type_alias='python_binary')
    with self.captured_logging(logging.INFO):
      result = run_rule(
        coordinator_of_tests,
        HydratedTarget(bfaddr.to_address(), target_adaptor, ()),
        UnionMembership(union_rules={TestTarget: [PythonTestsAdaptor]}),
        AddressProvenanceMap(bfaddr_to_spec={
          bfaddr: DescendantAddresses(directory='some/dir')
        }),
        {
          (TestResult, PythonTestsAdaptor):
            lambda _: TestResult(status=Status.SUCCESS, stdout='foo', stderr=''),
        })

      self.assertEqual(
        result,
        AddressAndTestResult(bfaddr.to_address(), None)
      )
Example #28
async def setup_flake8_lockfile(
    _: Flake8LockfileSentinel, flake8: Flake8, python_setup: PythonSetup
) -> PythonLockfileRequest:
    if not flake8.uses_lockfile:
        return PythonLockfileRequest.from_tool(flake8)

    # While Flake8 will run in partitions, we need a single lockfile that works with every
    # partition.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    unique_constraints = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in all_build_targets
        if Flake8FieldSet.is_applicable(tgt)
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return PythonLockfileRequest.from_tool(
        flake8, constraints or InterpreterConstraints(python_setup.interpreter_constraints)
    )
Example #29
async def map_third_party_modules_to_addresses() -> ThirdPartyModuleToAddressMapping:
    all_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        module_map = tgt.get(ModuleMappingField).value or {}  # type: ignore[var-annotated]
        for python_req in tgt[PythonRequirementsField].value:
            modules = module_map.get(
                python_req.project_name, [python_req.project_name.lower().replace("-", "_")],
            )
            for module in modules:
                if module in modules_to_addresses:
                    modules_with_multiple_owners.add(module)
                else:
                    modules_to_addresses[module] = tgt.address
    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return ThirdPartyModuleToAddressMapping(FrozenDict(sorted(modules_to_addresses.items())))
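The fallback in `module_map.get(...)` derives a default module name from the requirement's project name by the common lower-case, dash-to-underscore convention. The project names below are illustrative:

def default_module_name(project_name: str) -> str:
    # e.g. "Django-Extensions" -> "django_extensions"
    return project_name.lower().replace("-", "_")

assert default_module_name("ansicolors") == "ansicolors"
assert default_module_name("Django-Extensions") == "django_extensions"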
Example #30
async def analyze_import_path_to_package_mapping(
) -> GoImportPathToPackageMapping:
    mapping: dict[str, list[Address]] = defaultdict(list)

    all_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    for tgt in all_targets:
        if not tgt.has_field(GoImportPath):
            continue

        # Note: This will usually skip `go_package` targets, since they need analysis to infer
        # the import path, and there is no way in the engine to attach inferred values as fields.
        import_path = tgt[GoImportPath].value
        if not import_path:
            continue

        mapping[import_path].append(tgt.address)

    frozen_mapping = FrozenDict(
        {ip: tuple(tgts)
         for ip, tgts in mapping.items()})
    return GoImportPathToPackageMapping(mapping=frozen_mapping)