Example #1
async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports:
    python_interpreter, script_digest, stripped_sources = await MultiGet(
        Get(PythonExecutable, PexInterpreterConstraints, request.interpreter_constraints),
        Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", _SCRIPT.encode())])),
        Get(StrippedSourceFiles, SourceFilesRequest([request.sources])),
    )
    input_digest = await Get(
        Digest, MergeDigests([script_digest, stripped_sources.snapshot.digest])
    )
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                python_interpreter.path,
                "./__parse_python_imports.py",
                *stripped_sources.snapshot.files,
            ],
            input_digest=input_digest,
            description=f"Determine Python imports for {request.sources.address}",
            level=LogLevel.DEBUG,
        ),
    )
    explicit_imports, _, string_imports = process_result.stdout.decode().partition("--")
    return ParsedPythonImports(
        explicit_imports=FrozenOrderedSet(explicit_imports.strip().splitlines()),
        string_imports=FrozenOrderedSet(string_imports.strip().splitlines()),
    )
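
The rule above depends on a simple plain-text contract with the helper script: explicit imports, a `--` separator, then string imports, one per line. A minimal sketch of the parsing step with a made-up stdout value:

stdout = "os\nre\n--\npkg.resource\n"  # hypothetical script output
explicit, _, string = stdout.partition("--")
assert explicit.strip().splitlines() == ["os", "re"]
assert string.strip().splitlines() == ["pkg.resource"]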
Example #2
def find_artifact_mapping(
    import_name: str,
    mapping: ThirdPartyPackageToArtifactMapping,
    available_artifacts: AvailableThirdPartyArtifacts,
) -> FrozenOrderedSet[Address]:
    imp_parts = import_name.split(".")
    current_node = mapping.mapping_root

    found_nodes = []
    for imp_part in imp_parts:
        child_node_opt = current_node.find_child(imp_part)
        if not child_node_opt:
            break
        found_nodes.append(child_node_opt)
        current_node = child_node_opt

    if not found_nodes:
        return FrozenOrderedSet()

    # If the length of the found nodes equals the number of parts of the package path, then there
    # is an exact match.
    if len(found_nodes) == len(imp_parts):
        addresses = available_artifacts.addresses_for_coordinates(
            found_nodes[-1].coordinates)
        return addresses

    # Otherwise, walk the found nodes in reverse and use the coordinates of the first
    # node marked `recursive`.
    for found_node in reversed(found_nodes):
        if found_node.recursive:
            addresses = available_artifacts.addresses_for_coordinates(
                found_node.coordinates)
            return addresses

    # Nothing matched, so return an empty result.
    return FrozenOrderedSet()
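
A hedged sketch of the longest-prefix walk this function performs. `Node` below is a hypothetical stand-in, not the real Pants trie class:

# Hypothetical stand-in for the mapping's trie node.
class Node:
    def __init__(self, recursive: bool = False):
        self.children: dict = {}
        self.recursive = recursive

    def find_child(self, part: str):
        return self.children.get(part)

root = Node()
example = Node(recursive=True)
root.children["com"] = Node()
root.children["com"].children["example"] = example

# "com.example.util.Strings" has no exact entry; the walk stops after
# "com.example", whose `recursive` flag claims the whole subtree.
node, found = root, []
for part in "com.example.util.Strings".split("."):
    child = node.find_child(part)
    if not child:
        break
    found.append(child)
    node = child
assert found[-1] is example and example.recursive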
Example #3
    def test_target_types(self):
        def target_types():
            return [DummyTarget, DummyTarget2]

        with self.create_register(
                target_types=target_types) as backend_package:
            load_backend(self.bc_builder, backend_package, is_v1_backend=True)
            assert self.bc_builder.create().target_types() == FrozenOrderedSet(
                [DummyTarget, DummyTarget2])

            load_backend(self.bc_builder, backend_package, is_v1_backend=False)
            assert self.bc_builder.create().target_types() == FrozenOrderedSet(
                [DummyTarget, DummyTarget2])

        class PluginTarget(Target):
            alias = "plugin_tgt"
            core_fields = ()

        def plugin_targets():
            return [PluginTarget]

        self.working_set.add(
            self.get_mock_plugin("new-targets",
                                 "0.0.1",
                                 target_types=plugin_targets))
        self.load_plugins(["new-targets"], is_v1_plugin=True)
        assert self.bc_builder.create().target_types() == FrozenOrderedSet(
            [DummyTarget, DummyTarget2, PluginTarget])
        self.load_plugins(["new-targets"], is_v1_plugin=False)
        assert self.bc_builder.create().target_types() == FrozenOrderedSet(
        [DummyTarget, DummyTarget2, PluginTarget])
Example #4
def test_duplicate_test_mains_different_files(rule_runner: RuleRunner) -> None:
    input_digest = rule_runner.make_snapshot(
        {
            "foo_test.go":
            dedent("""
                package foo

                func TestMain(m *testing.M) {
                }
                """),
            "bar_test.go":
            dedent("""
                package foo

                func TestMain(m *testing.M) {
                }
                """),
        },
    ).digest

    result = rule_runner.request(
        GeneratedTestMain,
        [
            GenerateTestMainRequest(
                input_digest,
                FrozenOrderedSet(["foo_test.go", "bar_test.go"]),
                FrozenOrderedSet(),
                "foo",
                Address("foo"),
            )
        ],
    )
    assert result.failed_exit_code_and_stderr is not None
    exit_code, stderr = result.failed_exit_code_and_stderr
    assert exit_code == 1
    assert "multiple definitions of TestMain" in stderr
Example #5
def test_trie_node_merge_basic() -> None:
    one = MutableTrieNode()
    one.insert("a/b/c", [Address("1")], recursive=True, first_party=False)
    one.insert("a/b/c/d", [Address("2")], recursive=False, first_party=False)
    two = MutableTrieNode()
    two.insert("a/b/c/d", [Address("3")], recursive=False, first_party=False)

    merged = FrozenTrieNode.merge([one.frozen(), two.frozen()])
    assert list(merged) == [
        (
            "a/b/c",
            True,
            FrozenDict(
                {DEFAULT_SYMBOL_NAMESPACE: FrozenOrderedSet([Address("1")])}),
            False,
        ),
        (
            "a/b/c/d",
            False,
            FrozenDict({
                DEFAULT_SYMBOL_NAMESPACE:
                FrozenOrderedSet([Address("2"), Address("3")])
            }),
            False,
        ),
    ]
Example #6
def test_duplicate_test_mains_different_files(rule_runner: RuleRunner) -> None:
    input_digest = rule_runner.make_snapshot(
        {
            "foo_test.go":
            dedent("""
                package foo

                func TestMain(m *testing.M) {
                }
                """),
            "bar_test.go":
            dedent("""
                package foo

                func TestMain(m *testing.M) {
                }
                """),
        },
    ).digest

    with pytest.raises(ExecutionError) as exc_info:
        rule_runner.request(
            GeneratedTestMain,
            [
                GenerateTestMainRequest(
                    input_digest,
                    FrozenOrderedSet(["foo_test.go", "bar_test.go"]),
                    FrozenOrderedSet(),
                    "foo",
                )
            ],
        )

    assert "multiple definitions of TestMain" in str(exc_info.value)
Example #7
async def transitive_targets(targets: Targets) -> TransitiveTargets:
    """Find all the targets transitively depended upon by the target roots.

    This uses iteration, rather than recursion, so that we can tolerate dependency cycles. Unlike a
    traditional BFS algorithm, we batch each round of traversals via `MultiGet` for improved
    performance / concurrency.
    """
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(targets)
    dependency_mapping: Dict[Address, Tuple[Address, ...]] = {}
    while queued:
        direct_dependencies = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in queued)

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps)
                 for deps in direct_dependencies),
            ))

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies)).difference(
                visited)
        visited.update(queued)

    transitive_targets = TransitiveTargets(tuple(targets),
                                           FrozenOrderedSet(visited))
    _detect_cycles(tuple(t.address for t in targets), dependency_mapping)
    return transitive_targets
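
The docstring's batched traversal can be demonstrated without the engine. In this synchronous sketch over a made-up adjacency dict, each loop iteration stands in for one `MultiGet` round, and subtracting `visited` is what makes cycles terminate:

deps = {"a": ["b", "c"], "b": ["c"], "c": ["a"]}  # note the cycle a -> c -> a
visited: set = set()
queued = {"a"}
while queued:
    direct = [deps.get(node, []) for node in queued]  # one batched round
    queued = {d for batch in direct for d in batch} - visited
    visited |= queued
assert visited == {"a", "b", "c"}  # terminated despite the cycle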
Example #8
def test_lockfile_validation(rule_runner: RuleRunner) -> None:
    """Check that we properly load and validate lockfile metadata for both types of locks.

    Note that we don't exhaustively test every source of lockfile failure nor the different options
    for `--invalid-lockfile-behavior`, as those are already tested in pex_requirements_test.py.
    """

    # We create a lockfile that claims it works with no requirements. It should fail when we try
    # to build a PEX with a requirement.
    lock_content = PythonLockfileMetadata.new(InterpreterConstraints(),
                                              set()).add_header_to_lockfile(
                                                  b"",
                                                  regenerate_command="regen",
                                                  delimeter="#")
    rule_runner.write_files({"lock.txt": lock_content.decode()})

    lockfile = Lockfile(
        "lock.txt",
        file_path_description_of_origin="a test",
        resolve_name="a",
        req_strings=FrozenOrderedSet("ansicolors"),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile)

    lockfile_content = LockfileContent(
        FileContent("lock.txt", lock_content),
        resolve_name="a",
        req_strings=FrozenOrderedSet("ansicolors"),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile_content)
Example #9
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(
        os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](
        AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target: HydratedTarget) -> bool:
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get("sources", None)
        return bool(target_sources) and any_matches_filespec(
            paths=sources_set, spec=target_sources.filespec)

    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, ht.adaptor.address)
        for ht in candidate_targets)
    owners = Addresses(
        ht.adaptor.address
        for ht, bfa in zip(candidate_targets, build_file_addresses)
        if LegacyAddressMapper.any_is_declaring_file(bfa, sources_set)
        or owns_any_source(ht))
    return Owners(owners)
Example #10
async def get_requirements(
    dep_owner: DependencyOwner,
    union_membership: UnionMembership,
    setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([dep_owner.exported_target.target.address]),
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )

    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    uneval_trans_excl = [
        tgt.get(Dependencies).unevaluated_transitive_excludes for tgt in transitive_targets.closure
    ]
    if uneval_trans_excl:
        nested_trans_excl = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed) for unparsed in uneval_trans_excl
        )
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(excludes for excludes in nested_trans_excl)
        )

    direct_deps_chained = FrozenOrderedSet(itertools.chain.from_iterable(direct_deps_tgts))
    direct_deps_with_excl = direct_deps_chained.difference(transitive_excludes)

    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in direct_deps_with_excl
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )
    return ExportedTargetRequirements(req_strs)
Example #11
    def from_tool(
        cls,
        subsystem: PythonToolRequirementsBase,
        interpreter_constraints: InterpreterConstraints | None = None,
        *,
        extra_requirements: Iterable[str] = (),
    ) -> PythonLockfileRequest:
        """Create a request for a dedicated lockfile for the tool.

        If the tool determines its interpreter constraints by using the constraints of user code,
        rather than the option `--interpreter-constraints`, you must pass the arg
        `interpreter_constraints`.
        """
        if not subsystem.uses_lockfile:
            return cls(
                FrozenOrderedSet(),
                InterpreterConstraints(),
                resolve_name=subsystem.options_scope,
                lockfile_dest=subsystem.lockfile,
            )
        return cls(
            requirements=FrozenOrderedSet((*subsystem.all_requirements, *extra_requirements)),
            interpreter_constraints=(
                interpreter_constraints
                if interpreter_constraints is not None
                else subsystem.interpreter_constraints
            ),
            resolve_name=subsystem.options_scope,
            lockfile_dest=subsystem.lockfile,
        )
Example #12
async def transitive_targets(
        request: TransitiveTargetsRequest) -> TransitiveTargets:
    """Find all the targets transitively depended upon by the target roots."""

    dependency_mapping = await Get(_DependencyMapping,
                                   _DependencyMappingRequest(request, True))

    # Apply any transitive excludes (`!!` ignores).
    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    unevaluated_transitive_excludes = []
    for t in (*dependency_mapping.roots_as_targets,
              *dependency_mapping.visited):
        unparsed = t.get(Dependencies).unevaluated_transitive_excludes
        if unparsed.values:
            unevaluated_transitive_excludes.append(unparsed)
    if unevaluated_transitive_excludes:
        nested_transitive_excludes = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed)
            for unparsed in unevaluated_transitive_excludes)
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(
                excludes for excludes in nested_transitive_excludes))

    return TransitiveTargets(
        tuple(dependency_mapping.roots_as_targets),
        FrozenOrderedSet(
            dependency_mapping.visited.difference(transitive_excludes)),
    )
Example #13
    def create(cls, rule_entries) -> RuleIndex:
        """Creates a RuleIndex with tasks indexed by their output type."""
        rules: OrderedSet[TaskRule] = OrderedSet()
        queries: OrderedSet[QueryRule] = OrderedSet()
        union_rules: OrderedSet[UnionRule] = OrderedSet()

        for entry in rule_entries:
            if isinstance(entry, TaskRule):
                rules.add(entry)
            elif isinstance(entry, UnionRule):
                union_rules.add(entry)
            elif isinstance(entry, QueryRule):
                queries.add(entry)
            elif hasattr(entry, "__call__"):
                rule = getattr(entry, "rule", None)
                if rule is None:
                    raise TypeError(
                        f"Expected function {entry} to be decorated with @rule."
                    )
                rules.add(rule)
            else:
                raise TypeError(
                    f"Rule entry {entry} had an unexpected type: {type(entry)}. Rules either "
                    "extend Rule or UnionRule, or are static functions decorated with @rule."
                )

        return RuleIndex(
            rules=FrozenOrderedSet(rules),
            queries=FrozenOrderedSet(queries),
            union_rules=FrozenOrderedSet(union_rules),
        )
Example #14
async def transitive_targets(
        request: TransitiveTargetsRequest) -> TransitiveTargets:
    """Find all the targets transitively depended upon by the target roots.

    This uses iteration, rather than recursion, so that we can tolerate dependency cycles. Unlike a
    traditional BFS algorithm, we batch each round of traversals via `MultiGet` for improved
    performance / concurrency.
    """
    roots_as_targets = await Get(Targets, Addresses(request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: Dict[Address, Tuple[Address, ...]] = {}
    while queued:
        direct_dependencies = await MultiGet(
            Get(
                Targets,
                DependenciesRequest(
                    tgt.get(Dependencies),
                    include_special_cased_deps=request.include_special_cased_deps,
                ),
            ) for tgt in queued)

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps)
                 for deps in direct_dependencies),
            ))

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies)).difference(
                visited)
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # subtargets being used, so we need to use `roots_as_targets` to have this expansion.
    _detect_cycles(tuple(t.address for t in roots_as_targets),
                   dependency_mapping)

    # Apply any transitive excludes (`!!` ignores).
    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    unevaluated_transitive_excludes = []
    for t in (*roots_as_targets, *visited):
        unparsed = t.get(Dependencies).unevaluated_transitive_excludes
        if unparsed.values:
            unevaluated_transitive_excludes.append(unparsed)
    if unevaluated_transitive_excludes:
        nested_transitive_excludes = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed)
            for unparsed in unevaluated_transitive_excludes)
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(
                excludes for excludes in nested_transitive_excludes))

    return TransitiveTargets(
        tuple(roots_as_targets),
        FrozenOrderedSet(visited.difference(transitive_excludes)))
Example #15
def test_frozen_is_hashable() -> None:
    set1 = FrozenOrderedSet("abcabc")
    assert hash(set1) == hash(set1.copy())
    assert hash(set1) == hash(("a", "b", "c"))

    set2 = FrozenOrderedSet("abcd")
    assert hash(set1) != hash(set2)
    assert hash(set1) != hash(("a", "b", "c", "d"))
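
Because `FrozenOrderedSet` hashes like the tuple of its (deduplicated) items, it can be used wherever hashability is required. A small usage sketch, assuming the same `FrozenOrderedSet` import as the test above:

cache = {FrozenOrderedSet(["a", "b", "c"]): "computed"}
# "abcabc" deduplicates to the same items in the same order, so the lookup hits.
assert cache[FrozenOrderedSet("abcabc")] == "computed"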
Example #16
    def addresses_for_coordinates(
        self, coordinates: Iterable[UnversionedCoordinate]
    ) -> FrozenOrderedSet[Address]:
        candidate_artifact_addresses: Set[Address] = set()
        for coordinate in coordinates:
            candidates = self.artifacts.get(coordinate, FrozenOrderedSet())
            candidate_artifact_addresses.update(candidates)
        return FrozenOrderedSet(candidate_artifact_addresses)
Example #17
async def resolve_addresses_from_specs(specs: Specs) -> Addresses:
    includes, ignores = await MultiGet(
        Get(Addresses, RawSpecs, specs.includes),
        Get(Addresses, RawSpecs, specs.ignores),
    )
    # No matter what, ignores win out over includes. This avoids "specificity wars" and keeps our
    # semantics simple/predictable.
    return Addresses(FrozenOrderedSet(includes) - FrozenOrderedSet(ignores))
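
A hedged sketch of the difference operation used above. In the ordered-set implementation Pants uses, `-` preserves the left operand's order, which keeps the resulting `Addresses` deterministic (the address strings are illustrative):

includes = FrozenOrderedSet(["//:a", "//:b", "//:c"])
ignores = FrozenOrderedSet(["//:b"])
assert list(includes - ignores) == ["//:a", "//:c"]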
Example #18
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(name='flake8', requirements=['flake8==2.11.1'])
                python_requirement(name='colors', requirements=['ansicolors'])
                """
            ),
            "flake8-plugins/subdir1/util.py": "",
            "flake8-plugins/subdir1/BUILD": dedent(
                """\
                python_sources(
                    interpreter_constraints=['==3.9.*'],
                    dependencies=['flake8-plugins/subdir2']
                )
                """
            ),
            "flake8-plugins/subdir2/another_util.py": "",
            "flake8-plugins/subdir2/BUILD": "python_sources(interpreter_constraints=['==3.8.*'])",
            "flake8-plugins/plugin.py": "",
            "flake8-plugins/BUILD": dedent(
                """\
                python_sources(
                    dependencies=['//:flake8', '//:colors', "flake8-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=flake8-plugins",
            "--flake8-source-plugins=flake8-plugins/plugin.py",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    first_party_plugins = rule_runner.request(Flake8FirstPartyPlugins, [])
    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "flake8==2.11.1"]
    )
    assert first_party_plugins.interpreter_constraints_fields == FrozenOrderedSet(
        [
            InterpreterConstraintsField(ic, Address("", target_name="tgt"))
            for ic in (None, ["==3.9.*"], ["==3.8.*"])
        ]
    )
    assert (
        first_party_plugins.sources_digest
        == rule_runner.make_snapshot(
            {
                f"{Flake8FirstPartyPlugins.PREFIX}/plugin.py": "",
                f"{Flake8FirstPartyPlugins.PREFIX}/subdir1/util.py": "",
                f"{Flake8FirstPartyPlugins.PREFIX}/subdir2/another_util.py": "",
            }
        ).digest
    )
Example #19
async def find_owners(owners_request: OwnersRequest) -> Owners:
    # Determine which of the sources are live and which are deleted.
    sources_set_snapshot = await Get(Snapshot,
                                     PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_set_snapshot.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        # For live files, we use expanded Targets, which have file level precision but which are
        # only created for existing files. For deleted files we use UnexpandedTargets, which have
        # the original declared glob.
        candidate_targets: Iterable[Target]
        if live:
            if not live_dirs:
                continue
            sources_set = live_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in live_dirs)
            candidate_targets = await Get(Targets,
                                          AddressSpecs(candidate_specs))
        else:
            if not deleted_dirs:
                continue
            sources_set = deleted_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in deleted_dirs)
            candidate_targets = await Get(UnexpandedTargets,
                                          AddressSpecs(candidate_specs))

        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_targets)

        for candidate_tgt, bfa in zip(candidate_targets, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Example #20
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement_library(name='pylint', requirements=['pylint==2.6.2'])
                python_requirement_library(name='colors', requirements=['ansicolors'])
                """
            ),
            "pylint-plugins/subdir1/util.py": "",
            "pylint-plugins/subdir1/BUILD": dedent(
                """\
                python_library(
                    interpreter_constraints=['==3.5.*'],
                    dependencies=['pylint-plugins/subdir2']
                )
                """
            ),
            "pylint-plugins/subdir2/another_util.py": "",
            "pylint-plugins/subdir2/BUILD": ("python_library(interpreter_constraints=['==3.4.*'])"),
            "pylint-plugins/plugin.py": "",
            "pylint-plugins/BUILD": dedent(
                """\
                python_library(
                    dependencies=['//:pylint', '//:colors', "pylint-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=pylint-plugins",
            "--pylint-source-plugins=pylint-plugins/plugin.py",
        ]
    )
    first_party_plugins = rule_runner.request(PylintFirstPartyPlugins, [])
    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "pylint==2.6.2"]
    )
    assert first_party_plugins.interpreter_constraints_fields == FrozenOrderedSet(
        [
            InterpreterConstraintsField(ic, Address("", target_name="tgt"))
            for ic in (None, ["==3.5.*"], ["==3.4.*"])
        ]
    )
    assert (
        first_party_plugins.sources_digest
        == rule_runner.make_snapshot(
            {
                f"{PylintFirstPartyPlugins.PREFIX}/plugin.py": "",
                f"{PylintFirstPartyPlugins.PREFIX}/subdir1/util.py": "",
                f"{PylintFirstPartyPlugins.PREFIX}/subdir2/another_util.py": "",
            }
        ).digest
    )
Example #21
    def get_disambiguated(
        *,
        ambiguous: List[Address],
        ignores: List[Address],
        includes: Optional[List[Address]] = None,
    ) -> Optional[Address]:
        epd = ExplicitlyProvidedDependencies(
            includes=FrozenOrderedSet(includes or []), ignores=FrozenOrderedSet(ignores)
        )
        return epd.disambiguated_via_ignores(tuple(ambiguous))
Example #22
async def mypy_typecheck(
    request: MyPyRequest, mypy: MyPy, python_setup: PythonSetup
) -> TypecheckResults:
    if mypy.skip:
        return TypecheckResults([], typechecker_name="MyPy")

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together. We do this by setting the
    # `--python-version` option. Users may still set that option themselves as a safety valve,
    # in which case we warn them.
    config_files = await Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)
    config_content = await Get(DigestContents, Digest, config_files.snapshot.digest)
    python_version_configured = check_and_warn_if_python_version_configured(
        config=next(iter(config_content), None), args=mypy.args
    )

    # When determining how to batch by interpreter constraints, we must consider the entire
    # transitive closure to get the final resulting constraints.
    # TODO(#10863): Improve the performance of this.
    transitive_targets_per_field_set = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
        for field_set in request.field_sets
    )

    interpreter_constraints_to_transitive_targets = defaultdict(set)
    for transitive_targets in transitive_targets_per_field_set:
        interpreter_constraints = PexInterpreterConstraints.create_from_targets(
            transitive_targets.closure, python_setup
        ) or PexInterpreterConstraints(mypy.interpreter_constraints)
        interpreter_constraints_to_transitive_targets[interpreter_constraints].add(
            transitive_targets
        )

    partitions = []
    for interpreter_constraints, all_transitive_targets in sorted(
        interpreter_constraints_to_transitive_targets.items()
    ):
        combined_roots: OrderedSet[Target] = OrderedSet()
        combined_closure: OrderedSet[Target] = OrderedSet()
        for transitive_targets in all_transitive_targets:
            combined_roots.update(transitive_targets.roots)
            combined_closure.update(transitive_targets.closure)
        partitions.append(
            MyPyPartition(
                FrozenOrderedSet(combined_roots),
                FrozenOrderedSet(combined_closure),
                interpreter_constraints,
                python_version_already_configured=python_version_configured,
            )
        )

    partitioned_results = await MultiGet(
        Get(TypecheckResult, MyPyPartition, partition) for partition in partitions
    )
    return TypecheckResults(partitioned_results, typechecker_name="MyPy")
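
The batching described in the comment at the top of this rule reduces to a group-by over interpreter constraints. A minimal sketch with made-up target names and constraint strings:

from collections import defaultdict

targets = [("t1", "==3.8.*"), ("t2", "==3.9.*"), ("t3", "==3.8.*")]
partitions = defaultdict(list)
for name, constraints in targets:
    partitions[constraints].append(name)
# Each group would become one MyPyPartition, i.e. one mypy invocation.
assert dict(partitions) == {"==3.8.*": ["t1", "t3"], "==3.9.*": ["t2"]}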
Example #23
async def transitive_dependency_mapping(
        request: _DependencyMappingRequest) -> _DependencyMapping:
    """This uses iteration, rather than recursion, so that we can tolerate dependency cycles.

    Unlike a traditional BFS algorithm, we batch each round of traversals via `MultiGet` for
    improved performance / concurrency.
    """
    roots_as_targets = await Get(UnexpandedTargets,
                                 Addresses(request.tt_request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: dict[Address, tuple[Address, ...]] = {}
    while queued:
        direct_dependencies: tuple[Collection[Target], ...]
        if request.expanded_targets:
            direct_dependencies = await MultiGet(
                Get(
                    Targets,
                    DependenciesRequest(
                        tgt.get(Dependencies),
                        include_special_cased_deps=request.tt_request.include_special_cased_deps,
                    ),
                ) for tgt in queued)
        else:
            direct_dependencies = await MultiGet(
                Get(
                    UnexpandedTargets,
                    DependenciesRequest(
                        tgt.get(Dependencies),
                        include_special_cased_deps=request.tt_request.include_special_cased_deps,
                    ),
                ) for tgt in queued)

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps)
                 for deps in direct_dependencies),
            ))

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies)).difference(
                visited)
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # targets being used, so we need to use `roots_as_targets` to have this expansion.
    # TODO(#12871): Fix this to not be based on generated targets.
    _detect_cycles(tuple(t.address for t in roots_as_targets),
                   dependency_mapping)
    return _DependencyMapping(FrozenDict(dependency_mapping),
                              FrozenOrderedSet(visited), roots_as_targets)
Example #24
    def enumerate_python_versions(
        self, interpreter_universe: Iterable[str]
    ) -> FrozenOrderedSet[tuple[int, int, int]]:
        """Return a set of all plausible (major, minor, patch) tuples for all Python 2.7/3.x in the
        specified interpreter universe that matches this set of interpreter constraints.

        This also validates our assumptions around the `interpreter_universe`:

        - Python 2.7 is the only Python 2 version in the universe, if at all.
        - Python 3 is the last major release of Python, which the core devs have committed to in
          public several times.
        """
        if not self:
            return FrozenOrderedSet()

        minors = []
        for major_minor in interpreter_universe:
            major, minor = _major_minor_to_int(major_minor)
            if major == 2:
                if minor != 7:
                    raise AssertionError(
                        "Unexpected value in `[python-setup].interpreter_versions_universe`: "
                        f"{major_minor}. Expected the only Python 2 value to be '2.7', given that "
                        f"all other versions are unmaintained or do not exist."
                    )
                minors.append((2, minor))
            elif major == 3:
                minors.append((3, minor))
            else:
                raise AssertionError(
                    "Unexpected value in `[python-setup].interpreter_versions_universe`: "
                    f"{major_minor}. Expected to only include '2.7' and/or Python 3 versions, "
                    "given that Python 3 will be the last major Python version. Please open an "
                    "issue at https://github.com/pantsbuild/pants/issues/new if this is no longer "
                    "true."
                )

        valid_patches = FrozenOrderedSet(
            (major, minor, patch)
            for (major, minor) in sorted(minors)
            for patch in self._valid_patch_versions(major, minor)
        )

        if not valid_patches:
            raise ValueError(
                f"The interpreter constraints `{self}` are not compatible with any of the "
                "interpreter versions from `[python-setup].interpreter_versions_universe`.\n\n"
                "Please either change these interpreter constraints or update the "
                "`interpreter_versions_universe` to include the interpreters set in these "
                "constraints. Run `./pants help-advanced python-setup` for more information on the "
                "`interpreter_versions_universe` option."
            )

        return valid_patches
Example #25
def test_multiple_resolves() -> None:
    rule_runner = RuleRunner(
        rules=[
            setup_user_lockfile_requests,
            SubsystemRule(PythonSetup),
            QueryRule(UserGenerateLockfiles,
                      [RequestedPythonUserResolveNames]),
        ],
        target_types=[PythonRequirementTarget],
    )
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                python_requirement(
                    name='a',
                    requirements=['a'],
                    resolve='a',
                )
                python_requirement(
                    name='b',
                    requirements=['b'],
                    resolve='b',
                )
                """),
    })
    rule_runner.set_options(
        [
            "--python-resolves={'a': 'a.lock', 'b': 'b.lock'}",
            # Override interpreter constraints for 'b', but use default for 'a'.
            "--python-resolves-to-interpreter-constraints={'b': ['==3.7.*']}",
            "--python-enable-resolves",
            "--python-lockfile-generator=pex",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    result = rule_runner.request(UserGenerateLockfiles,
                                 [RequestedPythonUserResolveNames(["a", "b"])])
    assert set(result) == {
        GeneratePythonLockfile(
            requirements=FrozenOrderedSet(["a"]),
            interpreter_constraints=InterpreterConstraints(
                PythonSetup.default_interpreter_constraints),
            resolve_name="a",
            lockfile_dest="a.lock",
            use_pex=True,
        ),
        GeneratePythonLockfile(
            requirements=FrozenOrderedSet(["b"]),
            interpreter_constraints=InterpreterConstraints(["==3.7.*"]),
            resolve_name="b",
            lockfile_dest="b.lock",
            use_pex=True,
        ),
    }
Example #26
    def create(self) -> BuildConfiguration:
        registered_aliases = BuildFileAliases(
            objects=self._exposed_object_by_alias.copy(),
            context_aware_object_factories=self._exposed_context_aware_object_factory_by_alias.copy(),
        )
        return BuildConfiguration(
            registered_aliases=registered_aliases,
            optionables=FrozenOrderedSet(self._optionables),
            rules=FrozenOrderedSet(self._rules),
            union_rules=FrozenOrderedSet(self._union_rules),
            target_types=FrozenOrderedSet(self._target_types),
        )
Example #27
    def maybe_warn(
        *,
        ambiguous: List[Address],
        ignores: Optional[List[Address]] = None,
        includes: Optional[List[Address]] = None,
    ) -> None:
        caplog.clear()
        epd = ExplicitlyProvidedDependencies(
            includes=FrozenOrderedSet(includes or []), ignores=FrozenOrderedSet(ignores or [])
        )
        epd.maybe_warn_of_ambiguous_dependency_inference(
            tuple(ambiguous), Address("some_dir"), import_reference="file", context="foo"
        )
Example #28
def test_explicitly_provided_dependencies_remaining_after_disambiguation() -> None:
    # First check disambiguation via ignores (`!` and `!!`).
    addr = Address("", target_name="a")
    generated_addr = Address("", target_name="b", generated_name="gen")
    epd = ExplicitlyProvidedDependencies(
        Address("", target_name="input_tgt"),
        includes=FrozenOrderedSet(),
        ignores=FrozenOrderedSet([addr, generated_addr]),
    )

    def assert_disambiguated_via_ignores(ambiguous: List[Address],
                                         expected: Set[Address]) -> None:
        assert (epd.remaining_after_disambiguation(
            tuple(ambiguous), owners_must_be_ancestors=False) == expected)

    assert_disambiguated_via_ignores([], set())
    assert_disambiguated_via_ignores([addr], set())
    assert_disambiguated_via_ignores([generated_addr], set())
    assert_disambiguated_via_ignores([addr, generated_addr], set())
    # Generated targets are covered if their original target generator is in the ignores.
    assert_disambiguated_via_ignores(
        [Address("", target_name="a", generated_name="gen")], set())

    bad_tgt = Address("", target_name="x")
    bad_generated_tgt = Address("", target_name="x", generated_name="gen")
    assert_disambiguated_via_ignores([bad_tgt], {bad_tgt})
    assert_disambiguated_via_ignores([bad_generated_tgt], {bad_generated_tgt})
    assert_disambiguated_via_ignores([bad_generated_tgt, addr, generated_addr],
                                     {bad_generated_tgt})

    # Check disambiguation via `owners_must_be_ancestors`.
    epd = ExplicitlyProvidedDependencies(Address("src/lang/project"),
                                         FrozenOrderedSet(),
                                         FrozenOrderedSet())
    valid_candidates = {
        Address("src/lang/project", target_name="another_tgt"),
        Address("src/lang"),
        Address("src"),
        Address("", target_name="root_owner"),
    }
    invalid_candidates = {
        Address("tests/lang"),
        Address("src/another_lang"),
        Address("src/lang/another_project"),
        Address("src/lang/project/subdir"),
    }
    assert (epd.remaining_after_disambiguation(
        (*valid_candidates, *invalid_candidates),
        owners_must_be_ancestors=True) == valid_candidates)
Example #29
def find_python_imports(source_code: str, *, module_name: str) -> ParsedPythonImports:
    parse_result = parse_file(source_code)
    # If there were syntax errors, gracefully early return. This is more user friendly than
    # propagating the exception. Dependency inference simply won't be used for that file, and
    # it'll be up to the tool actually being run (e.g. Pytest or Flake8) to error.
    if parse_result is None:
        return ParsedPythonImports(FrozenOrderedSet(), FrozenOrderedSet())
    tree, ast_visitor_cls = parse_result
    ast_visitor = ast_visitor_cls(module_name)
    ast_visitor.visit(tree)
    return ParsedPythonImports(
        explicit_imports=FrozenOrderedSet(sorted(ast_visitor.explicit_imports)),
        inferred_imports=FrozenOrderedSet(sorted(ast_visitor.inferred_imports)),
    )
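
A minimal, standard-library-only sketch of the kind of AST walk the visitor above performs; this is not the Pants visitor, just the core idea for explicit imports:

import ast

def explicit_imports(source: str) -> list:
    found = []
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Import):
            found.extend(alias.name for alias in node.names)
        elif isinstance(node, ast.ImportFrom) and node.module:
            found.extend(f"{node.module}.{alias.name}" for alias in node.names)
    return found

assert explicit_imports("import os\nfrom foo import bar") == ["os", "foo.bar"]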
Example #30
async def transitive_targets_lite(request: TransitiveTargetsRequestLite) -> TransitiveTargets:
    roots_as_targets = await Get(Targets, Addresses(request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: Dict[Address, Tuple[Address, ...]] = {}
    while queued:
        direct_dependencies_addresses_per_tgt = await MultiGet(
            Get(Addresses, DependenciesRequestLite(tgt.get(Dependencies))) for tgt in queued
        )
        direct_dependencies_per_tgt = []
        for addresses_per_tgt in direct_dependencies_addresses_per_tgt:
            wrapped_tgts = await MultiGet(
                Get(WrappedTarget, Address, addr) for addr in addresses_per_tgt
            )
            direct_dependencies_per_tgt.append(
                tuple(wrapped_t.target for wrapped_t in wrapped_tgts)
            )

        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps) for deps in direct_dependencies_per_tgt),
            )
        )

        queued = FrozenOrderedSet(
            itertools.chain.from_iterable(direct_dependencies_per_tgt)
        ).difference(visited)
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # subtargets being used, so we need to use `roots_as_targets` to have this expansion.
    _detect_cycles(tuple(t.address for t in roots_as_targets), dependency_mapping)

    # Apply any transitive excludes (`!!` ignores).
    wrapped_transitive_excludes = await MultiGet(
        Get(
            WrappedTarget, AddressInput, AddressInput.parse(addr, relative_to=tgt.address.spec_path)
        )
        for tgt in (*roots_as_targets, *visited)
        for addr in tgt.get(Dependencies).unevaluated_transitive_excludes.values
    )
    transitive_excludes = FrozenOrderedSet(
        wrapped_t.target for wrapped_t in wrapped_transitive_excludes
    )

    return TransitiveTargets(
        tuple(roots_as_targets), FrozenOrderedSet(visited.difference(transitive_excludes))
    )