Example #1
def generate_scala_parser_lockfile_request(
    _: ScalaParserToolLockfileSentinel,
) -> GenerateJvmLockfileFromTool:
    return GenerateJvmLockfileFromTool(
        artifact_inputs=FrozenOrderedSet({
            f"org.scalameta:scalameta_{_PARSER_SCALA_BINARY_VERSION}:4.4.30",
            f"io.circe:circe-generic_{_PARSER_SCALA_BINARY_VERSION}:0.14.1",
            f"org.scala-lang:scala-library:{_PARSER_SCALA_VERSION}",
        }),
        artifact_option_name="n/a",
        lockfile_option_name="n/a",
        resolve_name=ScalaParserToolLockfileSentinel.resolve_name,
        read_lockfile_dest=DEFAULT_TOOL_LOCKFILE,
        write_lockfile_dest="src/python/pants/backend/scala/dependency_inference/scala_parser.lock",
        default_lockfile_resource=(
            "pants.backend.scala.dependency_inference",
            "scala_parser.lock",
        ),
    )
Example #2
def find_dependees(
    request: DependeesRequest, address_to_dependees: AddressToDependees
) -> Dependees:
    check = set(request.addresses)
    known_dependents: Set[Address] = set()
    while True:
        dependents = set(known_dependents)
        for target in check:
            target_dependees = address_to_dependees.mapping.get(target, FrozenOrderedSet())
            dependents.update(target_dependees)
        check = dependents - known_dependents
        if not check or not request.transitive:
            result = (
                dependents | set(request.addresses)
                if request.include_roots
                else dependents - set(request.addresses)
            )
            return Dependees(result)
        known_dependents = dependents
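The loop above computes transitive dependees by iterating to a fixed point: each pass folds the direct dependees of the current frontier into the running set and stops once no new addresses appear. A minimal standalone sketch of the same iteration, with plain strings standing in for `Address` objects (the mapping data is hypothetical):

mapping = {
    "lib": {"app", "tests"},  # direct dependees of "lib"
    "app": {"tests"},
}

def transitive_dependees(roots: set[str]) -> set[str]:
    check = set(roots)
    known: set[str] = set()
    while check:
        dependees = set(known)
        for target in check:
            dependees.update(mapping.get(target, set()))
        check = dependees - known  # only newly discovered addresses get re-expanded
        known = dependees
    return known - roots

assert transitive_dependees({"lib"}) == {"app", "tests"}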
Example #3
def generate_java_parser_lockfile_request(
    _: JavaParserToolLockfileSentinel,
) -> GenerateJvmLockfileFromTool:
    return GenerateJvmLockfileFromTool(
        artifact_inputs=FrozenOrderedSet({
            "com.fasterxml.jackson.core:jackson-databind:2.12.4",
            "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.12.4",
            "com.github.javaparser:javaparser-symbol-solver-core:3.23.0",
        }),
        artifact_option_name="n/a",
        lockfile_option_name="n/a",
        resolve_name=JavaParserToolLockfileSentinel.resolve_name,
        read_lockfile_dest=DEFAULT_TOOL_LOCKFILE,
        write_lockfile_dest="src/python/pants/backend/java/dependency_inference/java_parser.lock",
        default_lockfile_resource=(
            "pants.backend.java.dependency_inference",
            "java_parser.lock",
        ),
    )
Example #4
class PylintSourcePlugin(Target):
    """A Pylint plugin loaded through source code.

    To load a source plugin:

        1. Write your plugin. See http://pylint.pycqa.org/en/latest/how_tos/plugins.html.
        2. Define a `pylint_source_plugin` target with the plugin's Python files included in the
            `sources` field.
        3. Add the parent directory of your target to the `root_patterns` option in the `[source]`
            scope. For example, if your plugin is at `build-support/pylint/custom_plugin.py`, add
            'build-support/pylint'. This is necessary for Pants to know how to tell Pylint to
            discover your plugin. See https://pants.readme.io/docs/source-roots.
        4. Add `load-plugins=$module_name` to your Pylint config file. For example, if your Python
            file is called `custom_plugin.py`, set `load-plugins=custom_plugin`. Set the `config`
            option in the `[pylint]` scope to point to your Pylint config file.
        5. Set the option `source_plugins` in the `[pylint]` scope to include this target's
            address, e.g. `source_plugins = ["build-support/pylint:plugin"]`.

    To instead load a third-party plugin, set the option `extra_requirements` in the `[pylint]`
    scope (see https://pants.readme.io/docs/python-linters-and-formatters). Set `load-plugins` in
    your config file, like you'd do with a source plugin.

    This target type is treated similarly to a `python_library` target. For example, Python linters
    and formatters will run on this target.

    You can include other targets in the `dependencies` field, so long as those targets are
    third-party dependencies or are located in the same directory or a subdirectory.

    Other targets can depend on this target. This allows you to write a `python_tests` target for
    this code.

    You can define the `provides` field to release this plugin as a distribution
    (https://pants.readme.io/docs/python-setup-py-goal).
    """

    alias = "pylint_source_plugin"
    core_fields = (
        *(FrozenOrderedSet(COMMON_PYTHON_FIELDS) - {Dependencies}),  # type: ignore[misc]
        PylintPluginDependencies,
        PylintPluginSources,
    )
Example #5
def test_validate_user_lockfiles(
    invalid_reqs: bool,
    invalid_constraints: bool,
    caplog,
) -> None:
    runtime_interpreter_constraints = (
        InterpreterConstraints(["==2.7.*"])
        if invalid_constraints
        else METADATA.valid_for_interpreter_constraints
    )
    req_strings = FrozenOrderedSet(
        ["bad-req"] if invalid_reqs else [str(r) for r in METADATA.requirements]
    )
    lockfile = Lockfile(
        file_path="lock.txt",
        file_path_description_of_origin="foo",
        resolve_name="a",
    )

    # Ignore validation if resolves are manually managed.
    assert not should_validate_metadata(
        lockfile,
        create_python_setup(InvalidLockfileBehavior.warn,
                            enable_resolves=False))

    validate_metadata(
        METADATA,
        runtime_interpreter_constraints,
        lockfile,
        req_strings,
        create_python_setup(InvalidLockfileBehavior.warn),
    )

    def contains(msg: str, if_: bool = True) -> None:
        assert (msg in caplog.text) is if_

    contains(
        "You are using the lockfile at lock.txt to install the resolve `a`")
    contains(
        "The targets depend on requirements that are not in the lockfile: ['bad-req']",
        if_=invalid_reqs,
    )
    contains("The targets use interpreter constraints",
             if_=invalid_constraints)
    contains("./pants generate-lockfiles --resolve=a`")
Example #6
    def complete_scopes(
            cls,
            scope_infos: Iterable[ScopeInfo]) -> FrozenOrderedSet[ScopeInfo]:
        """Expand a set of scopes to include scopes they deprecate.

        Also validates that scopes do not collide.
        """
        ret: OrderedSet[ScopeInfo] = OrderedSet()
        original_scopes: dict[str, ScopeInfo] = {}
        for si in sorted(scope_infos, key=lambda _si: _si.scope):
            if si.scope in original_scopes:
                raise cls.DuplicateScopeError(
                    f"Scope `{si.scope}` claimed by {si}, was also claimed "
                    f"by {original_scopes[si.scope]}.")
            original_scopes[si.scope] = si
            ret.add(si)
            if si.deprecated_scope:
                ret.add(dataclasses.replace(si, scope=si.deprecated_scope))
                original_scopes[si.deprecated_scope] = si
        return FrozenOrderedSet(ret)
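The deprecated-scope handling is the interesting branch: a `ScopeInfo` that declares a `deprecated_scope` yields a second entry under the old name, so options registered under either name reach the same subsystem. A hedged toy illustration of that expansion (the `ToyScopeInfo` dataclass is hypothetical, standing in for `ScopeInfo`):

from __future__ import annotations

import dataclasses

@dataclasses.dataclass(frozen=True)
class ToyScopeInfo:  # hypothetical stand-in for ScopeInfo
    scope: str
    deprecated_scope: str | None = None

si = ToyScopeInfo(scope="lint", deprecated_scope="lint-v1")
expanded = {si, dataclasses.replace(si, scope=si.deprecated_scope)}
assert {s.scope for s in expanded} == {"lint", "lint-v1"}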
Example #7
    def test_plugin_installs_alias(self):
        def reg_alias():
            return BuildFileAliases(
                objects={"FROMPLUGIN1": DummyObject1, "FROMPLUGIN2": DummyObject2},
            )

        self.working_set.add(self.get_mock_plugin("aliasdemo", "0.0.1", alias=reg_alias))

        # Start with no aliases.
        self.assert_empty()

        # Now load the plugin which defines aliases.
        self.load_plugins(["aliasdemo"])

        # Aliases now exist.
        build_configuration = self.bc_builder.create()
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(DummyObject1, registered_aliases.objects["FROMPLUGIN1"])
        self.assertEqual(DummyObject2, registered_aliases.objects["FROMPLUGIN2"])
        self.assertEqual(build_configuration.optionables, FrozenOrderedSet([DummySubsystem]))
Example #8
async def map_addresses_to_dependees() -> AddressToDependees:
    # Get every target in the project so that we can iterate over them to find their dependencies.
    all_expanded_targets, all_explicit_targets = await MultiGet(
        Get(Targets, AddressSpecs([DescendantAddresses("")])),
        Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")])),
    )
    all_targets = {*all_expanded_targets, *all_explicit_targets}
    dependencies_per_target = await MultiGet(
        Get(Addresses, DependenciesRequest(tgt.get(Dependencies))) for tgt in all_targets
    )

    address_to_dependees = defaultdict(set)
    for tgt, dependencies in zip(all_targets, dependencies_per_target):
        for dependency in dependencies:
            address_to_dependees[dependency].add(tgt.address)
    return AddressToDependees(
        FrozenDict(
            {addr: FrozenOrderedSet(dependees) for addr, dependees in address_to_dependees.items()}
        )
    )
Example #9
async def mypy_first_party_plugins(mypy: MyPy) -> MyPyFirstPartyPlugins:
    if not mypy.source_plugins:
        return MyPyFirstPartyPlugins(FrozenOrderedSet(), EMPTY_DIGEST, ())

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    sources = await Get(PythonSourceFiles,
                        PythonSourceFilesRequest(transitive_targets.closure))
    return MyPyFirstPartyPlugins(
        requirement_strings=requirements.req_strings,
        sources_digest=sources.source_files.snapshot.digest,
        source_roots=sources.source_roots,
    )
Example #10
def create_tool_lock(
    req_strings: list[str],
    *,
    default_lock: bool = False,
    uses_source_plugins: bool = False,
    uses_project_interpreter_constraints: bool = False,
) -> ToolDefaultLockfile | ToolCustomLockfile:
    common_kwargs = dict(
        req_strings=FrozenOrderedSet(req_strings),
        resolve_name="my_tool",
        uses_source_plugins=uses_source_plugins,
        uses_project_interpreter_constraints=uses_project_interpreter_constraints,
    )
    if default_lock:
        return ToolDefaultLockfile(
            file_content=FileContent("", b""),
            **common_kwargs,  # type: ignore[arg-type]
        )
    return ToolCustomLockfile(
        file_path="lock.txt",
        file_path_description_of_origin="",
        **common_kwargs,  # type: ignore[arg-type]
    )
Example #11
async def transitive_targets(addresses: Addresses) -> TransitiveTargets:
    """Given Addresses, kicks off recursion on expansion of TransitiveTargets.

    The TransitiveTarget dataclass represents a structure-shared graph, which we walk and flatten
    here. The engine memoizes the computation of TransitiveTarget, so when multiple
    TransitiveTargets objects are being constructed for multiple roots, their structure will be
    shared.
    """
    transitive_targets = await MultiGet(Get[TransitiveTarget](Address, a) for a in addresses)

    closure: OrderedSet[Target] = OrderedSet()
    to_visit = deque(transitive_targets)

    while to_visit:
        tt = to_visit.popleft()
        if tt.root in closure:
            continue
        closure.add(tt.root)
        to_visit.extend(tt.dependencies)

    return TransitiveTargets(tuple(tt.root for tt in transitive_targets), FrozenOrderedSet(closure))
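The walk above is a plain breadth-first traversal of a structure-shared graph: each `TransitiveTarget` carries its root plus the already-memoized `TransitiveTarget`s of its dependencies, and the `closure` membership check keeps shared subgraphs from being visited twice. A self-contained sketch of the same traversal with a toy node type (`Node` is hypothetical):

from collections import deque
from dataclasses import dataclass

@dataclass(frozen=True)
class Node:  # hypothetical stand-in for TransitiveTarget
    name: str
    dependencies: tuple["Node", ...] = ()

shared = Node("shared")
root = Node("root", (Node("left", (shared,)), Node("right", (shared,))))

closure: list[Node] = []
to_visit = deque([root])
while to_visit:
    node = to_visit.popleft()
    if node in closure:
        continue  # "shared" is reached via both branches but visited once
    closure.append(node)
    to_visit.extend(node.dependencies)

assert [n.name for n in closure] == ["root", "left", "right", "shared"]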
Example #12
def assert_lockfile_request(
    build_file: str,
    expected_ics: list[str],
    *,
    extra_expected_requirements: list[str] | None = None,
    extra_args: list[str] | None = None,
) -> None:
    rule_runner.write_files({"project/BUILD": build_file})
    rule_runner.set_options(
        ["--pylint-lockfile=lockfile.txt", *(extra_args or [])],
        env={"PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS": f"['{global_constraint}']"},
    )
    lockfile_request = rule_runner.request(PythonLockfileRequest, [PylintLockfileSentinel()])
    assert lockfile_request.interpreter_constraints == InterpreterConstraints(expected_ics)
    assert lockfile_request.requirements == FrozenOrderedSet(
        [
            Pylint.default_version,
            *Pylint.default_extra_requirements,
            *(extra_expected_requirements or ()),
        ]
    )
Example #13
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD": dedent(
            """\
            python_requirement_library(name='mypy', requirements=['mypy==0.81'])
            python_requirement_library(name='colors', requirements=['ansicolors'])
            """
        ),
        "mypy-plugins/subdir1/util.py": "",
        "mypy-plugins/subdir1/BUILD": "python_library(dependencies=['mypy-plugins/subdir2'])",
        "mypy-plugins/subdir2/another_util.py": "",
        "mypy-plugins/subdir2/BUILD": "python_library()",
        "mypy-plugins/plugin.py": "",
        "mypy-plugins/BUILD": dedent(
            """\
            python_library(
                dependencies=['//:mypy', '//:colors', "mypy-plugins/subdir1"]
            )
            """
        ),
    })
    rule_runner.set_options([
        "--source-root-patterns=mypy-plugins",
        "--mypy-source-plugins=mypy-plugins/plugin.py",
    ])
    first_party_plugins = rule_runner.request(MyPyFirstPartyPlugins, [])
    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "mypy==0.81"])
    assert first_party_plugins.sources_digest == rule_runner.make_snapshot({
        "mypy-plugins/plugin.py": "",
        "mypy-plugins/subdir1/util.py": "",
        "mypy-plugins/subdir2/another_util.py": "",
    }).digest
    assert first_party_plugins.source_roots == ("mypy-plugins",)
Example #14
    def from_strings(cls: type[_T],
                     *strings: str,
                     duplicates_must_match: bool = False) -> _T:
        """Takes all `KEY`/`KEY=VALUE` strings and dedupes by `KEY`.

        The last seen `KEY` wins in case of duplicates, unless `duplicates_must_match` is `True`, in
        which case all `VALUE`s must be equal, if present.
        """

        key_to_entry_and_value: dict[str, tuple[str, str | None]] = {}
        for entry in strings:
            key, has_value, value = entry.partition("=")
            if not duplicates_must_match:
                # Note that last entry with the same key wins.
                key_to_entry_and_value[key] = (entry,
                                               value if has_value else None)
            else:
                prev_entry, prev_value = key_to_entry_and_value.get(
                    key, (None, None))
                if prev_entry is None:
                    # Not seen before.
                    key_to_entry_and_value[key] = (entry, value
                                                   if has_value else None)
                elif not has_value:
                    # Seen before, no new value, so keep existing.
                    pass
                elif prev_value is None:
                    # Update value.
                    key_to_entry_and_value[key] = (entry, value)
                elif prev_value != value:
                    # Seen before with a different value.
                    raise ValueError(
                        f"{cls.__name__}: duplicated {key!r} with different values: "
                        f"{prev_value!r} != {value!r}.")

        deduped_entries = sorted(
            entry_and_value[0]
            for entry_and_value in key_to_entry_and_value.values())
        return cls(FrozenOrderedSet(deduped_entries))
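The "last seen KEY wins" rule from the docstring is easy to verify with a cut-down version of the non-strict branch; this hypothetical helper keeps only the dedup core:

def dedupe(*strings: str) -> list[str]:  # hypothetical, non-strict branch only
    by_key: dict[str, str] = {}
    for entry in strings:
        key, _, _ = entry.partition("=")
        by_key[key] = entry  # last entry with the same key wins
    return sorted(by_key.values())

assert dedupe("PATH=/bin", "LANG", "PATH=/usr/bin") == ["LANG", "PATH=/usr/bin"]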
Example #15
def assert_lockfile_request(
    build_file: str,
    expected_ics: list[str],
    *,
    extra_expected_requirements: list[str] | None = None,
    extra_args: list[str] | None = None,
) -> None:
    rule_runner.write_files({"project/BUILD": build_file, "project/f.py": ""})
    rule_runner.set_options(
        ["--flake8-lockfile=lockfile.txt", *(extra_args or [])],
        env={"PANTS_PYTHON_INTERPRETER_CONSTRAINTS": f"['{global_constraint}']"},
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    lockfile_request = rule_runner.request(GeneratePythonLockfile, [Flake8LockfileSentinel()])
    assert lockfile_request.interpreter_constraints == InterpreterConstraints(expected_ics)
    assert lockfile_request.requirements == FrozenOrderedSet(
        [
            Flake8.default_version,
            *Flake8.default_extra_requirements,
            *(extra_expected_requirements or ()),
        ]
    )
Example #16
    def test_transitive_targets(self) -> None:
        t1 = MockTarget({}, address=Address.parse(":t1"))
        t2 = MockTarget({Dependencies.alias: [t1.address]}, address=Address.parse(":t2"))
        d1 = MockTarget({Dependencies.alias: [t1.address]}, address=Address.parse(":d1"))
        d2 = MockTarget({Dependencies.alias: [t2.address]}, address=Address.parse(":d2"))
        d3 = MockTarget({}, address=Address.parse(":d3"))
        root = MockTarget(
            {Dependencies.alias: [d1.address, d2.address, d3.address]},
            address=Address.parse(":root"),
        )

        self.add_to_build_file(
            "",
            dedent(
                """\
                target(name='t1')
                target(name='t2', dependencies=[':t1'])
                target(name='d1', dependencies=[':t1'])
                target(name='d2', dependencies=[':t2'])
                target(name='d3')
                target(name='root', dependencies=[':d1', ':d2', ':d3'])
                """
            ),
        )

        direct_deps = self.request_single_product(Targets, DependenciesRequest(root[Dependencies]))
        assert direct_deps == Targets([d1, d2, d3])

        transitive_target = self.request_single_product(TransitiveTarget, WrappedTarget(root))
        assert transitive_target.root == root
        assert {
            dep_transitive_target.root for dep_transitive_target in transitive_target.dependencies
        } == {d1, d2, d3}

        transitive_targets = self.request_single_product(
            TransitiveTargets, Addresses([root.address, d2.address])
        )
        assert transitive_targets.roots == (root, d2)
        assert transitive_targets.closure == FrozenOrderedSet([root, d2, d1, d3, t2, t1])
Example #17
def identify_missing_ancestor_files(
        name: str, sources: Sequence[str]) -> FrozenOrderedSet[str]:
    """Return the paths of potentially missing ancestor files.

    NB: The sources are expected to not have had their source roots stripped.
    Therefore this function will consider superfluous files at and above the source roots
    (e.g., src/python/<name>, src/<name>). It is the caller's responsibility to filter these
    out if necessary.
    """
    packages: Set[str] = set()
    for source in sources:
        if not source.endswith(".py"):
            continue
        pkg_dir = os.path.dirname(source)
        if not pkg_dir or pkg_dir in packages:
            continue
        package = ""
        for component in pkg_dir.split(os.sep):
            package = os.path.join(package, component)
            packages.add(package)

    return FrozenOrderedSet(
        sorted({os.path.join(package, name) for package in packages} - set(sources))
    )
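A hedged usage sketch (the paths are hypothetical): candidates are generated at every ancestor directory of each `.py` source, and anything already present in `sources` is subtracted back out:

missing = identify_missing_ancestor_files(
    "__init__.py",
    ["src/python/proj/util/f.py", "src/python/proj/__init__.py"],
)
# Candidates at src, src/python, src/python/proj, and src/python/proj/util,
# minus the __init__.py that already exists in sources:
assert set(missing) == {
    "src/__init__.py",
    "src/python/__init__.py",
    "src/python/proj/util/__init__.py",
}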
Example #18
async def run_setup_pys(
    targets_with_origins: TargetsWithOrigins,
    options: SetupPyOptions,
    console: Console,
    python_setup: PythonSetup,
    distdir: DistDir,
    workspace: Workspace,
) -> SetupPy:
    """Run setup.py commands on all exported targets addressed."""
    args = tuple(options.values.args)
    validate_args(args)

    # Get all exported targets, ignoring any non-exported targets that happened to be
    # globbed over, but erroring on any explicitly-requested non-exported targets.

    exported_targets: List[ExportedTarget] = []
    explicit_nonexported_targets: List[Target] = []

    for target_with_origin in targets_with_origins:
        tgt = target_with_origin.target
        if _is_exported(tgt):
            exported_targets.append(ExportedTarget(tgt))
        elif isinstance(target_with_origin.origin, SingleAddress):
            explicit_nonexported_targets.append(tgt)
    if explicit_nonexported_targets:
        raise TargetNotExported(
            "Cannot run setup.py on these targets, because they have no `provides=` clause: "
            f'{", ".join(so.address.reference() for so in explicit_nonexported_targets)}'
        )

    if options.values.transitive:
        # Expand out to all owners of the entire dep closure.
        transitive_targets = await Get[TransitiveTargets](Addresses(
            et.target.address for et in exported_targets))
        owners = await MultiGet(Get[ExportedTarget](OwnedDependency(tgt))
                                for tgt in transitive_targets.closure
                                if is_ownable_target(tgt))
        exported_targets = list(FrozenOrderedSet(owners))

    py2 = is_python2(
        (target_with_origin.target.get(PythonInterpreterCompatibility).value
         for target_with_origin in targets_with_origins),
        python_setup,
    )
    chroots = await MultiGet(
        Get[SetupPyChroot](SetupPyChrootRequest(exported_target, py2))
        for exported_target in exported_targets)

    # If args were provided, run setup.py with them; otherwise just dump chroots.
    if args:
        setup_py_results = await MultiGet(
            Get[RunSetupPyResult](RunSetupPyRequest(exported_target, chroot,
                                                    tuple(args)))
            for exported_target, chroot in zip(exported_targets, chroots))

        for exported_target, setup_py_result in zip(exported_targets,
                                                    setup_py_results):
            addr = exported_target.target.address.reference()
            console.print_stderr(
                f"Writing dist for {addr} under {distdir.relpath}/.")
            workspace.materialize_directory(
                DirectoryToMaterialize(setup_py_result.output,
                                       path_prefix=str(distdir.relpath)))
    else:
        # Just dump the chroot.
        for exported_target, chroot in zip(exported_targets, chroots):
            addr = exported_target.target.address.reference()
            provides = exported_target.provides
            setup_py_dir = distdir.relpath / f"{provides.name}-{provides.version}"
            console.print_stderr(
                f"Writing setup.py chroot for {addr} to {setup_py_dir}")
            workspace.materialize_directory(
                DirectoryToMaterialize(chroot.digest,
                                       path_prefix=str(setup_py_dir)))

    return SetupPy(0)
Example #19
async def find_owners(owners_request: OwnersRequest) -> Owners:
    # Determine which of the sources are live and which are deleted.
    sources_paths = await Get(Paths, PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_paths.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    # For live files, we use expanded Targets, which have file level precision but which are
    # only created for existing files. For deleted files we use UnexpandedTargets, which have
    # the original declared glob.
    live_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in live_dirs)
    deleted_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in deleted_dirs)
    live_candidate_tgts, deleted_candidate_tgts = await MultiGet(
        Get(Targets, AddressSpecs(live_candidate_specs)),
        Get(UnexpandedTargets, AddressSpecs(deleted_candidate_specs)),
    )

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        candidate_tgts: Sequence[Target]
        if live:
            candidate_tgts = live_candidate_tgts
            sources_set = live_files
        else:
            candidate_tgts = deleted_candidate_tgts
            sources_set = deleted_files

        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_tgts)

        for candidate_tgt, bfa in zip(candidate_tgts, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            # Also consider secondary ownership, meaning it's not a `Sources` field with primary
            # ownership, but the target still should match the file. We can't use `tgt.get()`
            # because this is a mixin, and there technically may be >1 field.
            secondary_owner_fields = tuple(
                field  # type: ignore[misc]
                for field in candidate_tgt.field_values.values()
                if isinstance(field, SecondaryOwnerMixin))
            for secondary_owner_field in secondary_owner_fields:
                matching_files.update(
                    matches_filespec(secondary_owner_field.filespec,
                                     paths=sources_set))
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Example #20
def test_parser_simple(rule_runner: RuleRunner) -> None:
    rule_runner.write_files(
        {
            "BUILD": textwrap.dedent(
                """
            scala_source(
                name="simple-source",
                source="SimpleSource.scala",
            )
            """
            ),
            "SimpleSource.scala": textwrap.dedent(
                """
            package org.pantsbuild
            package example

            import scala.collection.mutable.{ArrayBuffer, HashMap => RenamedHashMap}
            import java.io._

            class OuterClass {
                import foo.bar.SomeItem

                val NestedVal = 3
                var NestedVar = "foo"
                trait NestedTrait {
                }
                class NestedClass {
                }
                type NestedType = Foo
                object NestedObject {
                }
            }

            trait OuterTrait {
                val NestedVal = 3
                var NestedVar = "foo"
                trait NestedTrait {
                }
                class NestedClass {
                }
                type NestedType = Foo
                object NestedObject {
                }
            }

            object OuterObject {
                val NestedVal = 3
                var NestedVar = "foo"
                trait NestedTrait {
                }
                class NestedClass {
                }
                type NestedType = Foo
                object NestedObject {
                }
            }
            """
            ),
        }
    )

    target = rule_runner.get_target(address=Address("", target_name="simple-source"))

    source_files = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(
                (target.get(SourcesField),),
                for_sources_types=(ScalaSourceField,),
                enable_codegen=True,
            )
        ],
    )

    analysis = rule_runner.request(
        ScalaSourceDependencyAnalysis,
        [source_files],
    )

    assert analysis.provided_names == FrozenOrderedSet(
        [
            "org.pantsbuild.example.OuterClass",
            "org.pantsbuild.example.OuterClass.NestedVal",
            "org.pantsbuild.example.OuterClass.NestedVar",
            "org.pantsbuild.example.OuterClass.NestedTrait",
            "org.pantsbuild.example.OuterClass.NestedClass",
            "org.pantsbuild.example.OuterClass.NestedType",
            "org.pantsbuild.example.OuterClass.NestedObject",
            "org.pantsbuild.example.OuterTrait",
            "org.pantsbuild.example.OuterTrait.NestedVal",
            "org.pantsbuild.example.OuterTrait.NestedVar",
            "org.pantsbuild.example.OuterTrait.NestedTrait",
            "org.pantsbuild.example.OuterTrait.NestedClass",
            "org.pantsbuild.example.OuterTrait.NestedType",
            "org.pantsbuild.example.OuterTrait.NestedObject",
            "org.pantsbuild.example.OuterObject",
            "org.pantsbuild.example.OuterObject.NestedVal",
            "org.pantsbuild.example.OuterObject.NestedVar",
            "org.pantsbuild.example.OuterObject.NestedTrait",
            "org.pantsbuild.example.OuterObject.NestedClass",
            "org.pantsbuild.example.OuterObject.NestedType",
            "org.pantsbuild.example.OuterObject.NestedObject",
        ]
    )

    assert analysis.imports_by_scope == FrozenDict(
        {
            "org.pantsbuild.example.OuterClass": (
                ScalaImport(name="foo.bar.SomeItem", is_wildcard=False),
            ),
            "org.pantsbuild.example": (
                ScalaImport(name="scala.collection.mutable.ArrayBuffer", is_wildcard=False),
                ScalaImport(name="scala.collection.mutable.HashMap", is_wildcard=False),
                ScalaImport(name="java.io", is_wildcard=True),
            ),
        }
    )
Example #21
def all_imports(self) -> FrozenOrderedSet[str]:
    return FrozenOrderedSet(sorted([*self.explicit_imports, *self.inferred_imports]))
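The property simply merges two collections into one sorted, deduped, hashable set. A quick sketch of the resulting behavior (assuming `FrozenOrderedSet` comes from `pants.util.ordered_set`):

from pants.util.ordered_set import FrozenOrderedSet

explicit_imports = ["os", "re"]
inferred_imports = ["re", "json"]
merged = FrozenOrderedSet(sorted([*explicit_imports, *inferred_imports]))
assert list(merged) == ["json", "os", "re"]  # sorted, with the duplicate "re" dropped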
Example #22
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()

        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                #  are transitive deps of the targets in play. There may be others in the repo.
                #  So we may end up creating a few different repository pexes, each with identical
                #  name requirements but different subsets of URL requirements. Fortunately since
                #  all these repository pexes will have identical pinned versions of everything,
                #  this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                #  generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        additional_args=["-vvv"],
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example #23
    def sort_goals(self, context, goals):
        goal_info_by_goal = OrderedDict()
        for goal in reversed(FrozenOrderedSet(goals)):
            self._visit_goal(goal, context, goal_info_by_goal)

        return list(reversed(list(self._topological_sort(goal_info_by_goal))))
Example #24
def __init__(self, requested: Sequence[str], allowed: Optional[Sequence[str]] = None):
    self.requested = FrozenOrderedSet(requested)
    self.allowed = None if allowed is None else FrozenOrderedSet(allowed)
Example #25
def test_frozen_is_hashable() -> None:
    set1 = FrozenOrderedSet("abcabc")
    assert hash(set1) == hash(copy(set1))

    set2 = FrozenOrderedSet("abcd")
    assert hash(set1) != hash(set2)
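The test leans on two properties of `FrozenOrderedSet`: construction dedupes while preserving first-insertion order, and equal sets hash equally, which is what makes them usable as dict keys. A brief sketch (again assuming the `pants.util.ordered_set` import path):

from pants.util.ordered_set import FrozenOrderedSet

s = FrozenOrderedSet("abcabc")
assert list(s) == ["a", "b", "c"]  # deduped, insertion order preserved
assert s == FrozenOrderedSet("abc")
cache = {s: "hit"}  # hashable, so valid as a dict key
assert cache[FrozenOrderedSet("abc")] == "hit"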
Example #26
async def run_go_tests(
    field_set: GoTestFieldSet, test_subsystem: TestSubsystem, go_test_subsystem: GoTestSubsystem
) -> TestResult:
    maybe_pkg_info, wrapped_target = await MultiGet(
        Get(FallibleFirstPartyPkgInfo, FirstPartyPkgInfoRequest(field_set.address)),
        Get(WrappedTarget, Address, field_set.address),
    )

    if maybe_pkg_info.info is None:
        assert maybe_pkg_info.stderr is not None
        return TestResult(
            exit_code=maybe_pkg_info.exit_code,
            stdout="",
            stderr=maybe_pkg_info.stderr,
            stdout_digest=EMPTY_FILE_DIGEST,
            stderr_digest=EMPTY_FILE_DIGEST,
            address=field_set.address,
            output_setting=test_subsystem.output,
        )
    pkg_info = maybe_pkg_info.info

    target = wrapped_target.target
    import_path = target[GoImportPathField].value

    testmain = await Get(
        GeneratedTestMain,
        GenerateTestMainRequest(
            pkg_info.digest,
            FrozenOrderedSet(
                os.path.join(".", pkg_info.subpath, name) for name in pkg_info.test_files
            ),
            FrozenOrderedSet(
                os.path.join(".", pkg_info.subpath, name) for name in pkg_info.xtest_files
            ),
            import_path=import_path,
        ),
    )

    if not testmain.has_tests and not testmain.has_xtests:
        # Nothing to do so return an empty result.
        # TODO: There should really be a "skipped entirely" mechanism for `TestResult`.
        return TestResult(
            exit_code=0,
            stdout="",
            stderr="",
            stdout_digest=EMPTY_FILE_DIGEST,
            stderr_digest=EMPTY_FILE_DIGEST,
            address=field_set.address,
            output_setting=test_subsystem.output,
        )

    # Construct the build request for the package under test.
    maybe_test_pkg_build_request = await Get(
        FallibleBuildGoPackageRequest,
        BuildGoPackageTargetRequest(field_set.address, for_tests=True),
    )
    if maybe_test_pkg_build_request.request is None:
        assert maybe_test_pkg_build_request.stderr is not None
        return TestResult(
            exit_code=maybe_test_pkg_build_request.exit_code,
            stdout="",
            stderr=maybe_test_pkg_build_request.stderr,
            stdout_digest=EMPTY_FILE_DIGEST,
            stderr_digest=EMPTY_FILE_DIGEST,
            address=field_set.address,
            output_setting=test_subsystem.output,
        )
    test_pkg_build_request = maybe_test_pkg_build_request.request
    main_direct_deps = [test_pkg_build_request]

    if testmain.has_xtests:
        # Build a synthetic package for xtests where the import path is the same as the package under test
        # but with "_test" appended.
        #
        # Subset the direct dependencies to only the dependencies used by the xtest code. (Dependency
        # inference will have included all of the regular, test, and xtest dependencies of the package in
        # the build graph.) Moreover, ensure that any import of the package under test is on the _test_
        # version of the package that was just built.
        dep_by_import_path = {
            dep.import_path: dep for dep in test_pkg_build_request.direct_dependencies
        }
        direct_dependencies: OrderedSet[BuildGoPackageRequest] = OrderedSet()
        for xtest_import in pkg_info.xtest_imports:
            if xtest_import == pkg_info.import_path:
                direct_dependencies.add(test_pkg_build_request)
            elif xtest_import in dep_by_import_path:
                direct_dependencies.add(dep_by_import_path[xtest_import])

        xtest_pkg_build_request = BuildGoPackageRequest(
            import_path=f"{import_path}_test",
            digest=pkg_info.digest,
            subpath=pkg_info.subpath,
            go_file_names=pkg_info.xtest_files,
            s_file_names=(),  # TODO: Are there .s files for xtest?
            direct_dependencies=tuple(direct_dependencies),
            minimum_go_version=pkg_info.minimum_go_version,
        )
        main_direct_deps.append(xtest_pkg_build_request)

    # Generate the synthetic main package which imports the test and/or xtest packages.
    maybe_built_main_pkg = await Get(
        FallibleBuiltGoPackage,
        BuildGoPackageRequest(
            import_path="main",
            digest=testmain.digest,
            subpath="",
            go_file_names=(GeneratedTestMain.TEST_MAIN_FILE,),
            s_file_names=(),
            direct_dependencies=tuple(main_direct_deps),
            minimum_go_version=pkg_info.minimum_go_version,
        ),
    )
    if maybe_built_main_pkg.output is None:
        assert maybe_built_main_pkg.stderr is not None
        return TestResult(
            exit_code=maybe_built_main_pkg.exit_code,
            stdout="",
            stderr=maybe_built_main_pkg.stderr,
            stdout_digest=EMPTY_FILE_DIGEST,
            stderr_digest=EMPTY_FILE_DIGEST,
            address=field_set.address,
            output_setting=test_subsystem.output,
        )
    built_main_pkg = maybe_built_main_pkg.output

    main_pkg_a_file_path = built_main_pkg.import_paths_to_pkg_a_files["main"]
    import_config = await Get(
        ImportConfig, ImportConfigRequest(built_main_pkg.import_paths_to_pkg_a_files)
    )
    input_digest = await Get(Digest, MergeDigests([built_main_pkg.digest, import_config.digest]))

    binary = await Get(
        LinkedGoBinary,
        LinkGoBinaryRequest(
            input_digest=input_digest,
            archives=(main_pkg_a_file_path,),
            import_config_path=import_config.CONFIG_PATH,
            output_filename="./test_runner",  # TODO: Name test binary the way that `go` does?
            description=f"Link Go test binary for {field_set.address}",
        ),
    )

    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    result = await Get(
        FallibleProcessResult,
        Process(
            ["./test_runner", *transform_test_args(go_test_subsystem.args)],
            input_digest=binary.digest,
            description=f"Run Go tests: {field_set.address}",
            cache_scope=cache_scope,
            level=LogLevel.INFO,
        ),
    )
    return TestResult.from_fallible_process_result(result, field_set.address, test_subsystem.output)
Example #27
    def setup_graph_extended(
        build_configuration: BuildConfiguration,
        execution_options: ExecutionOptions,
        *,
        executor: PyExecutor,
        pants_ignore_patterns: list[str],
        use_gitignore: bool,
        local_store_options: LocalStoreOptions,
        local_execution_root_dir: str,
        named_caches_dir: str,
        use_deprecated_python_macros: bool,
        ca_certs_path: str | None = None,
        build_root: str | None = None,
        include_trace_on_error: bool = True,
        engine_visualize_to: str | None = None,
        watch_filesystem: bool = True,
    ) -> GraphScheduler:
        build_root_path = build_root or get_buildroot()

        rules = build_configuration.rules
        union_membership: UnionMembership
        registered_target_types = RegisteredTargetTypes.create(
            build_configuration.target_types)

        execution_options = execution_options or DEFAULT_EXECUTION_OPTIONS

        @rule
        def parser_singleton() -> Parser:
            return Parser(
                build_root=build_root_path,
                target_type_aliases=registered_target_types.aliases,
                object_aliases=build_configuration.registered_aliases,
                use_deprecated_python_macros=use_deprecated_python_macros,
            )

        @rule
        def build_configuration_singleton() -> BuildConfiguration:
            return build_configuration

        @rule
        def registered_target_types_singleton() -> RegisteredTargetTypes:
            return registered_target_types

        @rule
        def union_membership_singleton() -> UnionMembership:
            return union_membership

        @rule
        def build_root_singleton() -> BuildRoot:
            return cast(BuildRoot, BuildRoot.instance)

        # Create a Scheduler containing graph and filesystem rules, with no installed goals.
        rules = FrozenOrderedSet((
            *collect_rules(locals()),
            *build_files.rules(),
            *fs.rules(),
            *environment.rules(),
            *desktop.rules(),
            *graph.rules(),
            *options_parsing.rules(),
            *process.rules(),
            *system_binaries.rules(),
            *platform.rules(),
            *changed_rules(),
            *streaming_workunit_handler_rules(),
            *specs_calculator.rules(),
            *rules,
        ))

        goal_map = EngineInitializer._make_goal_map_from_rules(rules)

        union_membership = UnionMembership.from_rules((
            *build_configuration.union_rules,
            *(r for r in rules if isinstance(r, UnionRule)),
        ))

        rules = FrozenOrderedSet((
            *rules,
            # Install queries for each Goal.
            *(QueryRule(goal_type, GraphSession.goal_param_types)
              for goal_type in goal_map.values()),
            # Install queries for each request/response pair used by the BSP support.
            # Note: These are necessary because the BSP support is a built-in goal that makes
            # synchronous requests into the engine.
            *(QueryRule(impl.response_type, (impl.request_type, Workspace))
              for impl in union_membership.get(BSPHandlerMapping)),
            QueryRule(Snapshot, [PathGlobs]),  # Used by the SchedulerService.
        ))

        def ensure_absolute_path(v: str) -> str:
            return Path(v).resolve().as_posix()

        def ensure_optional_absolute_path(v: str | None) -> str | None:
            if v is None:
                return None
            return ensure_absolute_path(v)

        scheduler = Scheduler(
            ignore_patterns=pants_ignore_patterns,
            use_gitignore=use_gitignore,
            build_root=build_root_path,
            local_execution_root_dir=ensure_absolute_path(
                local_execution_root_dir),
            named_caches_dir=ensure_absolute_path(named_caches_dir),
            ca_certs_path=ensure_optional_absolute_path(ca_certs_path),
            rules=rules,
            union_membership=union_membership,
            executor=executor,
            execution_options=execution_options,
            local_store_options=local_store_options,
            include_trace_on_error=include_trace_on_error,
            visualize_to_dir=engine_visualize_to,
            watch_filesystem=watch_filesystem,
        )

        return GraphScheduler(scheduler, goal_map)
Example #28
def __init__(self, union_rules: Mapping[Type, Iterable[Type]]) -> None:
    self.union_rules = FrozenDict(
        {base: FrozenOrderedSet(members) for base, members in union_rules.items()}
    )
Example #29
def test_build_pex_description() -> None:
    def assert_description(
        requirements: PexRequirements | Lockfile | LockfileContent,
        *,
        description: str | None = None,
        expected: str,
    ) -> None:
        request = PexRequest(
            output_filename="new.pex",
            internal_only=True,
            requirements=requirements,
            description=description,
        )
        assert _build_pex_description(request) == expected

    repo_pex = Pex(EMPTY_DIGEST, "repo.pex", None)

    assert_description(PexRequirements(),
                       description="Custom!",
                       expected="Custom!")
    assert_description(PexRequirements(repository_pex=repo_pex),
                       description="Custom!",
                       expected="Custom!")

    assert_description(PexRequirements(), expected="Building new.pex")
    assert_description(PexRequirements(repository_pex=repo_pex),
                       expected="Building new.pex")

    assert_description(PexRequirements(["req"]),
                       expected="Building new.pex with 1 requirement: req")
    assert_description(
        PexRequirements(["req"], repository_pex=repo_pex),
        expected="Extracting 1 requirement to build new.pex from repo.pex: req",
    )

    assert_description(
        PexRequirements(["req1", "req2"]),
        expected="Building new.pex with 2 requirements: req1, req2",
    )
    assert_description(
        PexRequirements(["req1", "req2"], repository_pex=repo_pex),
        expected="Extracting 2 requirements to build new.pex from repo.pex: req1, req2",
    )

    assert_description(
        LockfileContent(
            file_content=FileContent("lock.txt", b""),
            resolve_name="a",
            req_strings=FrozenOrderedSet(),
        ),
        expected="Building new.pex from lock.txt",
    )

    assert_description(
        Lockfile(
            file_path="lock.txt",
            file_path_description_of_origin="foo",
            resolve_name="a",
            req_strings=FrozenOrderedSet(),
        ),
        expected="Building new.pex from lock.txt",
    )
Example #30
def rules(self) -> FrozenOrderedSet[Rule | UnionRule]:
    return FrozenOrderedSet([*self.build_config.rules, *self.build_config.union_rules])