Esempio n. 1
0
async def dependencies(
        console: Console, addresses: Addresses,
        dependencies_subsystem: DependenciesSubsystem) -> Dependencies:
    """Print the dependencies of `addresses` to the console, one address per line.

    With the `transitive` option, the full transitive dependency set is
    printed; otherwise only direct dependencies are. With the `closed`
    option, the input addresses themselves are included in the output too.
    """
    if dependencies_subsystem.transitive:
        transitive_targets = await Get(
            TransitiveTargets,
            TransitiveTargetsRequest(addresses,
                                     include_special_cased_deps=True))
        targets = Targets(transitive_targets.dependencies)
    else:
        # NB: We must preserve target generators for the roots, i.e. not replace with their
        # generated targets.
        target_roots = await Get(UnexpandedTargets, Addresses, addresses)
        # NB: When determining dependencies, though, we replace target generators with their
        # generated targets.
        dependencies_per_target_root = await MultiGet(
            Get(
                Targets,
                DependenciesRequest(tgt.get(DependenciesField),
                                    include_special_cased_deps=True),
            ) for tgt in target_roots)
        targets = Targets(
            itertools.chain.from_iterable(dependencies_per_target_root))

    # Seed with the roots' own addresses only when `closed` is set.
    address_strings = {addr.spec
                       for addr in addresses
                       } if dependencies_subsystem.closed else set()
    for tgt in targets:
        address_strings.add(tgt.address.spec)

    with dependencies_subsystem.line_oriented(console) as print_stdout:
        for address in sorted(address_strings):
            print_stdout(address)

    return Dependencies(exit_code=0)
Esempio n. 2
0
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    """Run Pylint over the requested field sets, batched by interpreter constraints.

    Returns empty results when `pylint.skip` is set. Otherwise each batch
    (partition) pairs every target with its direct dependencies and the
    configured source plugins, and one LintResult is requested per partition.
    """
    if pylint.skip:
        return LintResults([], linter_name="Pylint")

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    plugin_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    linted_targets_request = Get(
        Targets,
        Addresses(field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = await MultiGet(plugin_targets_request,
                                                    linted_targets_request)

    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[InterpreterConstraintsField]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(InterpreterConstraintsField))

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        # NB: the inner `tgt` deliberately shadows the loop's `tgt`; the generator
        # is consumed immediately, so the shadowing is harmless.
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(tgt[InterpreterConstraintsField]
                  for tgt in [tgt, *dependencies]
                  if tgt.has_field(InterpreterConstraintsField)),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        )
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)

    # Sort for a stable partition order; each partition is linted independently.
    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda tgt_setup: tgt_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results, linter_name="Pylint")
Esempio n. 3
0
async def pylint_lint(request: PylintRequest, pylint: Pylint,
                      python_setup: PythonSetup) -> LintResults:
    """Run Pylint over the requested field sets, batched by interpreter constraints.

    Returns empty results when `pylint.skip` is set. Each batch (partition)
    pairs a target with its direct dependencies plus the configured source
    plugins. When no compatibility fields are present, the subsystem's default
    interpreter constraints are used as a fallback.
    """
    if pylint.skip:
        return LintResults()

    plugin_targets_request = Get[TransitiveTargets](Addresses(
        Address.parse(plugin_addr) for plugin_addr in pylint.source_plugins))
    linted_targets_request = Get[Targets](Addresses(
        field_set.address for field_set in request.field_sets))
    plugin_targets, linted_targets = cast(
        Tuple[TransitiveTargets, Targets],
        await MultiGet([plugin_targets_request, linted_targets_request]),
    )

    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[PythonInterpreterCompatibility]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(PythonInterpreterCompatibility))

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get[Targets](DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets)

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(request.field_sets, linted_targets,
                                            per_target_dependencies):
        target_setup = PylintTargetSetup(field_set,
                                         Targets([tgt, *dependencies]))
        # NB: the inner `tgt` deliberately shadows the loop's `tgt`; the generator
        # is consumed immediately, so the shadowing is harmless.
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(tgt.get(PythonInterpreterCompatibility)
                  for tgt in [tgt, *dependencies]),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        ) or PexInterpreterConstraints(pylint.default_interpreter_constraints)
        interpreter_constraints_to_target_setup[interpreter_constraints].add(
            target_setup)

    # Sort for a stable partition order; each partition is linted independently.
    partitions = (PylintPartition(
        tuple(
            sorted(target_setups,
                   key=lambda target_setup: target_setup.field_set.address)),
        interpreter_constraints,
        Targets(plugin_targets.closure),
    ) for interpreter_constraints, target_setups in sorted(
        interpreter_constraints_to_target_setup.items()))
    partitioned_results = await MultiGet(
        Get[LintResult](PylintPartition, partition)
        for partition in partitions)
    return LintResults(partitioned_results)
Esempio n. 4
0
    def from_target_plugins(
        cls,
        seq: Iterable[ScalaPluginTargetsForTarget],
        resolve: CoursierResolveKey,
    ) -> ScalaPluginsRequest:
        """Merge the plugin and artifact targets of every entry in `seq` into one request."""
        all_plugins: set[Target] = set()
        all_artifacts: set[Target] = set()
        for entry in seq:
            all_plugins.update(entry.plugins)
            all_artifacts.update(entry.artifacts)
        return ScalaPluginsRequest(
            Targets(all_plugins), Targets(all_artifacts), resolve)
Esempio n. 5
0
 def __init__(
     self,
     target_setups: Iterable[PylintTargetSetup],
     interpreter_constraints: PexInterpreterConstraints,
     plugin_targets: Iterable[Target],
 ) -> None:
     """Record the field sets, merged targets, constraints, and plugins for a partition."""
     self.field_sets = tuple(setup.field_set for setup in target_setups)
     # NB: `target_setups` is iterated a second time here, matching the original
     # two-pass behavior.
     merged: list = []
     for setup in target_setups:
         merged.extend(setup.target_with_dependencies)
     self.targets_with_dependencies = Targets(merged)
     self.interpreter_constraints = interpreter_constraints
     self.plugin_targets = Targets(plugin_targets)
Esempio n. 6
0
async def resolve_scala_plugins_for_target(
    request: ScalaPluginsForTargetRequest,
    all_scala_plugins: AllScalaPluginTargets,
    jvm: JvmSubsystem,
    scalac: Scalac,
) -> ScalaPluginTargetsForTarget:
    """Find the scalac plugins, and their backing JVM artifact targets, for one target.

    Plugin names come from the target's `ScalaConsumedPluginNamesField`,
    falling back to scalac's per-resolve defaults. Raises if any requested
    plugin has no artifact in the target's resolve.
    """
    target = request.target
    resolve = request.resolve_name

    plugin_names = target.get(ScalaConsumedPluginNamesField).value
    if plugin_names is None:
        plugin_names_by_resolve = scalac.parsed_default_plugins()
        plugin_names = tuple(plugin_names_by_resolve.get(resolve, ()))

    candidate_plugins = []
    artifact_address_gets = []
    for plugin in all_scala_plugins:
        if _plugin_name(plugin) not in plugin_names:
            continue
        candidate_plugins.append(plugin)
        artifact_field = plugin[ScalacPluginArtifactField]
        address_input = AddressInput.parse(
            artifact_field.value,
            relative_to=target.address.spec_path,
            description_of_origin=
            (f"the `{artifact_field.alias}` field from the target {artifact_field.address}"
             ),
        )
        artifact_address_gets.append(Get(Address, AddressInput, address_input))

    artifact_addresses = await MultiGet(artifact_address_gets)
    candidate_artifacts = await Get(Targets, Addresses(artifact_addresses))

    plugins: dict[str, tuple[Target, Target]] = {
    }  # Maps plugin name to relevant JVM artifact
    # NOTE(review): the zip assumes `candidate_artifacts` preserves the order of
    # `artifact_addresses` (and therefore of `candidate_plugins`) — confirm upstream.
    for plugin, artifact in zip(candidate_plugins, candidate_artifacts):
        if artifact[JvmResolveField].normalized_value(jvm) != resolve:
            continue

        plugins[_plugin_name(plugin)] = (plugin, artifact)

    for plugin_name in plugin_names:
        if plugin_name not in plugins:
            raise Exception(
                f"Could not find Scala plugin `{plugin_name}` in resolve `{resolve}` "
                f"for target {request.target}")

    plugin_targets, artifact_targets = zip(
        *plugins.values()) if plugins else ((), ())
    return ScalaPluginTargetsForTarget(Targets(plugin_targets),
                                       Targets(artifact_targets))
Esempio n. 7
0
async def resolve_scala_plugins_for_target(
    request: ScalaPluginsForTargetRequest,
    all_scala_plugins: AllScalaPluginTargets,
    jvm: JvmSubsystem,
    scalac: Scalac,
) -> ScalaPluginTargetsForTarget:
    """Find the scalac plugins, and their backing JVM artifact targets, for one target.

    Plugin names come from the target's `ScalaConsumedPluginNamesField`,
    falling back to scalac's per-resolve defaults. Raises if any requested
    plugin has no artifact in the target's resolve.
    """

    target = request.target
    resolve = request.resolve_name

    plugin_names = target.get(ScalaConsumedPluginNamesField).value
    if plugin_names is None:
        plugin_names_by_resolve = scalac.parsed_default_plugins()
        plugin_names = tuple(plugin_names_by_resolve.get(resolve, ()))

    candidate_plugins: list[Target] = []
    for plugin in all_scala_plugins:
        if _plugin_name(plugin) in plugin_names:
            candidate_plugins.append(plugin)

    artifact_address_inputs = (
        plugin[ScalacPluginArtifactField].value for plugin in candidate_plugins
    )

    artifact_addresses = await MultiGet(
        # `is not None` is solely to satiate mypy. artifact field is required.
        Get(Address, AddressInput, AddressInput.parse(ai))
        for ai in artifact_address_inputs
        if ai is not None
    )

    candidate_artifacts = await Get(Targets, Addresses(artifact_addresses))

    plugins: dict[str, tuple[Target, Target]] = {}  # Maps plugin name to relevant JVM artifact
    # NOTE(review): the zip assumes `candidate_artifacts` preserves the order of
    # `artifact_addresses` (and therefore of `candidate_plugins`) — confirm upstream.
    for plugin, artifact in zip(candidate_plugins, candidate_artifacts):
        if artifact[JvmResolveField].normalized_value(jvm) != resolve:
            continue

        plugins[_plugin_name(plugin)] = (plugin, artifact)

    for plugin_name in plugin_names:
        if plugin_name not in plugins:
            raise Exception(
                f"Could not find Scala plugin `{plugin_name}` in resolve `{resolve}` "
                f"for target {request.target}"
            )

    plugin_targets, artifact_targets = zip(*plugins.values()) if plugins else ((), ())
    return ScalaPluginTargetsForTarget(Targets(plugin_targets), Targets(artifact_targets))
Esempio n. 8
0
async def dependencies(
    console: Console, addresses: Addresses, options: DependenciesOptions,
) -> Dependencies:
    """Print the dependencies of `addresses`: source addresses and/or 3rdparty requirements.

    The `transitive` option walks the whole closure (excluding the roots).
    The `type` option selects whether source addresses, third-party
    requirement strings (Python requirements and JVM jars), or both are printed.
    """
    if options.values.transitive:
        transitive_targets = await Get[TransitiveTargets](Addresses, addresses)
        targets = Targets(transitive_targets.closure - FrozenOrderedSet(transitive_targets.roots))
    else:
        target_roots = await Get[Targets](Addresses, addresses)
        dependencies_per_target_root = await MultiGet(
            Get[Targets](DependenciesRequest(tgt.get(DependenciesField))) for tgt in target_roots
        )
        targets = Targets(itertools.chain.from_iterable(dependencies_per_target_root))

    include_3rdparty = options.values.type in [
        DependencyType.THIRD_PARTY,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    ]
    include_source = options.values.type in [
        DependencyType.SOURCE,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    ]

    address_strings = set()
    third_party_requirements: Set[str] = set()
    for tgt in targets:
        if include_source:
            address_strings.add(tgt.address.spec)
        if include_3rdparty:
            if tgt.has_field(PythonRequirementsField):
                third_party_requirements.update(
                    str(python_req.requirement) for python_req in tgt[PythonRequirementsField].value
                )
            if tgt.has_field(JarsField):
                # Jars render as `org:name:rev`, or `org:name` when no rev is set.
                third_party_requirements.update(
                    (
                        f"{jar.org}:{jar.name}:{jar.rev}"
                        if jar.rev is not None
                        else f"{jar.org}:{jar.name}"
                    )
                    for jar in tgt[JarsField].value
                )

    with options.line_oriented(console) as print_stdout:
        for address in sorted(address_strings):
            print_stdout(address)
        for requirement_string in sorted(third_party_requirements):
            print_stdout(requirement_string)

    return Dependencies(exit_code=0)
Esempio n. 9
0
async def resolve_targets(targets: UnexpandedTargets) -> Targets:
    """Expand base targets into their subtargets; a base with no subtargets is kept."""
    # TODO: This method duplicates `resolve_targets_with_origins`, because direct expansion of
    # `Addresses` to `Targets` is common in a few places: we can't always assume that we
    # have `AddressesWithOrigins`. One way to dedupe these two methods would be to fake some
    # origins, and then strip them afterward.

    # Split out and expand any base targets.
    # TODO: Should recursively expand alias targets here as well.
    other_targets = []
    base_targets = []
    for target in targets:
        if target.address.is_base_target:
            base_targets.append(target)
        else:
            other_targets.append(target)

    base_targets_subtargets = await MultiGet(
        Get(Subtargets, Address, bt.address) for bt in base_targets)
    # Zip the subtargets back to the base targets and replace them.
    # NB: If a target had no subtargets, we use the base.
    expanded_targets = OrderedSet(other_targets)
    expanded_targets.update(
        target for subtargets in base_targets_subtargets
        for target in (subtargets.subtargets if subtargets.subtargets else (
            subtargets.base, )))
    return Targets(expanded_targets)
Esempio n. 10
0
def run_lint_rule(
    rule_runner: RuleRunner,
    *,
    lint_request_types: List[Type[LintRequest]],
    targets: List[Target],
    per_file_caching: bool,
) -> Tuple[int, str]:
    """Run the `lint` rule with mocked lint results; return (exit code, stderr)."""
    union_membership = UnionMembership({LintRequest: lint_request_types})
    lint_subsystem = create_goal_subsystem(
        LintSubsystem,
        per_file_caching=per_file_caching,
        per_target_caching=False,
    )
    with mock_console(rule_runner.options_bootstrapper) as (console, stdio_reader):
        workspace = Workspace(rule_runner.scheduler, _enforce_effects=False)
        result: Lint = run_rule_with_mocks(
            lint,
            rule_args=[
                console,
                workspace,
                Targets(targets),
                lint_subsystem,
                union_membership,
                DistDir(relpath=Path("dist")),
            ],
            mock_gets=[
                MockGet(
                    output_type=LintResults,
                    input_type=LintRequest,
                    mock=lambda request: request.lint_results,
                )
            ],
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
Esempio n. 11
0
async def coursier_resolve_lockfiles(
    console: Console,
    targets: Targets,
    resolve_subsystem: CoursierResolveSubsystem,
    workspace: Workspace,
) -> CoursierResolve:
    """Generate a Coursier lockfile for every target that owns `JvmLockfileSources`.

    Results whose digest is empty (i.e. nothing changed) are skipped; the
    remaining lockfiles are written to the workspace and reported on stderr.
    """
    jvm_lockfile_targets = Targets(
        target for target in targets if target.has_field(JvmLockfileSources)
    )
    results = await MultiGet(
        Get(CoursierGenerateLockfileResult, CoursierGenerateLockfileRequest(target=target))
        for target in jvm_lockfile_targets
    )
    # For performance reasons, avoid writing out files to the workspace that haven't changed.
    results_to_write = tuple(result for result in results if result.digest != EMPTY_DIGEST)
    if results_to_write:
        merged_digest = await Get(
            Digest, MergeDigests(result.digest for result in results_to_write)
        )
        workspace.write_digest(merged_digest)
        merged_digest_snapshot = await Get(Snapshot, Digest, merged_digest)
        for path in merged_digest_snapshot.files:
            console.print_stderr(f"Updated lockfile at: {path}")

    return CoursierResolve(exit_code=0)
Esempio n. 12
0
def run_goal(
    targets: Sequence[Target],
    *,
    target_type: Optional[List[str]] = None,
    address_regex: Optional[List[str]] = None,
    tag_regex: Optional[List[str]] = None,
) -> str:
    """Run the `filter_targets` rule over `targets`; return captured stdout."""
    console = MockConsole(use_colors=False)
    filter_options = create_goal_subsystem(
        FilterOptions,
        sep="\\n",
        output_file=None,
        target_type=target_type or [],
        address_regex=address_regex or [],
        tag_regex=tag_regex or [],
    )
    registered = RegisteredTargetTypes.create({type(tgt) for tgt in targets})
    run_rule(
        filter_targets,
        rule_args=[Targets(targets), filter_options, console, registered],
    )
    assert not console.stderr.getvalue()
    return cast(str, console.stdout.getvalue())
Esempio n. 13
0
def run_goal(
    targets: Sequence[Target],
    *,
    target_type: Optional[List[str]] = None,
    address_regex: Optional[List[str]] = None,
    tag_regex: Optional[List[str]] = None,
    granularity: Optional[TargetGranularity] = None,
) -> str:
    """Run the `filter_targets` rule over `targets`; return captured stdout."""
    console = MockConsole(use_colors=False)
    filter_subsystem = create_goal_subsystem(
        FilterSubsystem,
        sep="\\n",
        output_file=None,
        target_type=target_type or [],
        address_regex=address_regex or [],
        tag_regex=tag_regex or [],
        granularity=granularity or TargetGranularity.all_targets,
        # Deprecated options still need to be supplied.
        type=[],
        target=[],
        regex=[],
        ancestor=[],
    )
    registered = RegisteredTargetTypes.create({type(tgt) for tgt in targets})
    run_rule_with_mocks(
        filter_targets,
        rule_args=[Targets(targets), filter_subsystem, console, registered],
    )
    assert not console.stderr.getvalue()
    return cast(str, console.stdout.getvalue())
Esempio n. 14
0
def run_typecheck_rule(
    *,
    request_types: List[Type[TypecheckRequest]],
    targets: List[Target],
    include_sources: bool = True,
) -> Tuple[int, str]:
    """Run the `typecheck` rule with mocked results; return (exit code, stderr)."""
    union_membership = UnionMembership({TypecheckRequest: request_types})

    def mock_results(request):
        return request.typecheck_results

    def mock_source_filter(field_sets):
        return FieldSetsWithSources(field_sets if include_sources else ())

    with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
        result: Typecheck = run_rule_with_mocks(
            typecheck,
            rule_args=[console, Targets(targets), union_membership],
            mock_gets=[
                MockGet(
                    output_type=EnrichedTypecheckResults,
                    input_type=TypecheckRequest,
                    mock=mock_results,
                ),
                MockGet(
                    output_type=FieldSetsWithSources,
                    input_type=FieldSetsWithSourcesRequest,
                    mock=mock_source_filter,
                ),
            ],
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
Esempio n. 15
0
 def test_filters_out_irrelevant_targets(self) -> None:
     """Non-Python targets are excluded; `Files` targets keep their source root."""
     specs = [
         (["p.py"], PythonTarget),
         (["f.txt"], Files),
         (["r.txt"], Resources),
         (["j.java"], NonPythonTarget),
     ]
     targets = [
         self.create_target(parent_directory="src/python",
                            files=files,
                            target_cls=cls)
         for files, cls in specs
     ]
     bootstrapper = create_options_bootstrapper(
         args=["--source-root-patterns=src/python"])
     result = self.request_single_product(
         ImportablePythonSources,
         Params(Targets(targets), bootstrapper),
     )
     assert sorted(result.snapshot.files) == sorted(
         ["p.py", "src/python/f.txt", "r.txt"])
Esempio n. 16
0
def run_typecheck_rule(
    *,
    request_types: Sequence[Type[CheckRequest]],
    targets: list[Target],
    only: list[str] | None = None,
) -> Tuple[int, str]:
    """Run the `check` rule with mocked check results; return (exit code, stderr)."""
    union_membership = UnionMembership({CheckRequest: request_types})
    check_subsystem = create_subsystem(CheckSubsystem, only=only or [])
    with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
        rule_runner = RuleRunner()
        workspace = Workspace(rule_runner.scheduler, _enforce_effects=False)
        mocks = [
            MockGet(
                output_type=CheckResults,
                input_type=CheckRequest,
                mock=lambda request: request.check_results,
            ),
        ]
        result: Check = run_rule_with_mocks(
            check,
            rule_args=[
                console,
                workspace,
                Targets(targets),
                DistDir(relpath=Path("dist")),
                union_membership,
                check_subsystem,
            ],
            mock_gets=mocks,
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
Esempio n. 17
0
    def assert_sources(
        self,
        expected_files,
        expected_packages,
        expected_namespace_packages,
        expected_package_data,
        addrs,
    ):
        """Assert that the setup.py sources computed for `addrs` match expectations."""
        request = SetupPySourcesRequest(
            Targets([self.tgt(addr) for addr in addrs]), py2=False)
        srcs = self.request_single_product(
            SetupPySources,
            Params(request, SourceRootConfig.global_instance()),
        )
        chroot_snapshot = self.request_single_product(
            Snapshot, Params(srcs.digest))

        assert sorted(chroot_snapshot.files) == sorted(expected_files)
        assert sorted(srcs.packages) == sorted(expected_packages)
        assert sorted(srcs.namespace_packages) == sorted(
            expected_namespace_packages)
        assert dict(srcs.package_data) == expected_package_data
def run_typecheck_rule(
    *,
    request_types: List[Type[TypecheckRequest]],
    targets: List[Target],
    include_sources: bool = True,
) -> Tuple[int, str]:
    """Run the `typecheck` rule with mocked results; return (exit code, stderr)."""
    console = MockConsole(use_colors=False)
    union_membership = UnionMembership({TypecheckRequest: request_types})
    typecheck_mock = MockGet(
        output_type=TypecheckResults,
        input_type=TypecheckRequest,
        mock=lambda request: request.typecheck_results,
    )
    sources_mock = MockGet(
        output_type=FieldSetsWithSources,
        input_type=FieldSetsWithSourcesRequest,
        mock=lambda field_sets: FieldSetsWithSources(
            field_sets if include_sources else ()),
    )
    result: Typecheck = run_rule_with_mocks(
        typecheck,
        rule_args=[console, Targets(targets), union_membership],
        mock_gets=[typecheck_mock, sources_mock],
        union_membership=union_membership,
    )
    assert not console.stdout.getvalue()
    return result.exit_code, console.stderr.getvalue()
Esempio n. 19
0
async def resolve_bsp_build_target_addresses(
    bsp_target: BSPBuildTargetInternal,
    union_membership: UnionMembership,
) -> Targets:
    """Resolve a BSP build target's specs to targets, optionally filtered by resolve.

    A configured `resolve` filter must look like `<prefix>:<value>`; a target is
    kept when any registered resolve-field factory for that prefix maps it to
    `<value>`.
    """
    # NB: Using `RawSpecs` directly rather than `RawSpecsWithoutFileOwners` results in a rule graph cycle.
    targets = await Get(
        Targets,
        RawSpecsWithoutFileOwners,
        RawSpecsWithoutFileOwners.from_raw_specs(bsp_target.specs),
    )
    if bsp_target.definition.resolve_filter is None:
        return targets

    resolve_filter = bsp_target.definition.resolve_filter
    resolve_prefix, matched, resolve_value = resolve_filter.partition(":")
    if not resolve_prefix or not matched:
        raise ValueError(
            f"The `resolve` filter for `{bsp_target}` must have a platform or language specific "
            f"prefix like `$lang:$filter`, but the configured value: `{resolve_filter}` did not."
        )

    # TODO: See `BSPResolveFieldFactoryRequest` re: this awkwardness.
    factories = await MultiGet(
        Get(BSPResolveFieldFactoryResult, BSPResolveFieldFactoryRequest,
            request())
        for request in union_membership.get(BSPResolveFieldFactoryRequest)
        if request.resolve_prefix == resolve_prefix)

    return Targets(t for t in targets
                   if any((factory.resolve_field_value)(t) == resolve_value
                          for factory in factories))
Esempio n. 20
0
async def resolve_targets(
    targets: UnexpandedTargets,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
) -> Targets:
    """Expand target generators into the targets they generate.

    A generator that produces nothing is kept as-is, as is every
    non-generator (and every already-generated) target.
    """
    # Replace all generating targets with what it generates. Otherwise, keep it. If a target
    # generator does not generate any targets, keep the target generator.
    expanded_targets: OrderedSet[Target] = OrderedSet()
    generator_targets = []
    generate_gets = []
    for tgt in targets:
        if (target_types_to_generate_requests.is_generator(tgt)
                and not tgt.address.is_generated_target):
            generator_targets.append(tgt)
            generate_request = target_types_to_generate_requests[type(tgt)]
            generate_gets.append(
                Get(GeneratedTargets, GenerateTargetsRequest,
                    generate_request(tgt)))
        else:
            expanded_targets.add(tgt)

    all_generated_targets = await MultiGet(generate_gets)
    expanded_targets.update(tgt for generator, generated_targets in zip(
        generator_targets, all_generated_targets) for tgt in (
            generated_targets.values() if generated_targets else {generator}))
    return Targets(expanded_targets)
Esempio n. 21
0
 def test_adds_missing_inits_and_strips_source_roots(self) -> None:
     """Missing `__init__.py` files are synthesized and source roots stripped,
     but `Files` targets keep their full path."""
     with_init = self.create_target(
         parent_directory="src/python/project",
         files=["lib.py", "__init__.py"])
     without_init = self.create_target(
         parent_directory="src/python/test_project",
         files=["f1.py", "f2.py"])
     loose_files = self.create_target(
         parent_directory="src/python/project/resources",
         files=["loose_file.txt"],
         target_cls=Files,
     )
     result = self.request_single_product(
         ImportablePythonSources,
         Params(
             Targets([with_init, without_init, loose_files]),
             create_options_bootstrapper(),
         ),
     )
     expected = [
         "project/lib.py",
         "project/__init__.py",
         "test_project/f1.py",
         "test_project/f2.py",
         "test_project/__init__.py",
         "src/python/project/resources/loose_file.txt",
     ]
     assert sorted(result.snapshot.files) == sorted(expected)
Esempio n. 22
0
def run_black_and_isort(
        rule_runner: RuleRunner,
        targets: list[Target],
        *,
        extra_args: list[str] | None = None) -> LanguageFmtResults:
    """Format `targets` with the black and isort backends; return the combined results."""
    options = [
        "--backend-packages=['pants.backend.python.lint.black', 'pants.backend.python.lint.isort']",
        *(extra_args or []),
    ]
    # LANG and LC_ALL are propagated to satisfy click, which black depends upon.
    # Without them CI can fail with:
    #
    # RuntimeError: Click will abort further execution because Python was configured to use
    # ASCII as encoding for the environment. Consult
    # https://click.palletsprojects.com/unicode-support/ for mitigation steps.
    #
    # This system supports the C.UTF-8 locale which is recommended. You might be able to
    # resolve your issue by exporting the following environment variables:
    #
    #     export LC_ALL=C.UTF-8
    #     export LANG=C.UTF-8
    #
    rule_runner.set_options(
        options,
        env_inherit={"PATH", "PYENV_ROOT", "HOME", "LANG", "LC_ALL"},
    )
    fmt_targets = PythonFmtTargets(Targets(targets))
    return rule_runner.request(LanguageFmtResults, [fmt_targets])
Esempio n. 23
0
async def classpath(
    coarsened_targets: CoarsenedTargets,
    union_membership: UnionMembership,
) -> Classpath:
    """Assemble a classpath for the closure of `coarsened_targets`.

    A single Coursier resolve key is computed for all member targets; one
    classpath entry is requested per coarsened component, their digests are
    merged, and the merged tree is nested under the user-classpath prefix.
    """
    targets = Targets(t for ct in coarsened_targets.closure()
                      for t in ct.members)

    resolve = await Get(CoursierResolveKey, Targets, targets)

    transitive_classpath_entries = await MultiGet(
        Get(
            ClasspathEntry,
            ClasspathEntryRequest,
            ClasspathEntryRequest.for_targets(
                union_membership, component=t, resolve=resolve),
        ) for t in coarsened_targets.closure())
    merged_transitive_classpath_entries_digest = await Get(
        Digest,
        MergeDigests(classfiles.digest
                     for classfiles in transitive_classpath_entries))

    return Classpath(await Get(
        Snapshot,
        AddPrefix(merged_transitive_classpath_entries_digest,
                  _USERCP_RELPATH)))
Esempio n. 24
0
async def resolve_targets(
    targets: UnexpandedTargets,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
) -> Targets:
    """Expand target generators into the targets they generate.

    Non-generator targets (and already-generated targets) are kept as-is. A
    generator that produces no targets is kept as the generator itself.
    """
    # TODO: This method does not preserve the order of inputs.
    result: OrderedSet[Target] = OrderedSet()
    generators = []
    parametrization_gets = []
    for candidate in targets:
        should_expand = (
            target_types_to_generate_requests.is_generator(candidate)
            and not candidate.address.is_generated_target)
        if not should_expand:
            result.add(candidate)
            continue
        generators.append(candidate)
        parametrization_gets.append(
            Get(
                _TargetParametrizations,
                _TargetParametrizationsRequest(
                    candidate.address.maybe_convert_to_target_generator(),
                    description_of_origin="TODO(#14468)",
                ),
            ))

    parametrizations_per_generator = await MultiGet(parametrization_gets)
    for generator, parametrizations in zip(generators,
                                           parametrizations_per_generator):
        result.update(
            parametrizations.generated_or_generator(generator.address))
    return Targets(result)
# Example 25
def run_goal(
    targets: Sequence[Target],
    *,
    target_type: list[str] | None = None,
    address_regex: list[str] | None = None,
    tag_regex: list[str] | None = None,
    granularity: TargetGranularity | None = None,
) -> str:
    """Run the `filter` goal over `targets` with mocked console and return its stdout."""
    registered_types = RegisteredTargetTypes.create({type(t) for t in targets})
    subsystem = create_goal_subsystem(
        FilterSubsystem,
        sep="\\n",
        output_file=None,
        target_type=target_type or [],
        address_regex=address_regex or [],
        tag_regex=tag_regex or [],
        granularity=granularity or TargetGranularity.all_targets,
        # Deprecated.
        type=[],
        target=[],
        regex=[],
        ancestor=[],
    )
    with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
        run_rule_with_mocks(
            filter_targets,
            rule_args=[Targets(targets), subsystem, console, registered_types],
        )
        # The goal should never write to stderr in these scenarios.
        assert not stdio_reader.get_stderr()
        return stdio_reader.get_stdout()
# Example 26
async def get_jvm_targets_by_resolve_name(
    all_targets: AllTargets,
    jvm: JvmSubsystem,
) -> JvmTargetsByResolveName:
    """Group all JVM-compatible targets by the resolve name(s) they declare.

    A target with an explicit `JvmCompatibleResolveNamesField` value is grouped
    under each listed name. A target whose field value is `None` falls back to
    the JVM subsystem's default resolve; if no default is configured either,
    the target is skipped.
    """
    default_resolve: str | None = jvm.options.default_resolve

    # NB: Accumulate into a dict directly instead of `itertools.groupby`, which
    # only groups *adjacent* items: the previous implementation applied it to an
    # unsorted list, so non-adjacent targets sharing a resolve name produced
    # multiple groups and the later ones silently overwrote the earlier ones in
    # the result dict, dropping targets.
    grouped: dict[str, list[Target]] = {}
    for tgt in all_targets:
        if not tgt.has_field(JvmCompatibleResolveNamesField):
            continue
        names = tgt[JvmCompatibleResolveNamesField].value
        if names is None:
            # Fall back to the default resolve; skip the target if none is set.
            if default_resolve is None:
                continue
            names = (default_resolve,)
        for name in names:
            grouped.setdefault(name, []).append(tgt)

    return JvmTargetsByResolveName(
        {name: Targets(tgts) for name, tgts in grouped.items()})
# Example 27
    def __init__(
        self,
        target_setups: Iterable[PylintTargetSetup],
        interpreter_constraints: PexInterpreterConstraints,
        plugin_targets: Iterable[Target],
    ) -> None:
        """Collect the field sets and dependency-expanded targets from each setup."""
        # Materialize once so one-shot iterables can be traversed twice below.
        setups = list(target_setups)
        self.field_sets = tuple(setup.field_set for setup in setups)
        self.targets_with_dependencies = Targets(
            tgt for setup in setups for tgt in setup.target_with_dependencies)
        self.interpreter_constraints = interpreter_constraints
        self.plugin_targets = Targets(plugin_targets)
# Example 28
    def assert_sources(
        self,
        expected_files,
        expected_packages,
        expected_namespace_packages,
        expected_package_data,
        addrs,
    ):
        """Assert that the setup.py sources built for `addrs` match the expectations."""
        request = SetupPySourcesRequest(
            Targets([self.tgt(addr) for addr in addrs]), py2=False)
        bootstrapper = create_options_bootstrapper(
            args=["--source-root-patterns=src/python"])
        srcs = self.request_single_product(
            SetupPySources, Params(request, bootstrapper))
        chroot_snapshot = self.request_single_product(
            Snapshot, Params(srcs.digest))

        # Compare order-insensitively; the rule makes no ordering guarantee.
        assert sorted(chroot_snapshot.files) == sorted(expected_files)
        assert sorted(srcs.packages) == sorted(expected_packages)
        assert sorted(srcs.namespace_packages) == sorted(
            expected_namespace_packages)
        assert dict(srcs.package_data) == expected_package_data
# Example 29
async def dependencies(
        console: Console, addresses: Addresses,
        dependencies_subsystem: DependenciesSubsystem) -> Dependencies:
    """Print the (optionally transitive) dependencies of the given addresses.

    Depending on the subsystem's `type` option, source addresses, third-party
    requirement strings, or both are printed, each group sorted.
    """
    if dependencies_subsystem.transitive:
        transitive_targets = await Get(
            TransitiveTargets,
            TransitiveTargetsRequest(addresses,
                                     include_special_cased_deps=True))
        targets = Targets(transitive_targets.dependencies)
    else:
        target_roots = await Get(UnexpandedTargets, Addresses, addresses)
        deps_per_root = await MultiGet(
            Get(
                Targets,
                DependenciesRequest(root.get(DependenciesField),
                                    include_special_cased_deps=True),
            ) for root in target_roots)
        targets = Targets(itertools.chain.from_iterable(deps_per_root))

    dep_type = dependencies_subsystem.type
    include_source = dep_type in (DependencyType.SOURCE,
                                  DependencyType.SOURCE_AND_THIRD_PARTY)
    include_3rdparty = dep_type in (DependencyType.THIRD_PARTY,
                                    DependencyType.SOURCE_AND_THIRD_PARTY)

    address_strings: Set[str] = set()
    third_party_requirements: Set[str] = set()
    for tgt in targets:
        if include_source:
            address_strings.add(tgt.address.spec)
        if include_3rdparty and tgt.has_field(PythonRequirementsField):
            third_party_requirements.update(
                str(req) for req in tgt[PythonRequirementsField].value)

    with dependencies_subsystem.line_oriented(console) as print_stdout:
        for line in sorted(address_strings):
            print_stdout(line)
        for line in sorted(third_party_requirements):
            print_stdout(line)

    return Dependencies(exit_code=0)
# Example 30
async def run_repl(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    options: ReplOptions,
    transitive_targets: TransitiveTargets,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Repl:
    """Materialize the requested REPL binary into the workspace and run it interactively.

    Returns a `Repl` wrapping the REPL process's exit code, or `Repl(-1)` if
    the requested shell name has no registered implementation.
    """

    # We can guarantee that we will only even enter this `goal_rule` if there exists an implementer
    # of the `ReplImplementation` union because `LegacyGraphSession.run_goal_rules()` will not
    # execute this rule's body if there are no implementations registered.
    membership: Iterable[Type[
        ReplImplementation]] = union_membership.union_rules[ReplImplementation]
    implementations = {impl.name: impl for impl in membership}

    default_repl = "python"
    repl_shell_name = cast(str, options.values.shell or default_repl)

    # Unknown shell name: report the available implementations and bail out
    # with a non-zero sentinel exit code.
    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(set(implementations.keys()))
        console.write_stdout(
            f"{repl_shell_name} is not an installed REPL program. Available REPLs: {available}"
        )
        return Repl(-1)

    # Restrict the transitive closure to the targets this REPL implementation
    # knows how to handle.
    repl_impl = repl_implementation_cls(targets=Targets(
        tgt for tgt in transitive_targets.closure
        if repl_implementation_cls.is_valid(tgt)))
    repl_binary = await Get[ReplBinary](ReplImplementation, repl_impl)

    # NB: `cleanup=False` is required: the interactive process is launched
    # *after* this `with` block exits, so the materialized binary must outlive
    # the context manager.
    with temporary_dir(root_dir=global_options.options.pants_workdir,
                       cleanup=False) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(
            build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(repl_binary.digest,
                                   path_prefix=path_relative_to_build_root))

        full_path = PurePath(tmpdir, repl_binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path, ),
            run_in_workspace=True,
        )

    # Run in the foreground; blocks until the user exits the REPL.
    result = runner.run_local_interactive_process(run_request)
    exit_code = result.process_exit_code

    if exit_code == 0:
        console.write_stdout("REPL exited successfully.")
    else:
        console.write_stdout(f"REPL exited with error: {exit_code}.")

    return Repl(exit_code)