def test_user_repo_order_is_respected(rule_runner: RuleRunner) -> None:
    """Tests that the repo resolution order issue found in #14577 is avoided."""
    jai_core = Coordinate(group="javax.media", artifact="jai_core", version="1.1.3")

    # `repo1.maven.org` has a bogus POM that Coursier hits first
    # `repo.osgeo.org` has a valid POM and should succeed
    def resolve(repos_flag):
        rule_runner.set_options(args=[repos_flag], env_inherit=PYTHON_BOOTSTRAP_ENV)
        return rule_runner.request(
            CoursierResolvedLockfile,
            [ArtifactRequirements.from_coordinates([jai_core])],
        )

    bogus_repo_first = """--coursier-repos=['https://repo1.maven.org/maven2', 'https://repo.osgeo.org/repository/release']"""
    valid_repo_first = """--coursier-repos=['https://repo.osgeo.org/repository/release', 'https://repo1.maven.org/maven2']"""

    # With the bogus repo listed first, resolution must fail...
    with engine_error(ProcessExecutionFailure):
        resolve(bogus_repo_first)

    # ...and with the valid repo first, it must succeed.
    resolve(valid_repo_first)
def make_target(
    rule_runner: RuleRunner,
    source_files: List[FileContent],
    *,
    package: Optional[str] = None,
    name: str = "target",
    interpreter_constraints: Optional[str] = None,
) -> Target:
    """Materialize `source_files`, declare a `python_library` over them, and return the target."""
    if not package:
        package = PACKAGE
    for src in source_files:
        rule_runner.create_file(src.path, src.content.decode())
    source_globs = [PurePath(src.path).name for src in source_files]
    # A single constraint string is wrapped in a list; None means "no constraints".
    constraints = [interpreter_constraints] if interpreter_constraints else None
    rule_runner.add_to_build_file(
        f"{package}",
        dedent(
            f"""\
            python_library(
                name={repr(name)},
                sources={source_globs},
                interpreter_constraints={constraints},
            )
            """
        ),
    )
    rule_runner.set_options(GLOBAL_ARGS)
    return rule_runner.get_target(Address(package, target_name=name))
def test_determine_shell_runner(rule_runner: RuleRunner) -> None:
    """The runner's shell comes from the `shell` field when set, else from the shebang."""
    addr = Address("", target_name="t")
    script = FileContent("tests.sh", b"#!/usr/bin/env sh")
    rule_runner.set_options([], env_inherit={"PATH"})

    def determine(shell_value, fc):
        return rule_runner.request(
            Shunit2Runner,
            [Shunit2RunnerRequest(addr, fc, Shunit2ShellField(shell_value, addr))],
        )

    # If `shell` field is not set, read the shebang.
    assert determine(None, script).shell == Shunit2Shell.sh

    # The `shell` field overrides the shebang.
    assert determine("bash", script).shell == Shunit2Shell.bash

    # Error if not set anywhere.
    with pytest.raises(ExecutionError) as exc:
        determine(None, FileContent("tests.sh", b""))
    assert f"Could not determine which shell to use to run shunit2 on {addr}" in str(exc.value)
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for Scala source dependency inference."""
    runner = RuleRunner(
        rules=[
            *config_files.rules(),
            *coursier_fetch_rules(),
            *coursier_setup_rules(),
            *dep_inference_rules(),
            *external_tool_rules(),
            *scala_parser.rules(),
            *symbol_mapper.rules(),
            *scala_target_rules(),
            *source_files.rules(),
            *util_rules(),
            *jdk_rules(),
            QueryRule(Addresses, [DependenciesRequest]),
            QueryRule(ExplicitlyProvidedDependencies, [DependenciesRequest]),
            QueryRule(InferredDependencies, [InferScalaSourceDependencies]),
            QueryRule(Targets, [UnparsedAddressInputs]),
        ],
        target_types=[ScalaSourcesGeneratorTarget],
    )
    runner.set_options(
        args=[NAMED_RESOLVE_OPTIONS, DEFAULT_RESOLVE_OPTION],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    return runner
def test_protobuf_mapping(rule_runner: RuleRunner) -> None:
    """Protos map to their stripped paths; same-stripped-path protos are reported ambiguous."""
    rule_runner.set_options(["--source-root-patterns=['root1', 'root2', 'root3']"])
    rule_runner.write_files(
        {
            # Two proto files belonging to the same target. We should use two file addresses.
            "root1/protos/f1.proto": "",
            "root1/protos/f2.proto": "",
            "root1/protos/BUILD": "protobuf_library()",
            # These protos would result in the same stripped file name, so they are ambiguous.
            "root1/two_owners/f.proto": "",
            "root1/two_owners/BUILD": "protobuf_library()",
            "root2/two_owners/f.proto": "",
            "root2/two_owners/BUILD": "protobuf_library()",
        }
    )
    actual = rule_runner.request(ProtobufMapping, [])
    expected = ProtobufMapping(
        mapping=FrozenDict(
            {
                "protos/f1.proto": Address("root1/protos", relative_file_path="f1.proto"),
                "protos/f2.proto": Address("root1/protos", relative_file_path="f2.proto"),
            }
        ),
        ambiguous_modules=FrozenDict(
            {
                "two_owners/f.proto": (
                    Address("root1/two_owners", relative_file_path="f.proto"),
                    Address("root2/two_owners", relative_file_path="f.proto"),
                )
            }
        ),
    )
    assert actual == expected
def run_pytest(
    rule_runner: RuleRunner,
    test_target: Target,
    *,
    extra_args: list[str] | None = None,
    env: dict[str, str] | None = None,
) -> TestResult:
    """Run pytest over `test_target` and return the result, also exercising the debug path."""
    # pytest-html==1.22.1 has an undeclared dep on setuptools. This, unfortunately,
    # is the most recent version of pytest-html that works with the low version of
    # pytest that we pin to.
    plugins = ["zipp==1.0.0", "pytest-cov>=2.8.1,<2.9", "pytest-html==1.22.1", "setuptools"]
    plugins_str = "['" + "', '".join(plugins) + "']"
    options = [
        "--backend-packages=pants.backend.python",
        f"--source-root-patterns={SOURCE_ROOT}",
        # pin to lower versions so that we can run Python 2 tests
        "--pytest-version=pytest>=4.6.6,<4.7",
        f"--pytest-pytest-plugins={plugins_str}",
        *(extra_args or ()),
    ]
    rule_runner.set_options(options, env=env, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    field_sets = [PythonTestFieldSet.create(test_target)]
    result = rule_runner.request(TestResult, field_sets)
    debug_request = rule_runner.request(TestDebugRequest, field_sets)
    if debug_request.process is not None:
        with mock_console(rule_runner.options_bootstrapper):
            debug_result = InteractiveRunner(rule_runner.scheduler).run(debug_request.process)
        # The debug (interactive) run must agree with the batch run.
        assert result.exit_code == debug_result.exit_code
    return result
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
    """Generated proto modules map to file addresses; same-name modules are dropped."""
    rule_runner.set_options(["--source-root-patterns=['root1', 'root2', 'root3']"])
    # Two proto files belonging to the same target. We should use two file addresses.
    rule_runner.create_files("root1/protos", ["f1.proto", "f2.proto"])
    rule_runner.add_to_build_file("root1/protos", "protobuf_library()")
    # These protos would result in the same module name, so neither should be used.
    rule_runner.create_file("root1/two_owners/f.proto")
    rule_runner.add_to_build_file("root1/two_owners", "protobuf_library()")
    rule_runner.create_file("root2/two_owners/f.proto")
    rule_runner.add_to_build_file("root2/two_owners", "protobuf_library()")
    # A file with grpc. This also uses the `python_source_root` mechanism, which should be
    # irrelevant to the module mapping because we strip source roots.
    rule_runner.create_file("root1/tests/f.proto")
    rule_runner.add_to_build_file(
        "root1/tests", "protobuf_library(grpc=True, python_source_root='root3')"
    )

    def owners(spec, file_name):
        return (Address(spec, relative_file_path=file_name),)

    mapping = rule_runner.request(FirstPartyPythonMappingImpl, [PythonProtobufMappingMarker()])
    assert mapping == FirstPartyPythonMappingImpl(
        {
            "protos.f1_pb2": owners("root1/protos", "f1.proto"),
            "protos.f2_pb2": owners("root1/protos", "f2.proto"),
            "tests.f_pb2": owners("root1/tests", "f.proto"),
            "tests.f_pb2_grpc": owners("root1/tests", "f.proto"),
        }
    )
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for generating JVM lockfiles for Scala targets."""
    runner = RuleRunner(
        rules=[
            *scala_lockfile_rules(),
            *scala_dep_inf_rules.rules(),
            *jdk_rules.rules(),
            *coursier_fetch_rules(),
            *coursier_jvm_tool_rules(),
            *lockfile.rules(),
            *coursier_setup_rules(),
            *external_tool.rules(),
            *source_files.rules(),
            *util_rules(),
            *system_binaries.rules(),
            *graph.rules(),
            *build_files.rules(),
            *target_types.rules(),
            QueryRule(UserGenerateLockfiles, (RequestedJVMUserResolveNames,)),
            QueryRule(GenerateLockfileResult, (GenerateJvmLockfile,)),
        ],
        target_types=[JvmArtifactTarget, ScalaSourceTarget, ScalaSourcesGeneratorTarget],
    )
    runner.set_options(
        [
            '--scala-version-for-resolve={"foo":"2.13.8"}',
            '--jvm-resolves={"foo": "foo/foo.lock"}',
        ],
        env_inherit={"PATH"},
    )
    return runner
def test_find_thrift_python_requirement(rule_runner: RuleRunner) -> None:
    """Injection finds exactly one `thrift` requirement per resolve, or errors."""
    rule_runner.write_files({"codegen/dir/f.thrift": "", "codegen/dir/BUILD": "thrift_sources()"})
    rule_runner.set_options(
        ["--python-resolves={'python-default': '', 'another': ''}", "--python-enable-resolves"]
    )
    thrift_tgt = rule_runner.get_target(Address("codegen/dir", relative_file_path="f.thrift"))
    request = InjectApacheThriftPythonDependencies(thrift_tgt[Dependencies])

    def injected():
        return rule_runner.request(InjectedDependencies, [request])

    # Start with no relevant requirements.
    with engine_error(MissingPythonCodegenRuntimeLibrary):
        injected()

    # If exactly one, match it.
    rule_runner.write_files({"reqs1/BUILD": "python_requirement(requirements=['thrift'])"})
    assert injected() == InjectedDependencies([Address("reqs1")])

    # Multiple is fine if from other resolve.
    rule_runner.write_files(
        {"another_resolve/BUILD": "python_requirement(requirements=['thrift'], resolve='another')"}
    )
    assert injected() == InjectedDependencies([Address("reqs1")])

    # If multiple from the same resolve, error.
    rule_runner.write_files({"reqs2/BUILD": "python_requirement(requirements=['thrift'])"})
    with engine_error(
        AmbiguousPythonCodegenRuntimeLibrary, contains="['reqs1:reqs1', 'reqs2:reqs2']"
    ):
        injected()
def run_black_and_isort(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> LanguageFmtResults:
    """Run the black+isort formatter pipeline over `targets`."""
    fmt_targets = PythonFmtTargets(Targets(targets))
    options = [
        "--backend-packages=['pants.backend.python.lint.black', 'pants.backend.python.lint.isort']",
        *(extra_args or []),
    ]
    # We propagate LANG and LC_ALL to satisfy click, which black depends upon. Without this we
    # see something like the following in CI:
    #
    #   RuntimeError: Click will abort further execution because Python was configured to use
    #   ASCII as encoding for the environment. Consult
    #   https://click.palletsprojects.com/unicode-support/ for mitigation steps.
    #
    #   This system supports the C.UTF-8 locale which is recommended. You might be able to
    #   resolve your issue by exporting the following environment variables:
    #
    #       export LC_ALL=C.UTF-8
    #       export LANG=C.UTF-8
    rule_runner.set_options(
        options,
        env_inherit={"PATH", "PYENV_ROOT", "HOME", "LANG", "LC_ALL"},
    )
    return rule_runner.request(LanguageFmtResults, [fmt_targets])
def rule_runner() -> RuleRunner:
    """Build a RuleRunner that can compile both Java and Scala sources."""
    runner = RuleRunner(
        rules=[
            *config_files.rules(),
            *coursier_fetch_rules(),
            *coursier_rules(),
            *coursier_setup_rules(),
            *external_tool_rules(),
            *java_dep_inf_rules(),
            *javac_rules(),
            *jdk_rules.rules(),
            *scalac_rules(),
            *source_files.rules(),
            *scala_target_types_rules(),
            *java_target_types_rules(),
            *util_rules(),
            *testutil.rules(),
            QueryRule(CoarsenedTargets, (Addresses,)),
            QueryRule(ClasspathEntry, (CompileJavaSourceRequest,)),
            QueryRule(ClasspathEntry, (CompileScalaSourceRequest,)),
            QueryRule(FallibleClasspathEntry, (CompileJavaSourceRequest,)),
            QueryRule(FallibleClasspathEntry, (CompileScalaSourceRequest,)),
        ],
        target_types=[ScalaSourcesGeneratorTarget, JavaSourcesGeneratorTarget],
    )
    runner.set_options(
        args=[NAMED_RESOLVE_OPTIONS, DEFAULT_RESOLVE_OPTION],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    return runner
def create_python_awslambda(
    rule_runner: RuleRunner,
    addr: Address,
    *,
    extra_args: list[str] | None = None,
) -> tuple[str, bytes]:
    """Package the AWS lambda at `addr` and return its relpath and zip bytes."""
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.awslambda.python",
            "--source-root-patterns=src/python",
            *(extra_args or ()),
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    target = rule_runner.get_target(addr)
    built_asset = rule_runner.request(BuiltPackage, [PythonAwsLambdaFieldSet.create(target)])
    # The packaged artifact advertises its runtime and handler in its log lines.
    assert (
        " Runtime: python3.7",
        " Handler: lambdex_handler.handler",
    ) == built_asset.artifacts[0].extra_log_lines
    digest_contents = rule_runner.request(DigestContents, [built_asset.digest])
    assert len(digest_contents) == 1
    zip_relpath = built_asset.artifacts[0].relpath
    assert zip_relpath is not None
    return zip_relpath, digest_contents[0].content
def run_pyupgrade(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
    pyupgrade_arg: str = "--py36-plus",
) -> tuple[tuple[LintResult, ...], FmtResult]:
    """Run pyupgrade's lint and fmt goals over `targets` and return both results."""
    options = [
        "--backend-packages=pants.backend.python.lint.pyupgrade",
        f'--pyupgrade-args="{pyupgrade_arg}"',
        *(extra_args or ()),
    ]
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    field_sets = [PyUpgradeFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [PyUpgradeRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.source for field_set in field_sets)],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [PyUpgradeRequest(field_sets, prior_formatter_result=input_sources.snapshot)],
    )
    return lint_results.results, fmt_result
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for deploy-jar packaging of Java sources."""
    runner = RuleRunner(
        rules=[
            *classpath_rules(),
            *jvm_tool.rules(),
            *deploy_jar_rules(),
            *javac_rules(),
            *jdk_rules.rules(),
            *java_dep_inf_rules(),
            *target_types_rules(),
            *util_rules(),
            QueryRule(BashBinary, ()),
            QueryRule(InternalJdk, ()),
            QueryRule(BuiltPackage, (DeployJarFieldSet,)),
            QueryRule(ProcessResult, (JvmProcess,)),
            QueryRule(ProcessResult, (Process,)),
        ],
        target_types=[
            JavaSourcesGeneratorTarget,
            JvmArtifactTarget,
            DeployJarTarget,
        ],
    )
    runner.set_options(args=[], env_inherit=PYTHON_BOOTSTRAP_ENV)
    return runner
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for Java source dependency inference."""
    runner = RuleRunner(
        rules=[
            *config_files.rules(),
            *jvm_tool.rules(),
            *dep_inference_rules(),
            *java_target_rules(),
            *java_util_rules(),
            *javac_rules(),
            *junit_rules(),
            *source_files.rules(),
            *util_rules(),
            QueryRule(Addresses, [DependenciesRequest]),
            QueryRule(ExplicitlyProvidedDependencies, [DependenciesRequest]),
            QueryRule(InferredDependencies, [InferJavaSourceDependencies]),
            QueryRule(JavaInferredDependencies, [JavaInferredDependenciesAndExportsRequest]),
            QueryRule(Targets, [UnparsedAddressInputs]),
        ],
        target_types=[
            JavaSourcesGeneratorTarget,
            JunitTestsGeneratorTarget,
            JvmArtifactTarget,
        ],
    )
    runner.set_options(args=[], env_inherit=PYTHON_BOOTSTRAP_ENV)
    return runner
def run_clangformat(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> FmtResult:
    """Run clang-format over `targets` and return the formatting result."""
    options = [
        "--backend-packages=pants.backend.cc.lint.clangformat",
        *(extra_args or ()),
    ]
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    field_sets = [ClangFormatFmtFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.sources for field_set in field_sets)],
    )
    return rule_runner.request(
        FmtResult,
        [ClangFormatRequest(field_sets, snapshot=input_sources.snapshot)],
    )
def run_tffmt(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    """Run `terraform fmt` lint and fmt goals over `targets`."""
    options = [
        "--backend-packages=pants.backend.experimental.terraform",
        "--backend-packages=pants.backend.experimental.terraform.lint.tffmt",
    ]
    if skip:
        options.append("--terraform-fmt-skip")
    rule_runner.set_options(options)
    field_sets = [TerraformFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [TffmtRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.sources for field_set in field_sets)],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [TffmtRequest(field_sets, prior_formatter_result=input_sources.snapshot)],
    )
    return lint_results.results, fmt_result
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for Avro-to-Java code generation."""
    runner = RuleRunner(
        rules=[
            *avro_rules(),
            *avro_java_rules(),
            *config_files.rules(),
            *classpath.rules(),
            *coursier_fetch_rules(),
            *coursier_setup_rules(),
            *external_tool_rules(),
            *source_files.rules(),
            *util_rules(),
            *jdk_rules(),
            *graph.rules(),
            *jvm_compile_rules(),
            *stripped_source_files.rules(),
            QueryRule(HydratedSources, [HydrateSourcesRequest]),
            QueryRule(GeneratedSources, [GenerateJavaFromAvroRequest]),
        ],
        target_types=[
            JavaSourceTarget,
            JavaSourcesGeneratorTarget,
            AvroSourcesGeneratorTarget,
        ],
    )
    runner.set_options([], env_inherit=PYTHON_BOOTSTRAP_ENV)
    return runner
def _run_publish(
    rule_runner: RuleRunner,
    address: Address,
    metadata: HelmChartMetadata,
    *,
    registries: dict | None = None,
    default_repo: str | None = None,
) -> tuple[PublishProcesses, HelmBinary]:
    """Build the chart described by `metadata` and request its publish processes."""
    # Default to no registries; callers may override with an explicit mapping.
    opts: dict[str, str] = {"--helm-registries": "{}"}
    if registries:
        opts["--helm-registries"] = repr(registries)
    if default_repo:
        opts["--helm-default-registry-repository"] = default_repo
    rule_runner.set_options([f"{key}={value}" for key, value in opts.items()])

    chart_target = cast(HelmChartTarget, rule_runner.get_target(address))
    field_set = HelmPublishFieldSet.create(chart_target)
    packages = _build(metadata)
    processes = rule_runner.request(PublishProcesses, [field_set._request(packages)])
    helm = rule_runner.request(HelmBinary, [])
    return processes, helm
def run_black(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> FmtResult:
    """Run black over `targets` and return the formatting result."""
    # We propagate LANG and LC_ALL to satisfy click, which black depends upon. Without this we
    # see something like the following in CI:
    #
    #   RuntimeError: Click will abort further execution because Python was configured to use
    #   ASCII as encoding for the environment. Consult
    #   https://click.palletsprojects.com/unicode-support/ for mitigation steps.
    #
    #   This system supports the C.UTF-8 locale which is recommended. You might be able to
    #   resolve your issue by exporting the following environment variables:
    #
    #       export LC_ALL=C.UTF-8
    #       export LANG=C.UTF-8
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python.lint.black", *(extra_args or ())],
        env_inherit={"PATH", "PYENV_ROOT", "HOME", "LANG", "LC_ALL"},
    )
    field_sets = [BlackFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.source for field_set in field_sets)],
    )
    return rule_runner.request(
        FmtResult,
        [BlackRequest(field_sets, snapshot=input_sources.snapshot)],
    )
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for deploy-jar packaging with a named test resolve."""
    runner = RuleRunner(
        rules=[
            *classpath_rules(),
            *coursier_fetch_rules(),
            *coursier_setup_rules(),
            *deploy_jar_rules(),
            *javac_rules(),
            *jdk_rules.rules(),
            *java_dep_inf_rules(),
            *target_types_rules(),
            *util_rules(),
            QueryRule(BashBinary, ()),
            QueryRule(BuiltPackage, (DeployJarFieldSet,)),
            QueryRule(JdkSetup, ()),
            QueryRule(ProcessResult, (Process,)),
        ],
        target_types=[
            JvmDependencyLockfile,
            JavaSourcesGeneratorTarget,
            JvmArtifact,
            DeployJar,
        ],
    )
    runner.set_options(
        args=[
            '--jvm-resolves={"test": "coursier_resolve.lockfile"}',
            "--jvm-default-resolve=test",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    return runner
def run_pylint(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> Sequence[LintResult]:
    """Run pylint over `targets`, optionally with a config file and passthrough args."""
    options = list(GLOBAL_ARGS)
    if config:
        rule_runner.create_file(relpath="pylintrc", contents=config)
        options.append("--pylint-config=pylintrc")
    if passthrough_args:
        options.append(f"--pylint-args='{passthrough_args}'")
    if skip:
        options.append("--pylint-skip")
    if additional_args:
        options.extend(additional_args)
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    results = rule_runner.request(
        LintResults,
        [PylintRequest(PylintFieldSet.create(tgt) for tgt in targets)],
    )
    return results.results
def assert_injected(
    rule_runner: RuleRunner,
    *,
    source_roots: List[str],
    original_declared_files: List[str],
    original_undeclared_files: List[str],
    expected_discovered: List[str],
) -> None:
    """Assert that ancestor `__init__.py` discovery finds exactly `expected_discovered`."""
    rule_runner.set_options([f"--source-root-patterns={source_roots}"])
    for undeclared in original_undeclared_files:
        rule_runner.create_file(undeclared, "# undeclared")
    request = AncestorFilesRequest(
        "__init__.py",
        rule_runner.make_snapshot({fp: "# declared" for fp in original_declared_files}),
    )
    discovered = rule_runner.request(AncestorFiles, [request]).snapshot
    assert list(discovered.files) == sorted(expected_discovered)

    # Verify each discovered __init__.py carries the content matching its origin.
    materialized = rule_runner.request(DigestContents, [discovered.digest])
    for file_content in materialized:
        path = file_content.path
        if not path.endswith("__init__.py"):
            continue
        assert path in original_declared_files or path in expected_discovered
        expected = b"# declared" if path in original_declared_files else b"# undeclared"
        assert file_content.content == expected
def rule_runner() -> RuleRunner:
    """Build a RuleRunner wired for scalac `check` over Scala targets."""
    runner = RuleRunner(
        rules=[
            *coursier_fetch_rules(),
            *jdk_rules.rules(),
            *scalac_check_rules(),
            *scalac_rules(),
            *source_files.rules(),
            *target_types_rules(),
            *testutil.rules(),
            *util_rules(),
            *scala_dep_inf_rules(),
            QueryRule(CheckResults, (ScalacCheckRequest,)),
            QueryRule(CoarsenedTargets, (Addresses,)),
            QueryRule(FallibleClasspathEntry, (CompileScalaSourceRequest,)),
            QueryRule(RenderedClasspath, (CompileScalaSourceRequest,)),
            QueryRule(ClasspathEntry, (CompileScalaSourceRequest,)),
        ],
        target_types=[
            JvmArtifactTarget,
            ScalaSourcesGeneratorTarget,
            ScalacPluginTarget,
        ],
    )
    runner.set_options(
        args=["--scala-version-for-resolve={'jvm-default':'2.13.8'}"],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    return runner
def run_mypy(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> Sequence[TypecheckResult]:
    """Run MyPy over `targets`, optionally with a config file and passthrough args."""
    options = list(GLOBAL_ARGS)
    if config:
        rule_runner.create_file(relpath="mypy.ini", contents=config)
        options.append("--mypy-config=mypy.ini")
    if passthrough_args:
        options.append(f"--mypy-args='{passthrough_args}'")
    if skip:
        options.append("--mypy-skip")
    if additional_args:
        options.extend(additional_args)
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    typecheck_results = rule_runner.request(
        TypecheckResults,
        [MyPyRequest(MyPyFieldSet.create(tgt) for tgt in targets)],
    )
    return typecheck_results.results
def assert_files_generated(
    rule_runner: RuleRunner,
    address: Address,
    *,
    expected_files: list[str],
    source_roots: list[str],
    mypy: bool = False,
    extra_args: list[str] | None = None,
) -> None:
    """Assert that protobuf codegen for `address` emits exactly `expected_files`."""
    options = [
        f"--source-root-patterns={repr(source_roots)}",
        "--no-python-protobuf-infer-runtime-dependency",
        *(extra_args or ()),
    ]
    if mypy:
        options.append("--python-protobuf-mypy-plugin")
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    tgt = rule_runner.get_target(address)
    protocol_sources = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSourceField])]
    )
    generated_sources = rule_runner.request(
        GeneratedSources,
        [GeneratePythonFromProtobufRequest(protocol_sources.snapshot, tgt)],
    )
    assert set(generated_sources.snapshot.files) == set(expected_files)
def run_shfmt(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> tuple[tuple[LintResult, ...], FmtResult]:
    """Run shfmt's lint and fmt goals over `targets` and return both results."""
    rule_runner.set_options(
        ["--backend-packages=pants.backend.shell.lint.shfmt", *(extra_args or ())],
        env_inherit={"PATH"},
    )
    field_sets = [ShfmtFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [ShfmtRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.sources for field_set in field_sets)],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [ShfmtRequest(field_sets, prior_formatter_result=input_sources.snapshot)],
    )
    return lint_results.results, fmt_result
def test_raise_error_when_unknown_dependency_is_found(rule_runner: RuleRunner) -> None:
    """A chart dependency with no declared artifact fails dependency inference."""
    chart_yaml = dedent(
        """\
        apiVersion: v2
        name: foo
        version: 0.1.0
        dependencies:
        - name: bar
        """
    )
    rule_runner.write_files(
        {
            "src/foo/BUILD": """helm_chart()""",
            "src/foo/Chart.yaml": chart_yaml,
        }
    )
    source_root_patterns = ("/src/*",)
    rule_runner.set_options([f"--source-root-patterns={repr(source_root_patterns)}"])
    tgt = rule_runner.get_target(Address("src/foo", target_name="foo"))
    with pytest.raises(
        ExecutionError, match="Can not find any declared artifact for dependency 'bar'"
    ):
        rule_runner.request(
            InferredDependencies,
            [InferHelmChartDependenciesRequest(tgt[HelmChartMetaSourceField])],
        )
def run_black(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    """Run black's lint and fmt goals over `targets` and return both results."""
    options = ["--backend-packages=pants.backend.python.lint.black"]
    if config is not None:
        rule_runner.create_file(relpath="pyproject.toml", contents=config)
        options.append("--black-config=pyproject.toml")
    if passthrough_args:
        options.append(f"--black-args='{passthrough_args}'")
    if skip:
        options.append("--black-skip")
    rule_runner.set_options(options)
    field_sets = [BlackFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [BlackRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(field_set.sources for field_set in field_sets)],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [BlackRequest(field_sets, prior_formatter_result=input_sources.snapshot)],
    )
    return lint_results.results, fmt_result
def test_map_module_considers_resolves(rule_runner: RuleRunner) -> None:
    """Module ownership is disambiguated per resolve; cross-resolve lookups stay ambiguous."""
    build_contents = dedent(
        """\
        # Note that both `python_requirements` have the same `dep`, which would normally
        # result in ambiguity.
        python_requirement(
            name="dep1",
            resolve="a",
            requirements=["dep"],
        )
        python_requirement(
            name="dep2",
            resolve="b",
            requirements=["dep"],
        )
        """
    )
    rule_runner.write_files({"BUILD": build_contents})
    rule_runner.set_options(
        ["--python-resolves={'a': '', 'b': ''}", "--python-enable-resolves"]
    )

    def get_owners(resolve):
        return rule_runner.request(
            PythonModuleOwners, [PythonModuleOwnersRequest("dep", resolve)]
        )

    assert get_owners("a").unambiguous == (Address("", target_name="dep1"),)
    assert get_owners("b").unambiguous == (Address("", target_name="dep2"),)
    assert get_owners(None).ambiguous == (
        Address("", target_name="dep1"),
        Address("", target_name="dep2"),
    )