def test_report_file(rule_runner: RuleRunner) -> None:
    """Bandit should write its findings into a report file instead of stdout."""
    tgt = make_target(rule_runner, [BAD_SOURCE])
    results = run_bandit(rule_runner, [tgt], additional_args=["--lint-reports-dir='.'"])
    assert len(results) == 1
    only_result = results[0]
    assert only_result.exit_code == 1
    # Findings go to the report, so stdout stays empty.
    assert only_result.stdout.strip() == ""
    assert only_result.report is not None
    report_contents = rule_runner.request(DigestContents, [only_result.report.digest])
    assert len(report_contents) == 1
    report_text = report_contents[0].content.decode()
    assert "Issue: [B303:blacklist] Use of insecure MD2, MD4, MD5" in report_text
def test_gather_scalafmt_config_files(rule_runner: RuleRunner) -> None:
    """Each Scala source dir should map to the nearest .scalafmt.conf at or above it."""
    files_to_write = {
        SCALAFMT_CONF_FILENAME: "",
        f"foo/bar/{SCALAFMT_CONF_FILENAME}": "",
        f"hello/{SCALAFMT_CONF_FILENAME}": "",
        "hello/Foo.scala": "",
        "hello/world/Foo.scala": "",
        "foo/bar/Foo.scala": "",
        "foo/bar/xyyzzy/Foo.scala": "",
        "foo/blah/Foo.scala": "",
    }
    rule_runner.write_files(files_to_write)
    scala_snapshot = rule_runner.request(Snapshot, [PathGlobs(["**/*.scala"])])
    config_files = rule_runner.request(
        ScalafmtConfigFiles, [GatherScalafmtConfigFilesRequest(scala_snapshot)]
    )
    assert sorted(config_files.source_dir_to_config_file.items()) == [
        ("foo/bar", "foo/bar/.scalafmt.conf"),
        ("foo/bar/xyyzzy", "foo/bar/.scalafmt.conf"),
        ("foo/blah", ".scalafmt.conf"),
        ("hello", "hello/.scalafmt.conf"),
        ("hello/world", "hello/.scalafmt.conf"),
    ]
def _run_setup_py(
    rule_runner: RuleRunner,
    plugin: str,
    version: Optional[str],
    setup_py_args: Iterable[str],
    install_dir: str,
) -> None:
    """Run setup.py for a synthetic plugin in a sandbox and copy dist/ into install_dir.

    Builds the runner PEX, synthesizes a minimal setup.py for `plugin`, executes it
    with the given args, then materializes the resulting dist/ files on disk.
    """
    pex_obj = _create_pex(rule_runner)
    setup_py = FileContent(
        "setup.py",
        dedent(
            f"""
            from setuptools import setup
            setup(name="{plugin}", version="{version or DEFAULT_VERSION}")
            """
        ).encode(),
    )
    source_digest = rule_runner.request(Digest, [CreateDigest([setup_py])])
    merged_digest = rule_runner.request(Digest, [MergeDigests([pex_obj.digest, source_digest])])
    process = Process(
        argv=("python", "setup-py-runner.pex", "setup.py") + tuple(setup_py_args),
        # We reasonably expect there to be a python interpreter on the test-running
        # process's path.
        env={"PATH": os.getenv("PATH", "")},
        input_digest=merged_digest,
        description="Run setup.py",
        output_directories=("dist/",),
    )
    result = rule_runner.request(ProcessResult, [process])
    dist_snapshot = rule_runner.request(Snapshot, [result.output_digest])
    # Materialize the sandbox's dist/ outputs and copy them into install_dir.
    rule_runner.scheduler.write_digest(result.output_digest, path_prefix="output")
    safe_mkdir(install_dir)
    for relpath in dist_snapshot.files:
        shutil.copy(PurePath(rule_runner.build_root, "output", relpath), install_dir)
def assert_files_generated(
    rule_runner: RuleRunner,
    address: Address,
    *,
    expected_files: list[str],
    source_roots: list[str],
    mypy: bool = False,
    extra_args: list[str] | None = None,
) -> None:
    """Run Python codegen on the protobuf target at `address` and check the file names."""
    options = [f"--source-root-patterns={repr(source_roots)}", *(extra_args or ())]
    if mypy:
        options.append("--python-protobuf-mypy-plugin")
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    tgt = rule_runner.get_target(address)
    hydrated = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSourceField])]
    )
    generated = rule_runner.request(
        GeneratedSources, [GeneratePythonFromProtobufRequest(hydrated.snapshot, tgt)]
    )
    assert set(generated.snapshot.files) == set(expected_files)
def parse(content: str) -> ParsedThrift:
    """Parse a single in-memory thrift file through the thrift_parser rules."""
    runner = RuleRunner(
        rules=[*thrift_parser.rules(), QueryRule(ParsedThrift, [ParsedThriftRequest])]
    )
    runner.write_files({"f.thrift": content})
    request = ParsedThriftRequest(ThriftSourceField("f.thrift", Address("", target_name="t")))
    return runner.request(ParsedThrift, [request])
def _run_setup_py(
    rule_runner: RuleRunner,
    plugin: str,
    interpreter_constraints: InterpreterConstraints,
    version: Optional[str],
    setup_py_args: Iterable[str],
    install_dir: str,
) -> None:
    """Run setup.py for a synthetic plugin via a PexProcess and copy dist/ to install_dir.

    Like the plain-Process variant, but executes through the built PEX so the
    given interpreter constraints apply.
    """
    pex_obj = _create_pex(rule_runner, interpreter_constraints)
    setup_py = FileContent(
        "setup.py",
        dedent(
            f"""
            from setuptools import setup
            setup(name="{plugin}", version="{version or DEFAULT_VERSION}")
            """
        ).encode(),
    )
    source_digest = rule_runner.request(Digest, [CreateDigest([setup_py])])
    merged_digest = rule_runner.request(Digest, [MergeDigests([pex_obj.digest, source_digest])])
    process = PexProcess(
        pex=pex_obj,
        argv=("setup.py", *setup_py_args),
        input_digest=merged_digest,
        description="Run setup.py",
        output_directories=("dist/",),
    )
    result = rule_runner.request(ProcessResult, [process])
    dist_snapshot = rule_runner.request(Snapshot, [result.output_digest])
    # Materialize the sandbox's dist/ outputs and copy them into install_dir.
    rule_runner.scheduler.write_digest(result.output_digest, path_prefix="output")
    safe_mkdir(install_dir)
    for relpath in dist_snapshot.files:
        shutil.copy(PurePath(rule_runner.build_root, "output", relpath), install_dir)
def test_npx_process(rule_runner: RuleRunner):
    """NpxProcess should run npx and report the pinned version."""
    process = nodejs.NpxProcess(
        npm_package="",
        args=("--version",),
        description="Testing NpxProcess",
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.stdout.strip() == b"8.5.5"
def _analyze(rule_runner: RuleRunner, source: str) -> ScalaSourceDependencyAnalysis:
    """Write one scala_source target with `source` and return its dependency analysis."""
    rule_runner.write_files(
        {
            "BUILD": """scala_source(name="source", source="Source.scala")""",
            "Source.scala": source,
        }
    )
    tgt = rule_runner.get_target(address=Address("", target_name="source"))
    request = SourceFilesRequest(
        (tgt.get(SourcesField),),
        for_sources_types=(ScalaSourceField,),
        enable_codegen=True,
    )
    files = rule_runner.request(SourceFiles, [request])
    return rule_runner.request(ScalaSourceDependencyAnalysis, [files])
def test_infer_java_imports_same_target_with_cycle(rule_runner: RuleRunner) -> None:
    """Two files in one target that import each other should each infer the other as a dep."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                java_sources(name = 't')
                """
            ),
            "A.java": dedent(
                """\
                package org.pantsbuild.a;
                import org.pantsbuild.b.B;
                public class A {}
                """
            ),
            "B.java": dedent(
                """\
                package org.pantsbuild.b;
                import org.pantsbuild.a.A;
                public class B {}
                """
            ),
        }
    )
    tgt_a = rule_runner.get_target(Address("", target_name="t", relative_file_path="A.java"))
    tgt_b = rule_runner.get_target(Address("", target_name="t", relative_file_path="B.java"))
    deps_of_a = rule_runner.request(
        InferredDependencies, [InferJavaImportDependencies(tgt_a[JavaSourceField])]
    )
    assert deps_of_a == InferredDependencies(dependencies=[tgt_b.address])
    deps_of_b = rule_runner.request(
        InferredDependencies, [InferJavaImportDependencies(tgt_b[JavaSourceField])]
    )
    assert deps_of_b == InferredDependencies(dependencies=[tgt_a.address])
def assert_files_generated(
    rule_runner: RuleRunner,
    address: Address,
    *,
    expected_files: list[str],
    source_roots: list[str],
    extra_args: Iterable[str] = (),
) -> None:
    """Run Scala codegen on the protobuf target at `address` and check the file names."""
    options = [f"--source-root-patterns={repr(source_roots)}", *extra_args]
    rule_runner.set_options(options, env_inherit=PYTHON_BOOTSTRAP_ENV)
    tgt = rule_runner.get_target(address)
    hydrated = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSourceField])]
    )
    generated = rule_runner.request(
        GeneratedSources, [GenerateScalaFromProtobufRequest(hydrated.snapshot, tgt)]
    )
    # Echo every generated file so a failing assertion below is easy to debug
    # (pytest only shows this output on failure).
    contents = rule_runner.request(DigestContents, [generated.snapshot.digest])
    for sc in contents:
        print(f"{sc.path}:\n{sc.content.decode()}")
    assert set(generated.snapshot.files) == set(expected_files)
def test_find_go_binary_targets(rule_runner: RuleRunner) -> None:
    """Tailor should propose go_binary/go_package targets only where they are missing."""
    rule_runner.write_files(
        {
            "missing_binary_tgt/app.go": "package main",
            "missing_binary_tgt/BUILD": "go_package()",
            "tgt_already_exists/app.go": "package main",
            "tgt_already_exists/BUILD": "go_binary(name='bin')\ngo_package()",
            "missing_pkg_and_binary_tgt/app.go": "package main",
            "main_set_to_different_dir/subdir/app.go": "package main",
            "main_set_to_different_dir/subdir/BUILD": "go_package()",
            "main_set_to_different_dir/BUILD": "go_binary(main='main_set_to_different_dir/subdir')",
        }
    )
    already_owned = AllOwnedSources(
        [
            "missing_binary_tgt/app.go",
            "tgt_already_exists/app.go",
            "main_set_to_different_dir/subdir/app.go",
        ]
    )
    actual = rule_runner.request(
        PutativeTargets,
        [PutativeGoTargetsRequest(PutativeTargetsSearchPaths(("",))), already_owned],
    )
    expected = PutativeTargets(
        [
            # Dir has a go_package but no go_binary yet.
            PutativeTarget.for_target_type(
                GoBinaryTarget,
                path="missing_binary_tgt",
                name="bin",
                triggering_sources=[],
            ),
            # Dir has neither target: both a go_package and a go_binary are proposed.
            PutativeTarget.for_target_type(
                GoPackageTarget,
                path="missing_pkg_and_binary_tgt",
                name="missing_pkg_and_binary_tgt",
                triggering_sources=["app.go"],
                kwargs={},
            ),
            PutativeTarget.for_target_type(
                GoBinaryTarget,
                path="missing_pkg_and_binary_tgt",
                name="bin",
                triggering_sources=[],
            ),
        ]
    )
    assert actual == expected
def test_download_artifacts(rule_runner: RuleRunner) -> None:
    """Declared helm_artifact targets should be fetched and include their Chart.yaml."""
    rule_runner.write_files(
        {
            "3rdparty/helm/BUILD": dedent(
                """\
                helm_artifact(
                  name="cert-manager",
                  repository="https://charts.jetstack.io/",
                  artifact="cert-manager",
                  version="v1.7.1"
                )
                helm_artifact(
                  name="prometheus-stack",
                  repository="https://prometheus-community.github.io/helm-charts",
                  artifact="kube-prometheus-stack",
                  version="^27.2.0"
                )
                """
            ),
        }
    )
    targets = rule_runner.request(AllHelmArtifactTargets, [])
    fetched = rule_runner.request(
        FetchedHelmArtifacts,
        [
            FetchHelmArfifactsRequest.for_targets(
                targets, description_of_origin="test_download_artifacts"
            )
        ],
    )
    resolved = [
        rule_runner.request(ResolvedHelmArtifact, [HelmArtifact.from_target(tgt)])
        for tgt in targets
    ]
    assert len(fetched) == len(resolved)
    for got, want in zip(fetched, resolved):
        assert got.artifact == want
        # A fetched chart must at least contain its top-level Chart.yaml.
        assert f"{want.name}/Chart.yaml" in got.snapshot.files
def test_internal_test_fails_to_compile(rule_runner: RuleRunner) -> None:
    """A compilation failure should not cause Pants to error, only the test to fail."""
    rule_runner.write_files(
        {
            "foo/BUILD": "go_mod()",
            "foo/go.mod": "module foo",
            # Test itself is bad.
            "foo/bad_test.go": "invalid!!!",
            # A dependency of the test is bad.
            "foo/dep/f.go": "invalid!!!",
            "foo/uses_dep/f_test.go": textwrap.dedent(
                """
                package uses_dep
                import (
                  "foo/dep"
                  "testing"
                )
                func TestAdd(t *testing.T) {
                  if add(2, 3) != 5 {
                    t.Fail()
                  }
                }
                """
            ),
        }
    )
    # Case 1: the test file itself fails to compile.
    bad_tgt = rule_runner.get_target(Address("foo", generated_name="./"))
    outcome = rule_runner.request(TestResult, [GoTestFieldSet.create(bad_tgt)])
    assert outcome.exit_code == 1
    assert "bad_test.go:1:1: expected 'package', found invalid\n" in outcome.stderr
    # Case 2: a dependency of the test fails to compile.
    dep_tgt = rule_runner.get_target(Address("foo", generated_name="./uses_dep"))
    outcome = rule_runner.request(TestResult, [GoTestFieldSet.create(dep_tgt)])
    assert outcome.exit_code == 1
    assert "dep/f.go:1:1: expected 'package', found invalid\n" in outcome.stderr
def run_pytest(
    rule_runner: RuleRunner,
    test_target: PythonTests,
    *,
    passthrough_args: Optional[str] = None,
    junit_xml_dir: Optional[str] = None,
    use_coverage: bool = False,
    execution_slot_var: Optional[str] = None,
    extra_env_vars: Optional[str] = None,
    env: Optional[Mapping[str, str]] = None,
) -> TestResult:
    """Run pytest over `test_target`, also exercising the interactive debug path.

    The keyword flags translate one-to-one into Pants option strings.
    """
    args = [
        "--backend-packages=pants.backend.python",
        f"--source-root-patterns={SOURCE_ROOT}",
        # pin to lower versions so that we can run Python 2 tests
        "--pytest-version=pytest>=4.6.6,<4.7",
        "--pytest-pytest-plugins=['zipp==1.0.0', 'pytest-cov>=2.8.1,<2.9']",
    ]
    if passthrough_args:
        args.append(f"--pytest-args='{passthrough_args}'")
    if extra_env_vars:
        args.append(f"--test-extra-env-vars={extra_env_vars}")
    if junit_xml_dir:
        args.append(f"--pytest-junit-xml-dir={junit_xml_dir}")
    if use_coverage:
        args.append("--test-use-coverage")
    if execution_slot_var:
        args.append(f"--pytest-execution-slot-var={execution_slot_var}")
    rule_runner.set_options(args, env=env)
    field_sets = [PythonTestFieldSet.create(test_target)]
    test_result = rule_runner.request(TestResult, field_sets)
    debug_request = rule_runner.request(TestDebugRequest, field_sets)
    if debug_request.process is not None:
        # The interactive (debug) run must agree with the batch run on exit code.
        debug_result = InteractiveRunner(rule_runner.scheduler).run(debug_request.process)
        assert test_result.exit_code == debug_result.exit_code
    return test_result
def run_gofmt(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> tuple[tuple[LintResult, ...], FmtResult]:
    """Run gofmt over `targets` in both lint and fmt modes and return both results."""
    rule_runner.set_options(extra_args or (), env_inherit={"PATH"})
    field_sets = [GofmtFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [GofmtRequest(field_sets)])
    # The fmt request needs the hydrated sources as the prior formatter result.
    sources = rule_runner.request(
        SourceFiles, [SourceFilesRequest(fs.sources for fs in field_sets)]
    )
    fmt_result = rule_runner.request(
        FmtResult, [GofmtRequest(field_sets, prior_formatter_result=sources.snapshot)]
    )
    return lint_results.results, fmt_result
def create_mock_run_request(rule_runner: RuleRunner, program_text: bytes) -> RunRequest:
    """Build a RunRequest for a one-file executable program with the given text."""
    program = FileContent(path="program.py", content=program_text, is_executable=True)
    digest = rule_runner.request(Digest, [CreateDigest([program])])
    # "{chroot}" is substituted with the sandbox root at run time.
    return RunRequest(digest=digest, args=(os.path.join("{chroot}", "program.py"),))
def test_compile_jdk_12_file_fails_with_jdk_11(rule_runner: RuleRunner) -> None:
    """Compiling JDK-12-only syntax with a JDK 11 toolchain must raise an ExecutionError."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                java_sources(
                    name = 'lib',
                    jdk = 'adopt:1.11',
                )
                """
            ),
            "3rdparty/jvm/default.lock": TestCoursierWrapper.new(entries=()).serialize(),
            "ExampleLib.java": JAVA_LIB_JDK12_SOURCE,
        }
    )
    component = expect_single_expanded_coarsened_target(
        rule_runner, Address(spec_path="", target_name="lib")
    )
    request = CompileJavaSourceRequest(
        component=component,
        resolve=make_resolve(rule_runner),
    )
    with pytest.raises(ExecutionError):
        rule_runner.request(RenderedClasspath, [request])
def run_hadolint(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> tuple[LintResult, ...]:
    """Lint the given Dockerfile targets with hadolint and return the results."""
    rule_runner.set_options(extra_args or (), env_inherit={"PATH"})
    request = HadolintRequest(HadolintFieldSet.create(tgt) for tgt in targets)
    return rule_runner.request(LintResults, [request]).results
def assert_files_generated(
    rule_runner: RuleRunner,
    address: Address,
    *,
    expected_files: list[str],
    source_roots: list[str],
    extra_args: list[str] | None = None,
) -> None:
    """Run Python codegen on the thrift target at `address` and check the file names."""
    options = [
        f"--source-root-patterns={repr(source_roots)}",
        "--no-python-thrift-infer-runtime-dependency",
        *(extra_args or ()),
    ]
    rule_runner.set_options(options, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    tgt = rule_runner.get_target(address)
    hydrated = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ThriftSourceField])]
    )
    generated = rule_runner.request(
        GeneratedSources, [GeneratePythonFromThriftRequest(hydrated.snapshot, tgt)]
    )
    assert set(generated.snapshot.files) == set(expected_files)
def assert_sources_resolved(
    rule_runner: RuleRunner,
    sources_fields: Iterable[SourcesField],
    *,
    expected: Iterable[TargetSources],
    expected_unrooted: Iterable[str] = (),
) -> None:
    """Resolve `sources_fields` and compare the files (and unrooted files) produced."""
    result = rule_runner.request(SourceFiles, [SourceFilesRequest(sources_fields)])
    expected_paths = set(
        itertools.chain.from_iterable(sources.full_paths for sources in expected)
    )
    # Snapshot files come back sorted, so compare against the sorted expectation.
    assert list(result.snapshot.files) == sorted(expected_paths)
    assert list(result.unrooted_files) == sorted(expected_unrooted)
def test_report_file(rule_runner: RuleRunner) -> None:
    """mypy's linecount report should land in the report digest, not in stdout."""
    rule_runner.write_files(
        {f"{PACKAGE}/f.py": GOOD_FILE, f"{PACKAGE}/BUILD": "python_sources()"}
    )
    tgt = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    results = run_mypy(
        rule_runner, [tgt], extra_args=["--mypy-args='--linecount-report=reports'"]
    )
    assert len(results) == 1
    only_result = results[0]
    assert only_result.exit_code == 0
    assert "Success: no issues found" in only_result.stdout.strip()
    report_files = rule_runner.request(DigestContents, [only_result.report])
    assert len(report_files) == 1
    assert "4 4 1 1 f" in report_files[0].content.decode()
def _deploy_jar_test(rule_runner: RuleRunner, target_name: str) -> None:
    """Package the deploy-jar target, run it with `java -jar`, and verify its output."""
    tgt = rule_runner.get_target(Address("", target_name=target_name))
    jdk = rule_runner.request(InternalJdk, [])
    fat_jar = rule_runner.request(BuiltPackage, [DeployJarFieldSet.create(tgt)])
    jvm_process = JvmProcess(
        jdk=jdk,
        argv=("-jar", "dave.jar"),
        classpath_entries=[],
        description="Run that test jar",
        input_digest=fat_jar.digest,
        use_nailgun=False,
    )
    process_result = rule_runner.request(ProcessResult, [jvm_process])
    assert process_result.stdout.decode("utf-8").strip() == "Hello, World!"
def test_generate_source_targets() -> None:
    """avro_sources should generate one source target per matched file, honoring overrides."""
    rule_runner = RuleRunner(
        rules=[
            *target_types.rules(),
            QueryRule(_TargetParametrizations, [_TargetParametrizationsRequest]),
        ],
        target_types=[AvroSourcesGeneratorTarget],
    )
    rule_runner.write_files(
        {
            "src/avro/BUILD": dedent(
                """\
                avro_sources(
                    name='lib',
                    sources=['**/*.avsc', '**/*.avpr'],
                    overrides={'f1.avsc': {'tags': ['overridden']}},
                )
                """
            ),
            "src/avro/f1.avsc": "",
            "src/avro/f2.avpr": "",
            "src/avro/subdir/f.avsc": "",
        }
    )

    def gen_tgt(rel_fp: str, tags: list[str] | None = None) -> AvroSourceTarget:
        # The target we expect the generator to emit for a single file.
        return AvroSourceTarget(
            {SingleSourceField.alias: rel_fp, Tags.alias: tags},
            Address("src/avro", target_name="lib", relative_file_path=rel_fp),
            residence_dir=os.path.dirname(os.path.join("src/avro", rel_fp)),
        )

    generated = rule_runner.request(
        _TargetParametrizations,
        [
            _TargetParametrizationsRequest(
                Address("src/avro", target_name="lib"), description_of_origin="tests"
            )
        ],
    ).parametrizations
    assert set(generated.values()) == {
        gen_tgt("f1.avsc", tags=["overridden"]),
        gen_tgt("f2.avpr"),
        gen_tgt("subdir/f.avsc"),
    }
def test_import_path(rule_runner: RuleRunner, mod_dir: str) -> None:
    """Import paths should be the go.mod module path plus the dir relative to go.mod."""
    rule_runner.write_files(
        {
            f"{mod_dir}BUILD": "go_mod(name='mod')\ngo_package(name='pkg')",
            f"{mod_dir}go.mod": "module go.example.com/foo",
            f"{mod_dir}f.go": "",
            f"{mod_dir}dir/f.go": "",
            f"{mod_dir}dir/BUILD": "go_package()",
        }
    )
    # The package at the module root uses the bare module path.
    root_info = rule_runner.request(
        FirstPartyPkgImportPath,
        [FirstPartyPkgImportPathRequest(Address(mod_dir, target_name="pkg"))],
    )
    assert root_info.import_path == "go.example.com/foo"
    assert root_info.dir_path_rel_to_gomod == ""
    # A subdirectory package appends its relative dir to the module path.
    subdir_info = rule_runner.request(
        FirstPartyPkgImportPath,
        [FirstPartyPkgImportPathRequest(Address(os.path.join(mod_dir, "dir")))],
    )
    assert subdir_info.import_path == "go.example.com/foo/dir"
    assert subdir_info.dir_path_rel_to_gomod == "dir"
def assert_path_globs(
    rule_runner: RuleRunner,
    globs: Iterable[str],
    *,
    expected_files: Iterable[str],
    expected_dirs: Iterable[str],
) -> None:
    """Expand `globs` and verify the snapshot's files, dirs, and digest emptiness."""
    snapshot = rule_runner.request(Snapshot, [PathGlobs(globs)])
    assert snapshot.files == tuple(sorted(expected_files))
    assert snapshot.dirs == tuple(sorted(expected_dirs))
    # Any match at all must produce a non-empty digest; no matches, the empty one.
    if not (expected_files or expected_dirs):
        assert snapshot.digest == EMPTY_DIGEST
    else:
        assert snapshot.digest != EMPTY_DIGEST
def test_map_third_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
    """Module mapping should honor module_mapping, normalize names, flag ambiguity,
    and handle direct-reference requirements."""
    rule_runner.add_to_build_file(
        "3rdparty/python",
        dedent(
            """\
            python_requirement_library(
                name='ansicolors',
                requirements=['ansicolors==1.21'],
                module_mapping={'ansicolors': ['colors']},
            )

            python_requirement_library(
                name='req1',
                requirements=['req1', 'two_owners'],
            )

            python_requirement_library(
                name='un_normalized',
                requirements=['Un-Normalized-Project>3', 'two_owners'],
            )

            python_requirement_library(
                name='direct_references',
                requirements=[
                    'pip@ git+https://github.com/pypa/pip.git',
                    'local_dist@ file:///path/to/dist.whl',
                ],
            )
            """
        ),
    )
    actual = rule_runner.request(ThirdPartyPythonModuleMapping, [])
    expected_mapping = FrozenDict(
        {
            "colors": Address("3rdparty/python", target_name="ansicolors"),
            "local_dist": Address("3rdparty/python", target_name="direct_references"),
            "pip": Address("3rdparty/python", target_name="direct_references"),
            "req1": Address("3rdparty/python", target_name="req1"),
            "un_normalized_project": Address("3rdparty/python", target_name="un_normalized"),
        }
    )
    expected_ambiguous = FrozenDict(
        {
            # 'two_owners' is required by two libraries, so it cannot be mapped uniquely.
            "two_owners": (
                Address("3rdparty/python", target_name="req1"),
                Address("3rdparty/python", target_name="un_normalized"),
            ),
        }
    )
    assert actual == ThirdPartyPythonModuleMapping(
        mapping=expected_mapping, ambiguous_modules=expected_ambiguous
    )
def test_venv_pex_resolve_info(rule_runner: RuleRunner, pex_type: type[Pex | VenvPex]) -> None:
    """PexResolveInfo should list the resolved distributions of a requests resolve."""
    pex_data = create_pex_and_get_all_data(
        rule_runner, pex_type=pex_type, requirements=PexRequirements(["requests==2.23.0"])
    )
    dists = rule_runner.request(PexResolveInfo, [pex_data["pex"]])
    assert dists[0] == PexDistributionInfo("certifi", Version("2020.12.5"), None, ())
    assert dists[1] == PexDistributionInfo("chardet", Version("3.0.4"), None, ())
    assert dists[2] == PexDistributionInfo(
        "idna", Version("2.10"), SpecifierSet("!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"), ()
    )
    requests_dist = dists[3]
    assert requests_dist.project_name == "requests"
    assert requests_dist.version == Version("2.23.0")
    assert Requirement.parse('PySocks!=1.5.7,>=1.5.6; extra == "socks"') in requests_dist.requires_dists
    assert dists[4].project_name == "urllib3"
def build_package(
    rule_runner: RuleRunner,
    main_target: Target,
) -> BuiltPackage:
    """Wrap `main_target` in a synthesized go_binary and build it into a package."""
    rule_runner.set_options(["--backend-packages=pants.backend.go"])
    rule_runner.add_to_build_file(
        "",
        f"go_binary(name='bin', binary_name='foo', main='{main_target.address.spec}')\n",
    )
    binary_tgt = rule_runner.get_target(Address("", target_name="bin"))
    return rule_runner.request(BuiltPackage, (GoBinaryFieldSet.create(binary_tgt),))
def _assert_build_package(rule_runner: RuleRunner, *, chart_name: str, chart_version: str) -> None:
    """Package the named helm chart and verify a single versioned .tgz artifact."""
    rule_runner.set_options(["--source-root-patterns=['src/*']"])
    tgt = rule_runner.get_target(Address(f"src/{chart_name}", target_name=chart_name))
    field_set = HelmPackageFieldSet.create(tgt)
    dest_dir = field_set.output_path.value_or_default(file_ending=None)
    built = rule_runner.request(BuiltPackage, [field_set])
    assert len(built.artifacts) == 1
    expected_relpath = os.path.join(dest_dir, f"{chart_name}-{chart_version}.tgz")
    assert built.artifacts[0].relpath == expected_relpath
def test_all_owned_sources(rule_runner: RuleRunner) -> None:
    """Only files matched by some target's sources should be reported as owned."""
    all_paths = (
        "dir/a.f90",
        "dir/b.f90",
        "dir/a_test.f90",
        "dir/unowned.txt",
        "unowned.txt",
        "unowned.f90",
    )
    for path in all_paths:
        rule_runner.create_file(path)
    rule_runner.add_to_build_file("dir", "fortran_library()\nfortran_tests(name='tests')")
    owned = rule_runner.request(AllOwnedSources, [])
    # The .txt files and the files outside dir/ have no owning target.
    assert owned == AllOwnedSources(["dir/a.f90", "dir/b.f90", "dir/a_test.f90"])