def test_uses_correct_python_version(rule_runner: RuleRunner) -> None:
    """Pylint must be invoked with an interpreter matching each target's constraints.

    The same file is valid Python 3 but a syntax error under Python 2, so the exit
    codes reveal which interpreter each partition ran with.
    """
    rule_runner.write_files(
        {
            f"{PACKAGE}/f.py": "'''docstring'''\nCONSTANT: str = ''\n",
            # NB: Avoid Python 3.8+ for this test due to issues with astroid/ast.
            # See https://github.com/pantsbuild/pants/issues/10547.
            f"{PACKAGE}/BUILD": dedent(
                """\
                python_library(name='py2', interpreter_constraints=['==2.7.*'])
                python_library(name='py3', interpreter_constraints=['CPython>=3.6,<3.8'])
                """
            ),
        }
    )

    # An old Pylint plus pinned transitive deps so that it can run under Python 2.
    py2_extra_args = [
        "--pylint-version=pylint<2",
        "--pylint-extra-requirements=['setuptools<45', 'isort>=4.3.21,<4.4']",
    ]

    target_py2 = rule_runner.get_target(
        Address(PACKAGE, target_name="py2", relative_file_path="f.py")
    )
    result_py2 = run_pylint(rule_runner, [target_py2], extra_args=py2_extra_args)
    assert len(result_py2) == 1
    assert result_py2[0].exit_code == 2
    assert "invalid syntax (<string>, line 2) (syntax-error)" in result_py2[0].stdout

    target_py3 = rule_runner.get_target(
        Address(PACKAGE, target_name="py3", relative_file_path="f.py")
    )
    result_py3 = run_pylint(rule_runner, [target_py3])
    assert len(result_py3) == 1
    assert result_py3[0].exit_code == 0
    assert "Your code has been rated at 10.00/10" in result_py3[0].stdout.strip()

    # Running both at once must partition by interpreter constraints.
    combined = run_pylint(rule_runner, [target_py2, target_py3], extra_args=py2_extra_args)
    assert len(combined) == 2
    batched_py3, batched_py2 = sorted(combined, key=lambda result: result.exit_code)

    assert batched_py2.exit_code == 2
    assert batched_py2.partition_description == "['CPython==2.7.*']"
    assert "invalid syntax (<string>, line 2) (syntax-error)" in batched_py2.stdout

    assert batched_py3.exit_code == 0
    assert batched_py3.partition_description == "['CPython<3.8,>=3.6']"
    assert "Your code has been rated at 10.00/10" in batched_py3.stdout.strip()
def test_uses_correct_python_version(rule_runner: RuleRunner) -> None:
    """MyPy's `--python-version` is set automatically, and targets are batched by
    interpreter constraints.

    Batching must account for transitive dependencies, so the constraints that drive
    partitioning live on the dependencies rather than on the root targets.
    """
    rule_runner.write_files(
        {
            f"{PACKAGE}/py2/__init__.py": dedent(
                """\
                def add(x, y):
                    # type: (int, int) -> int
                    return x + y
                """
            ),
            f"{PACKAGE}/py2/BUILD": "python_sources(interpreter_constraints=['==2.7.*'])",
            f"{PACKAGE}/py3/__init__.py": dedent(
                """\
                def add(x: int, y: int) -> int:
                    return x + y
                """
            ),
            f"{PACKAGE}/py3/BUILD": "python_sources(interpreter_constraints=['>=3.6'])",
            f"{PACKAGE}/__init__.py": "",
            f"{PACKAGE}/uses_py2.py": "from project.py2 import add\nassert add(2, 2) == 4\n",
            f"{PACKAGE}/uses_py3.py": "from project.py3 import add\nassert add(2, 2) == 4\n",
            f"{PACKAGE}/BUILD": "python_sources(interpreter_constraints=['==2.7.*', '>=3.6'])",
        }
    )

    tgt_using_py2 = rule_runner.get_target(Address(PACKAGE, relative_file_path="uses_py2.py"))
    tgt_using_py3 = rule_runner.get_target(Address(PACKAGE, relative_file_path="uses_py3.py"))
    results = run_mypy(rule_runner, [tgt_using_py2, tgt_using_py3])
    assert len(results) == 2

    py2_partition, py3_partition = sorted(
        results, key=lambda res: res.partition_description or ""
    )
    assert py2_partition.exit_code == 0
    assert py2_partition.partition_description == "['CPython==2.7.*', 'CPython==2.7.*,>=3.6']"
    assert "Success: no issues found" in py2_partition.stdout
    assert py3_partition.exit_code == 0
    assert py3_partition.partition_description == "['CPython==2.7.*,>=3.6', 'CPython>=3.6']"
    assert "Success: no issues found" in py3_partition.stdout
def test_java_parser_fallible_error(rule_runner: RuleRunner) -> None:
    """A Java source that fails to parse yields a fallible analysis result with a
    nonzero exit code, while requesting the infallible analysis raises."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                java_source(
                    name='simple-source',
                    source='SimpleSource.java',
                )
                """
            ),
            "SimpleSource.java": dedent(
                """
                syntax error!
                """
            ),
        }
    )
    tgt = rule_runner.get_target(address=Address(spec_path="", target_name="simple-source"))
    sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(
                (tgt.get(SourcesField),),
                for_sources_types=(JavaSourceField,),
                enable_codegen=True,
            )
        ],
    )

    # The fallible variant reports the failure instead of raising.
    fallible = rule_runner.request(FallibleJavaSourceDependencyAnalysisResult, [sources])
    assert fallible.process_result.exit_code != 0

    # The infallible variant surfaces the underlying process failure as an error.
    with pytest.raises(ExecutionError) as exc_info:
        rule_runner.request(JavaSourceDependencyAnalysis, [sources])
    assert isinstance(exc_info.value.wrapped_exceptions[0], ProcessExecutionFailure)
def test_compile_with_deps(rule_runner: RuleRunner, jvm_lockfile: JVMLockfileFixture) -> None:
    """Compiling a Kotlin target that depends on another first-party target succeeds
    and produces a jar with the expected entries."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                kotlin_sources(
                    name = 'main',
                    dependencies = [
                        'lib:lib',
                    ]
                )
                """
            ),
            "3rdparty/jvm/BUILD": jvm_lockfile.requirements_as_jvm_artifact_targets(),
            "3rdparty/jvm/default.lock": jvm_lockfile.serialized_lockfile,
            "Example.kt": KOTLIN_LIB_MAIN_SOURCE,
            "lib/BUILD": dedent(
                """\
                kotlin_sources(
                    name = 'lib',
                )
                """
            ),
            "lib/ExampleLib.kt": KOTLIN_LIB_SOURCE,
        }
    )
    request = CompileKotlinSourceRequest(
        component=expect_single_expanded_coarsened_target(
            rule_runner, Address(spec_path="", target_name="main")
        ),
        resolve=make_resolve(rule_runner),
    )
    rendered = rule_runner.request(RenderedClasspath, [request])
    assert rendered.content == {
        ".Example.kt.main.kotlin.jar": {
            "META-INF/MANIFEST.MF",
            "META-INF/main.kotlin_module",
            "org/pantsbuild/example/ExampleKt.class",
        }
    }
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    """First-party MyPy plugins pull in their requirement strings, their transitive
    first-party sources, and the relevant source roots."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(name='mypy', requirements=['mypy==0.81'])
                python_requirement(name='colors', requirements=['ansicolors'])
                """
            ),
            "mypy-plugins/subdir1/util.py": "",
            "mypy-plugins/subdir1/BUILD": "python_sources(dependencies=['mypy-plugins/subdir2'])",
            "mypy-plugins/subdir2/another_util.py": "",
            "mypy-plugins/subdir2/BUILD": "python_sources()",
            "mypy-plugins/plugin.py": "",
            "mypy-plugins/BUILD": dedent(
                """\
                python_sources(
                    dependencies=['//:mypy', '//:colors', "mypy-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=mypy-plugins",
            "--mypy-source-plugins=mypy-plugins/plugin.py",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    plugins = rule_runner.request(MyPyFirstPartyPlugins, [])

    # Requirements come from the plugin target's python_requirement dependencies.
    assert plugins.requirement_strings == FrozenOrderedSet(["ansicolors", "mypy==0.81"])

    # The digest must include the plugin plus its transitive first-party deps.
    expected_snapshot = rule_runner.make_snapshot(
        {
            "mypy-plugins/plugin.py": "",
            "mypy-plugins/subdir1/util.py": "",
            "mypy-plugins/subdir2/another_util.py": "",
        }
    )
    assert plugins.sources_digest == expected_snapshot.digest
    assert plugins.source_roots == ("mypy-plugins",)
def test_includes_direct_dependencies(rule_runner: RuleRunner) -> None:
    """Pylint must see direct (and transitive) dependencies so imports resolve, but
    it only lints the requested target's own sources."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement_library(name='transitive_req', requirements=['fake'])
                python_requirement_library(name='direct_req', requirements=['ansicolors'])
                """
            ),
            # Dependencies deliberately contain code Pylint would reject if linted.
            f"{PACKAGE}/transitive_dep.py": "",
            f"{PACKAGE}/direct_dep.py": dedent(
                """\
                # No docstring - Pylint doesn't lint dependencies.

                from project.transitive_dep import doesnt_matter_if_variable_exists

                THIS_VARIABLE_EXISTS = ''
                """
            ),
            f"{PACKAGE}/f.py": dedent(
                """\
                '''Pylint will check that variables exist and are used.'''
                from colors import green
                from project.direct_dep import THIS_VARIABLE_EXISTS

                print(green(THIS_VARIABLE_EXISTS))
                """
            ),
            f"{PACKAGE}/BUILD": dedent(
                """\
                python_library(name='transitive_dep', sources=['transitive_dep.py'])
                python_library(
                    name='direct_dep',
                    sources=['direct_dep.py'],
                    dependencies=['//:transitive_req', ':transitive_dep']
                )
                python_library(sources=['f.py'], dependencies=['//:direct_req', ':direct_dep'])
                """
            ),
        }
    )
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    assert_success(rule_runner, target)
def test_compile_with_deps(rule_runner: RuleRunner) -> None:
    """Compiling a Scala target that depends on another first-party target succeeds
    and produces a jar with the expected classes."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                scala_sources(
                    name = 'main',
                    dependencies = [
                        'lib:lib',
                    ]
                )
                """
            ),
            "3rdparty/jvm/BUILD": DEFAULT_SCALA_LIBRARY_TARGET,
            "3rdparty/jvm/default.lock": DEFAULT_LOCKFILE,
            "Example.scala": SCALA_LIB_MAIN_SOURCE,
            "lib/BUILD": dedent(
                """\
                scala_sources(
                    name = 'lib',
                )
                """
            ),
            "lib/ExampleLib.scala": SCALA_LIB_SOURCE,
        }
    )
    request = CompileScalaSourceRequest(
        component=expect_single_expanded_coarsened_target(
            rule_runner, Address(spec_path="", target_name="main")
        ),
        resolve=make_resolve(rule_runner),
    )
    rendered = rule_runner.request(RenderedClasspath, [request])
    assert rendered.content == {
        ".Example.scala.main.scalac.jar": {
            "META-INF/MANIFEST.MF",
            "org/pantsbuild/example/Main$.class",
            "org/pantsbuild/example/Main.class",
        }
    }
def test_config_file(rule_runner: RuleRunner, path: str, section: str, extra_args: list[str]) -> None:
    """yapf picks up its config file (at `path`, under `section`) and applies
    `indent_width = 2` when linting and formatting."""
    rule_runner.write_files(
        {
            "f.py": NEEDS_CONFIG_FILE,
            "BUILD": "python_sources(name='t', interpreter_constraints=['==3.9.*'])",
            path: f"[{section}]\nindent_width = 2\n",
        }
    )
    target = rule_runner.get_target(Address("", target_name="t", relative_file_path="f.py"))
    lint_results, fmt_result = run_yapf(rule_runner, [target], extra_args=extra_args)

    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    # yapf's diff output mentions the reformatted file.
    for msg in ("reformatted", "original", "f.py"):
        assert msg in lint_results[0].stdout

    expected = get_snapshot(rule_runner, {"f.py": FIXED_NEEDS_CONFIG_FILE_INDENT2})
    assert fmt_result.output == expected
    assert fmt_result.did_change is True
def test_lint_non_strict_chart_failing(rule_runner: RuleRunner) -> None:
    """Even without strict mode, a chart with an invalid icon URL fails lint."""
    files = {
        "BUILD": "helm_chart(name='mychart')",
        "Chart.yaml": gen_chart_file("mychart", version="0.1.0", icon="wrong URL"),
        "values.yaml": HELM_VALUES_FILE,
        "templates/_helpers.tpl": HELM_TEMPLATE_HELPERS_FILE,
        "templates/service.yaml": K8S_SERVICE_FILE,
    }
    rule_runner.write_files(files)
    chart_target = rule_runner.get_target(Address("", target_name="mychart"))
    results = run_helm_lint(rule_runner, [chart_target])
    assert len(results) == 1
    assert results[0].exit_code == 1
def test_internal_test_fails(rule_runner: RuleRunner) -> None:
    """A failing Go test produces exit code 1 and names the failed test in stdout."""
    rule_runner.write_files(
        {
            "foo/BUILD": "go_mod(name='mod')\ngo_package()",
            "foo/go.mod": "module foo",
            "foo/bar_test.go": textwrap.dedent(
                """
                package foo
                import "testing"
                func TestAdd(t *testing.T) {
                  t.Fail()
                }
                """
            ),
        }
    )
    target = rule_runner.get_target(Address("foo"))
    test_result = rule_runner.request(TestResult, [GoTestFieldSet.create(target)])
    assert test_result.exit_code == 1
    assert "FAIL: TestAdd" in test_result.stdout
def test_target_generator() -> None:
    """`pants_requirements` generates requirement targets for pantsbuild.pants and,
    by default, pantsbuild.pants.testutil, with version pins and module mappings."""
    rule_runner = RuleRunner(
        rules=(
            *pants_requirements.rules(),
            QueryRule(_TargetParametrizations, [Address]),
        ),
        target_types=[PantsRequirementsTargetGenerator],
    )
    rule_runner.write_files(
        {
            "BUILD": (
                "pants_requirements(name='default')\n"
                "pants_requirements(\n"
                "    name='no_testutil', testutil=False, resolve='a'\n"
                ")"
            )
        }
    )

    # Default generator: both requirements are created.
    parametrizations = rule_runner.request(
        _TargetParametrizations, [Address("", target_name="default")]
    ).parametrizations
    assert len(parametrizations) == 2
    generated = list(parametrizations.values())
    pants_req = next(t for t in generated if t.address.generated_name == "pantsbuild.pants")
    testutil_req = next(
        t for t in generated if t.address.generated_name == "pantsbuild.pants.testutil"
    )
    assert pants_req[PythonRequirementModulesField].value == ("pants",)
    assert testutil_req[PythonRequirementModulesField].value == ("pants.testutil",)
    assert pants_req[PythonRequirementsField].value == (
        PipRequirement.parse(f"pantsbuild.pants{determine_version()}"),
    )
    assert testutil_req[PythonRequirementsField].value == (
        PipRequirement.parse(f"pantsbuild.pants.testutil{determine_version()}"),
    )
    for generated_target in (pants_req, testutil_req):
        assert not generated_target[PythonRequirementResolveField].value

    # With testutil=False only the pants requirement is generated, and the
    # `resolve` field is forwarded to it.
    parametrizations = rule_runner.request(
        _TargetParametrizations, [Address("", target_name="no_testutil")]
    ).parametrizations
    assert len(parametrizations) == 1
    assert next(iter(parametrizations.keys())).generated_name == "pantsbuild.pants"
    only_req = next(iter(parametrizations.values()))
    assert only_req[PythonRequirementResolveField].value == "a"
def test_fingerprint_dict_with_files_order(rule_runner: RuleRunner) -> None:
    """The fingerprint of a dict-with-files option must not depend on the order in
    which the files are listed in the value.

    Bug fix: previously both generator entries used the same path
    ("foo/bar.config"), so the second write clobbered the first and f1 == f2,
    making the order-independence assertion vacuously true. Two distinct files
    with distinct contents make the test actually exercise order normalization.
    """
    f1, f2 = (
        rule_runner.write_files({f: c})[0]
        for (f, c) in (
            ("foo/bar.config", "blah blah blah"),
            ("foo/baz.config", "meow meow meow"),
        )
    )
    # Same files, opposite listing order: the fingerprints must agree.
    fp1 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
    fp2 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f2},{f1}"})
    assert fp1 == fp2
def test_sources_expected_num_files(sources_rule_runner: RuleRunner) -> None:
    """DebianSources must resolve to at least one file, all from one directory."""
    sources_rule_runner.write_files(
        {
            path: ""
            for path in (
                "f1.txt",
                "f2.txt",
                "dirA/f3.txt",
                "dirB/f4.txt",
                "dirC/f5.txt",
                "dirC/f6.txt",
            )
        }
    )

    def hydrate(sources_cls: Type[DebianSources], sources: Iterable[str]) -> HydratedSources:
        # Resolve the sources field through the engine so its validation runs.
        request = HydrateSourcesRequest(sources_cls(sources, Address("", target_name="example")))
        return sources_rule_runner.request(HydratedSources, [request])

    with engine_error(contains="must resolve to at least one file"):
        hydrate(DebianSources, [])
    with engine_error(contains="must resolve to at least one file"):
        hydrate(DebianSources, ["non-existing-dir/*"])
    with engine_error(contains="Individual files were found"):
        hydrate(DebianSources, ["f1.txt", "f2.txt"])
    with engine_error(contains="Multiple directories were found"):
        hydrate(DebianSources, ["dirA/f3.txt", "dirB/f4.txt"])

    # Valid declarations: files from a single directory, listed or globbed.
    expected = ("dirC/f5.txt", "dirC/f6.txt")
    assert hydrate(DebianSources, ["dirC/f5.txt", "dirC/f6.txt"]).snapshot.files == expected
    assert hydrate(DebianSources, ["dirC/*"]).snapshot.files == expected
def test_thirdparty_dependency(rule_runner: RuleRunner) -> None:
    """MyPy uses types from third-party requirement dependencies to find errors."""
    rule_runner.write_files(
        {
            "BUILD": (
                "python_requirement(name='more-itertools', requirements=['more-itertools==8.4.0'])"
            ),
            # NOTE: the blank line matters — the assertion below expects the type
            # error on line 3 of f.py.
            f"{PACKAGE}/f.py": dedent(
                """\
                from more_itertools import flatten

                assert flatten(42) == [4, 2]
                """
            ),
            f"{PACKAGE}/BUILD": "python_sources()",
        }
    )
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    results = run_mypy(rule_runner, [target])
    assert len(results) == 1
    assert results[0].exit_code == 1
    assert f"{PACKAGE}/f.py:3" in results[0].stdout
def test_find_putative_targets() -> None:
    """Tailor proposes python_library/python_tests targets only for unowned files."""
    rule_runner = RuleRunner(
        rules=[
            *tailor.rules(),
            QueryRule(PutativeTargets, (PutativePythonTargetsRequest, AllOwnedSources)),
        ],
        target_types=[],
    )
    rule_runner.write_files(
        {
            f"src/python/foo/{fp}": ""
            for fp in (
                "__init__.py",
                "bar/__init__.py",
                "bar/baz1.py",
                "bar/baz1_test.py",
                "bar/baz2.py",
                "bar/baz2_test.py",
                "bar/baz3.py",
            )
        }
    )
    owned = AllOwnedSources(["src/python/foo/bar/__init__.py", "src/python/foo/bar/baz1.py"])
    pts = rule_runner.request(PutativeTargets, [PutativePythonTargetsRequest(), owned])
    expected = PutativeTargets(
        [
            PutativeTarget.for_target_type(PythonLibrary, "src/python/foo", "foo", ["__init__.py"]),
            # bar/__init__.py and bar/baz1.py are owned, so only the rest appear.
            PutativeTarget.for_target_type(
                PythonLibrary, "src/python/foo/bar", "bar", ["baz2.py", "baz3.py"]
            ),
            # Test files are still proposed even when their subject is owned.
            PutativeTarget.for_target_type(
                PythonTests,
                "src/python/foo/bar",
                "tests",
                ["baz1_test.py", "baz2_test.py"],
                kwargs={"name": "tests"},
            ),
        ]
    )
    assert expected == pts
def test_find_go_mod_targets(rule_runner: RuleRunner) -> None:
    """Tailor proposes a `go_mod` target only for go.mod files without an owner."""
    rule_runner.write_files(
        {"unowned/go.mod": "", "owned/go.mod": "", "owned/BUILD": "go_mod()"}
    )
    putative_targets = rule_runner.request(
        PutativeTargets,
        [
            PutativeGoTargetsRequest(PutativeTargetsSearchPaths(("",))),
            AllOwnedSources(["owned/go.mod"]),
        ],
    )
    expected = PutativeTarget.for_target_type(
        GoModTarget, path="unowned", name=None, triggering_sources=["go.mod"]
    )
    assert putative_targets == PutativeTargets([expected])
def test_multiple_targets(rule_runner: RuleRunner) -> None:
    """Formatting several go packages at once fixes only the badly formatted one."""
    rule_runner.write_files(
        {
            "go.mod": GO_MOD,
            "BUILD": "go_mod(name='mod')",
            "good/f.go": GOOD_FILE,
            "good/BUILD": "go_package()",
            "bad/f.go": BAD_FILE,
            "bad/BUILD": "go_package()",
        }
    )
    targets = [
        rule_runner.get_target(Address("good")),
        rule_runner.get_target(Address("bad")),
    ]
    fmt_result = run_gofmt(rule_runner, targets)
    # The good file is untouched; the bad one comes back fixed.
    expected = get_snapshot(rule_runner, {"good/f.go": GOOD_FILE, "bad/f.go": FIXED_BAD_FILE})
    assert fmt_result.output == expected
    assert fmt_result.did_change is True
def test_grpc_mypy_plugin(rule_runner: RuleRunner) -> None:
    """With gRPC and the MyPy protobuf plugin enabled, both the _pb2 and _pb2_grpc
    modules are generated along with their .pyi stubs."""
    rule_runner.write_files(
        {
            "src/protobuf/dir1/f.proto": dedent(GRPC_PROTO_STANZA),
            "src/protobuf/dir1/BUILD": "protobuf_sources(grpc=True)",
        }
    )
    # Each generated module gets a runtime .py and a MyPy .pyi stub.
    expected = [
        f"src/protobuf/dir1/{stem}{suffix}"
        for stem in ("f_pb2", "f_pb2_grpc")
        for suffix in (".py", ".pyi")
    ]
    assert_files_generated(
        rule_runner,
        Address("src/protobuf/dir1", relative_file_path="f.proto"),
        source_roots=["src/protobuf"],
        mypy=True,
        expected_files=expected,
    )
def test_lint_strict_chart_failing(rule_runner: RuleRunner) -> None:
    """In strict mode, lint warnings (no icon, warning-laden template) fail lint."""
    files = {
        "BUILD": "helm_chart(name='mychart', lint_strict=True)",
        "Chart.yaml": gen_chart_file("mychart", version="0.1.0", icon=None),
        "values.yaml": HELM_VALUES_FILE,
        "templates/_helpers.tpl": HELM_TEMPLATE_HELPERS_FILE,
        "templates/ingress.yaml": K8S_INGRESS_FILE_WITH_LINT_WARNINGS,
    }
    rule_runner.write_files(files)
    chart_target = rule_runner.get_target(Address("", target_name="mychart"))
    results = run_helm_lint(rule_runner, [chart_target])
    assert len(results) == 1
    assert results[0].exit_code == 1
def test_vintage_scala_simple_success(rule_runner: RuleRunner) -> None:
    """A JUnit 4 ("vintage") Scala test compiles, runs, and passes."""
    rule_runner.write_files(
        {
            "3rdparty/jvm/default.lock": JUNIT4_RESOLVED_LOCKFILE.serialize(
                [ArtifactRequirement(coordinate=JUNIT_COORD)]
            ),
            "BUILD": dedent(
                f"""\
                jvm_artifact(
                    name = 'junit_junit',
                    group = '{JUNIT_COORD.group}',
                    artifact = '{JUNIT_COORD.artifact}',
                    version = '{JUNIT_COORD.version}',
                )
                scala_junit_tests(
                    name='example-test',
                    dependencies= [
                        ':junit_junit',
                    ],
                )
                """
            ),
            "SimpleTest.scala": dedent(
                """
                package org.pantsbuild.example

                import junit.framework.TestCase
                import junit.framework.Assert._

                class SimpleTest extends TestCase {
                    def testHello(): Unit = {
                        assertTrue("Hello!" == "Hello!")
                    }
                }
                """
            ),
        }
    )
    result = run_junit_test(rule_runner, "example-test", "SimpleTest.scala")
    assert result.exit_code == 0
    # The JUnit console output should show one discovered, passing test.
    assert re.search(r"Finished:\s+testHello", result.stdout) is not None
    assert re.search(r"1 tests successful", result.stdout) is not None
    assert re.search(r"1 tests found", result.stdout) is not None
def test_putative_target_addresses(rule_runner: RuleRunner) -> None:
    """COPY instructions referencing .pex files become putative target addresses;
    `--from` stages and non-pex sources are ignored."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image()",
            "test/Dockerfile": dedent(
                """\
                FROM base
                COPY some.target/binary.pex some.target/tool.pex /bin
                COPY --from=scratch this.is/ignored.pex /opt
                COPY binary another/cli.pex tool /bin
                """
            ),
        }
    )
    target = rule_runner.get_target(Address("test"))
    dockerfile_info = rule_runner.request(DockerfileInfo, [target[DockerImageSourceField]])
    assert dockerfile_info.putative_target_addresses == (
        "some/target:binary",
        "some/target:tool",
        "another:cli",
    )
def test_java_parser_unnamed_package(rule_runner: RuleRunner) -> None:
    """A Java file with no `package` declaration analyzes with no declared package,
    while its top-level and consumed types are still reported."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                java_source(
                    name='simple-source',
                    source='SimpleSource.java',
                )
                """
            ),
            "SimpleSource.java": dedent(
                """
                public class SimpleSource {
                    public void hello() {
                        System.out.println("hello");
                    }
                }
                class Foo {}
                """
            ),
        }
    )
    tgt = rule_runner.get_target(address=Address(spec_path="", target_name="simple-source"))
    sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(
                (tgt.get(SourcesField),),
                for_sources_types=(JavaSourceField,),
                enable_codegen=True,
            )
        ],
    )
    analysis = rule_runner.request(JavaSourceDependencyAnalysis, [sources])
    assert analysis.declared_package is None
    assert analysis.imports == ()
    assert analysis.top_level_types == ("SimpleSource", "Foo")
    # `System` is consumed by the println call.
    assert analysis.consumed_types == ("System",)
def test_mixed_sources(rule_runner: RuleRunner) -> None:
    """gofmt flags and fixes only the badly formatted file within a package."""
    rule_runner.write_files(
        {
            "good.go": GOOD_FILE,
            "bad.go": BAD_FILE,
            "go.mod": GO_MOD,
            "BUILD": "go_mod(name='mod')",
        }
    )
    target = rule_runner.get_target(Address("", target_name="mod", generated_name="./"))
    lint_results, fmt_result = run_gofmt(rule_runner, [target])

    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    # Only the unformatted file should be reported by lint.
    assert "bad.go" in lint_results[0].stdout
    assert "good.go" not in lint_results[0].stdout

    expected = get_digest(rule_runner, {"good.go": GOOD_FILE, "bad.go": FIXED_BAD_FILE})
    assert fmt_result.output == expected
    assert fmt_result.did_change is True
def test_config_files(rule_runner: RuleRunner) -> None:
    """A directory-local .shellcheckrc only applies to files in that directory."""
    rule_runner.write_files(
        {
            "a/f.sh": BAD_FILE,
            "a/BUILD": "shell_library()",
            # Disables the warning BAD_FILE triggers, but only for directory `a`.
            "a/.shellcheckrc": "disable=SC2148",
            "b/f.sh": BAD_FILE,
            "b/BUILD": "shell_library()",
        }
    )
    targets = [
        rule_runner.get_target(Address("a", relative_file_path="f.sh")),
        rule_runner.get_target(Address("b", relative_file_path="f.sh")),
    ]
    results = run_shellcheck(rule_runner, targets)
    assert len(results) == 1
    assert results[0].exit_code == 1
    # a/f.sh is silenced by its config; b/f.sh still fails.
    assert "a/f.sh" not in results[0].stdout
    assert "In b/f.sh line 1:" in results[0].stdout
def test_compile_with_undeclared_jvm_artifact_dependency_fails(
    rule_runner: RuleRunner) -> None:
    """Compilation fails cleanly when a required jvm_artifact is not declared as a
    dependency of the compiled target."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                jvm_artifact(
                    name = "joda-time_joda-time",
                    group = "joda-time",
                    artifact = "joda-time",
                    version = "2.10.10",
                )
                scala_sources(
                    name = 'main',
                    dependencies = [], # `joda-time` needs to be here for compile to succeed
                )
                """
            ),
            "3rdparty/jvm/BUILD": DEFAULT_SCALA_LIBRARY_TARGET,
            "3rdparty/jvm/default.lock": DEFAULT_LOCKFILE,
            "Example.scala": dedent(
                """
                package org.pantsbuild.example

                import org.joda.time.DateTime

                object Main {
                    def main(args: Array[String]): Unit = {
                        val dt = new DateTime()
                        println(dt.getYear)
                    }
                }
                """
            ),
        }
    )
    request = CompileScalaSourceRequest(
        component=expect_single_expanded_coarsened_target(
            rule_runner, Address(spec_path="", target_name="main")
        ),
        resolve=make_resolve(rule_runner),
    )
    # The compile fails with a missing-symbol error rather than raising.
    fallible = rule_runner.request(FallibleClasspathEntry, [request])
    assert fallible.result == CompileResult.FAILED and fallible.stderr
    assert "error: object joda is not a member of package org" in fallible.stderr
def test_junit(rule_runner: RuleRunner) -> None:
    """`--pytest-junit-xml-dir` produces JUnit XML results under the requested dir."""
    rule_runner.write_files(
        {f"{PACKAGE}/tests.py": GOOD_TEST, f"{PACKAGE}/BUILD": "python_tests()"}
    )
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="tests.py"))
    result = run_pytest(
        rule_runner, target, extra_args=["--pytest-junit-xml-dir=dist/test-results"]
    )
    assert result.exit_code == 0
    assert f"{PACKAGE}/tests.py ." in result.stdout
    assert result.xml_results is not None

    xml_files = rule_runner.request(DigestContents, [result.xml_results.digest])
    report = xml_files[0]
    assert report.path.startswith("dist/test-results")
    assert b"pants_test.tests" in report.content
def test_build_arg_defaults_from_dockerfile(rule_runner: RuleRunner) -> None:
    """Only build args listed in `extra_build_args` (or pants configuration) read
    their values from the environment; other Dockerfile ARGs keep their defaults."""
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                """\
                docker_image(
                  extra_build_args=[
                    "base_version",
                  ]
                )
                """
            ),
            "src/docker/Dockerfile": dedent(
                """\
                ARG base_name=python
                ARG base_version=3.8
                FROM ${base_name}:${base_version}
                ARG NO_DEF
                ENV opt=${NO_DEF}
                """
            ),
        }
    )
    env = {
        "base_name": "no-effect",
        "base_version": "3.9",
    }
    expected_context = {
        "tags": {
            "baseimage": "${base_version}",
            "stage0": "${base_version}",
        },
        # `base_name` is absent: it was not an explicitly declared build arg,
        # so its environment value has no effect.
        "build_args": {
            "base_version": "3.9",
        },
    }
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        runner_options={"env": env},
        expected_files=["src/docker/Dockerfile"],
        expected_interpolation_context=expected_context,
    )
def test_multiple_targets(rule_runner: RuleRunner) -> None:
    """Bandit flags only the insecure file when linting several targets together."""
    rule_runner.write_files(
        {"good.py": GOOD_FILE, "bad.py": BAD_FILE, "BUILD": "python_library(name='t')"}
    )
    targets = [
        rule_runner.get_target(Address("", target_name="t", relative_file_path="good.py")),
        rule_runner.get_target(Address("", target_name="t", relative_file_path="bad.py")),
    ]
    results = run_bandit(rule_runner, targets)
    assert len(results) == 1
    assert results[0].exit_code == 1
    assert "good.py" not in results[0].stdout
    assert "Issue: [B303:blacklist] Use of insecure MD2, MD4, MD5" in results[0].stdout
    # No report file was requested, so the report digest is empty.
    assert results[0].report == EMPTY_DIGEST
def assert_pipenv_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pipfile_lock: dict,
    *,
    expected_targets: set[Target],
) -> None:
    """Write `build_file_entry` and a serialized Pipfile.lock, then assert that the
    `reqs` target generator produces exactly `expected_targets`."""
    rule_runner.write_files({"BUILD": build_file_entry, "Pipfile.lock": dumps(pipfile_lock)})
    parametrizations = rule_runner.request(
        _TargetParametrizations,
        [
            _TargetParametrizationsRequest(
                Address("", target_name="reqs"), description_of_origin="tests"
            )
        ],
    )
    assert set(parametrizations.parametrizations.values()) == expected_targets
def test_multiple_targets(rule_runner: RuleRunner) -> None:
    """Buf reports only the bad proto file when linting several targets together."""
    rule_runner.write_files(
        {
            "foo/v1/good.proto": GOOD_FILE,
            "foo/v1/bad.proto": BAD_FILE,
            "foo/v1/BUILD": "protobuf_sources(name='t')",
        }
    )
    targets = [
        rule_runner.get_target(
            Address("foo/v1", target_name="t", relative_file_path="good.proto")
        ),
        rule_runner.get_target(
            Address("foo/v1", target_name="t", relative_file_path="bad.proto")
        ),
    ]
    results = run_buf(rule_runner, targets)
    assert len(results) == 1
    # `buf lint` exits with code 100 on lint failures.
    assert results[0].exit_code == 100
    assert "good.proto" not in results[0].stdout
    assert "foo/v1/bad.proto:" in results[0].stdout