def test_options_parse_scoped(self):
    """Check that scoped options are computed from both CLI args and env vars."""
    bootstrapper = self._ob(
        args=(
            "./pants",
            "-ldebug",
            "--backend-packages=pants.backend.python",
            "binary",
            "src/python::",
        ),
        env=dict(PANTS_PANTSD="True", PANTS_BUILD_IGNORE='["ignoreme/"]'),
    )
    # Request options for the global scope and for the python-setup subsystem scope.
    requests = [
        Params(Scope(str(GLOBAL_SCOPE)), bootstrapper),
        Params(Scope(str("python-setup")), bootstrapper),
    ]
    global_opts, python_setup_opts = self.scheduler.product_request(ScopedOptions, requests)
    self.assertEqual(LogLevel.DEBUG, global_opts.options.level)
    self.assertEqual(True, global_opts.options.pantsd)
    self.assertEqual(["ignoreme/"], global_opts.options.build_ignore)
    self.assertEqual(["current"], python_setup_opts.options.platforms)
def run_docformatter(
    self,
    targets: List[Target],
    *,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[LintResults, FmtResult]:
    """Run docformatter's lint and fmt pipelines over `targets` and return both results."""
    args = ["--backend-packages=pants.backend.python.lint.docformatter"]
    if passthrough_args:
        args.append(f"--docformatter-args='{passthrough_args}'")
    if skip:
        args.append("--docformatter-skip")
    bootstrapper = create_options_bootstrapper(args=args)
    field_sets = [DocformatterFieldSet.create(tgt) for tgt in targets]
    lint_results = self.request_single_product(
        LintResults, Params(DocformatterRequest(field_sets), bootstrapper)
    )
    # Resolve the raw sources first: fmt needs them as the prior formatter result.
    input_sources = self.request_single_product(
        SourceFiles,
        Params(
            SourceFilesRequest(field_set.sources for field_set in field_sets),
            bootstrapper,
        ),
    )
    fmt_result = self.request_single_product(
        FmtResult,
        Params(
            DocformatterRequest(field_sets, prior_formatter_result=input_sources.snapshot),
            bootstrapper,
        ),
    )
    return lint_results, fmt_result
def assert_sources(
    self,
    expected_files,
    expected_packages,
    expected_namespace_packages,
    expected_package_data,
    addrs,
):
    """Assert that setup.py source computation for `addrs` yields the expected chroot contents."""
    targets = Targets([self.tgt(addr) for addr in addrs])
    srcs = self.request_single_product(
        SetupPySources,
        Params(
            SetupPySourcesRequest(targets, py2=False),
            create_options_bootstrapper(args=["--source-root-patterns=src/python"]),
        ),
    )
    chroot_snapshot = self.request_single_product(Snapshot, Params(srcs.digest))
    # Compare sorted so ordering differences don't matter.
    assert sorted(expected_files) == sorted(chroot_snapshot.files)
    assert sorted(expected_packages) == sorted(srcs.packages)
    assert sorted(expected_namespace_packages) == sorted(srcs.namespace_packages)
    assert expected_package_data == dict(srcs.package_data)
def run_dep_inference(address: Address) -> InferredDependencies:
    """Resolve `address` to a target and infer its conftest dependencies.

    Closes over `self` and `options_bootstrapper` from the enclosing test.
    """
    wrapped = self.request_single_product(WrappedTarget, Params(address, options_bootstrapper))
    request = InferConftestDependencies(wrapped.target[PythonSources])
    return self.request_single_product(
        InferredDependencies, Params(request, options_bootstrapper)
    )
def assert_bfa_resolved(address: Address) -> None:
    """Assert that `address` resolves to a BuildFileAddress rooted at helloworld/BUILD.ext.

    Closes over `self` and `bootstrapper` from the enclosing test.
    """
    expected = BuildFileAddress(rel_path="helloworld/BUILD.ext", address=address)
    bfa = self.request_single_product(BuildFileAddress, Params(address, bootstrapper))
    assert expected == bfa
    # The plural product should wrap the same single resolution.
    bfas = self.request_single_product(
        BuildFileAddresses, Params(Addresses([address]), bootstrapper)
    )
    assert BuildFileAddresses([bfa]) == bfas
def test_union_rules(self):
    """Declared union members resolve; a non-member type is rejected."""
    for member in (UnionA(), UnionB()):
        with self.assertDoesNotRaise():
            _ = self.request_single_product(A, Params(UnionWrapper(member)))
    # Fails due to no union relationship from A -> UnionBase.
    with self._assert_execution_error("Type A is not a member of the UnionBase @union"):
        self.request_single_product(A, Params(UnionWrapper(A())))
def test_non_archive(self) -> None:
    """A file that is not an archive passes through extraction unchanged."""
    snapshot = self.make_snapshot({"test.sh": b"# A shell script"})
    extracted = self.request_single_product(
        ExtractedDigest, Params(MaybeExtractable(snapshot.digest))
    )
    contents = self.request_single_product(DigestContents, Params(extracted.digest))
    expected = DigestContents([FileContent("test.sh", b"# A shell script")])
    assert expected == contents
def test_filesystem_specs_nonexistent_file(self) -> None:
    """A literal file spec that matches nothing errors by default, but can be ignored."""
    specs = FilesystemSpecs([FilesystemLiteralSpec("demo/fake.txt")])
    # Default behavior: an unmatched file argument is an error.
    with pytest.raises(ExecutionError) as exc:
        self.request_single_product(
            AddressesWithOrigins,
            Params(specs, create_options_bootstrapper()),
        )
    assert 'Unmatched glob from file arguments: "demo/fake.txt"' in str(exc.value)
    # With the ignore behavior, the same spec silently resolves to nothing.
    ignored = self.request_single_product(
        AddressesWithOrigins,
        Params(
            specs,
            create_options_bootstrapper(args=["--owners-not-found-behavior=ignore"]),
        ),
    )
    assert not ignored
def _do_test_extract_zip(self, compression) -> None:
    """Build a zip of `self.files` with the given compression and verify extraction."""
    buf = BytesIO()
    with zipfile.ZipFile(buf, "w", compression=compression) as zf:
        for name, content in self.files.items():
            zf.writestr(name, content)
    buf.flush()
    snapshot = self.make_snapshot({"test.zip": buf.getvalue()})
    extracted = self.request_single_product(
        ExtractedDigest, Params(MaybeExtractable(snapshot.digest))
    )
    contents = self.request_single_product(DigestContents, Params(extracted.digest))
    assert self.expected_digest_contents == contents
def assert_chroot(self, expected_files, expected_setup_kwargs, addr):
    """Assert the setup.py chroot built for `addr` has the expected files and kwargs."""
    request = SetupPyChrootRequest(ExportedTarget(self.tgt(addr)), py2=False)
    chroot = self.request_single_product(
        SetupPyChroot, Params(request, create_options_bootstrapper())
    )
    snapshot = self.request_single_product(Snapshot, Params(chroot.digest))
    assert sorted(expected_files) == sorted(snapshot.files)
    # The setup keywords are carried as JSON; decode before comparing.
    assert expected_setup_kwargs == json.loads(chroot.setup_keywords_json)
def test_target_adaptor_not_found(self) -> None:
    """Missing BUILD files and missing target names each produce a helpful error."""
    bootstrapper = create_options_bootstrapper()
    # No BUILD file at all in the directory.
    with pytest.raises(ExecutionError) as exc:
        self.request_single_product(TargetAdaptor, Params(Address("helloworld"), bootstrapper))
    assert "Directory \\'helloworld\\' does not contain any BUILD files" in str(exc)
    # A BUILD file exists, but the requested name does not; the error suggests alternatives.
    self.add_to_build_file("helloworld", "mock_tgt(name='other_tgt')")
    expected_rx_str = re.escape(
        "'helloworld' was not found in namespace 'helloworld'. Did you mean one of:\n :other_tgt"
    )
    with pytest.raises(ExecutionError, match=expected_rx_str):
        self.request_single_product(TargetAdaptor, Params(Address("helloworld"), bootstrapper))
def test_use_params(self):
    """Params may carry multiple inputs; supersets are accepted, subsets are not."""
    a, b = A(), B()
    # Exactly the Params the rule needs.
    self.assertEqual(self.request_single_product(str, Params(a, b)), consumes_a_and_b(a, b))
    # A superset of the needed Params also works.
    self.assertEqual(
        self.request_single_product(str, Params(a, b, self)), consumes_a_and_b(a, b)
    )
    # A subset does not.
    expected_msg = "No installed @rules can compute {} given input Params(A), but".format(
        str.__name__)
    with self.assertRaisesRegex(Exception, re.escape(expected_msg)):
        self.request_single_product(str, Params(a))
def test_transitive_params(self):
    """Params can be satisfied transitively through intermediate conversion rules."""
    a, c = A(), C()
    # C is implicitly converted into a B via transitive_b_c() to satisfy consumes_a_and_b().
    result_str = self.request_single_product(str, Params(a, c))
    self.assertEqual(
        remove_locations_from_traceback(result_str),
        remove_locations_from_traceback(consumes_a_and_b(a, transitive_b_c(c))),
    )
    # An inner Get in transitive_coroutine_rule() can likewise resolve B from C
    # because transitive_b_c() exists.
    with self.assertDoesNotRaise():
        _ = self.request_single_product(D, Params(c))
def create_python_awslambda(self, addr: str) -> Tuple[str, bytes]:
    """Build the AWS lambda for `addr` and return its zip path and file bytes."""
    bootstrapper = create_options_bootstrapper(args=[
        "--backend-packages=pants.backend.awslambda.python",
        "--source-root-patterns=src/python",
    ])
    wrapped = self.request_single_product(
        WrappedTarget, Params(Address.parse(addr), bootstrapper)
    )
    created = self.request_single_product(
        CreatedAWSLambda,
        Params(PythonAwsLambdaFieldSet.create(wrapped.target), bootstrapper),
    )
    contents = self.request_single_product(DigestContents, created.digest)
    # The lambda digest should hold exactly the zip file.
    assert len(contents) == 1
    return created.zip_file_relpath, contents[0].content
def run_mypy(
    self,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> Sequence[TypecheckResult]:
    """Run the MyPy typecheck rule over `targets` with the given option knobs."""
    args = list(self.global_args)
    if config:
        self.create_file(relpath="mypy.ini", contents=config)
        args.append("--mypy-config=mypy.ini")
    if passthrough_args:
        args.append(f"--mypy-args='{passthrough_args}'")
    if skip:
        args.append("--mypy-skip")
    if additional_args:
        args.extend(additional_args)
    request = MyPyRequest(MyPyFieldSet.create(tgt) for tgt in targets)
    result = self.request_single_product(
        TypecheckResults,
        Params(request, create_options_bootstrapper(args=args)),
    )
    return result.results
def test_map_third_party_modules_to_addresses(self) -> None:
    """Third-party requirements map to addresses via module_mapping, normalization,
    and exclusion of ambiguously-owned modules."""
    self.add_to_build_file(
        "3rdparty/python",
        dedent(
            """\
            python_requirement_library(
              name='ansicolors',
              requirements=['ansicolors==1.21'],
              module_mapping={'ansicolors': ['colors']},
            )

            python_requirement_library(
              name='req1',
              requirements=['req1', 'two_owners'],
            )

            python_requirement_library(
              name='un_normalized',
              requirements=['Un-Normalized-Project>3', 'two_owners'],
            )
            """
        ),
    )
    mapping = self.request_single_product(
        ThirdPartyModuleToAddressMapping, Params(create_options_bootstrapper())
    )
    # 'two_owners' is claimed by two libraries, so it is absent from the mapping.
    assert mapping.mapping == FrozenDict({
        "colors": Address.parse("3rdparty/python:ansicolors"),
        "req1": Address.parse("3rdparty/python:req1"),
        "un_normalized_project": Address.parse("3rdparty/python:un_normalized"),
    })
def assert_injected(
    self,
    *,
    source_roots: List[str],
    original_declared_files: List[str],
    original_undeclared_files: List[str],
    expected_discovered: List[str],
) -> None:
    """Assert that ancestor __init__.py discovery finds exactly `expected_discovered`
    and preserves the contents of both declared and undeclared files."""
    for fp in original_undeclared_files:
        self.create_file(fp, "# undeclared")
    declared_snapshot = self.make_snapshot(
        {fp: "# declared" for fp in original_declared_files}
    )
    bootstrapper = create_options_bootstrapper(
        args=[f"--source-root-patterns={source_roots}"]
    )
    result = self.request_single_product(
        AncestorFiles,
        Params(AncestorFilesRequest("__init__.py", declared_snapshot), bootstrapper),
    ).snapshot
    assert list(result.files) == sorted(expected_discovered)
    # Every discovered __init__.py must carry the marker content matching its origin.
    materialized = self.request_single_product(DigestContents, result.digest)
    for file_content in materialized:
        path = file_content.path
        if not path.endswith("__init__.py"):
            continue
        assert path in original_declared_files or path in expected_discovered
        expected = b"# declared" if path in original_declared_files else b"# undeclared"
        assert file_content.content == expected
def test_target_adaptor_parsed_correctly(self) -> None:
    """A BUILD entry parses into a TargetAdaptor without field validation or dep following."""
    self.add_to_build_file(
        "helloworld",
        dedent(
            """\
            mock_tgt(
              fake_field=42,
              dependencies=[
                # Because we don't follow dependencies or even parse dependencies, this
                # self-cycle should be fine.
                "helloworld",
                ":sibling",
                "helloworld/util",
                "helloworld/util:tests",
              ],
            )
            """
        ),
    )
    adaptor = self.request_single_product(
        TargetAdaptor, Params(Address("helloworld"), create_options_bootstrapper())
    )
    assert adaptor.name == "helloworld"
    assert adaptor.type_alias == "mock_tgt"
    assert adaptor.kwargs["dependencies"] == [
        "helloworld",
        ":sibling",
        "helloworld/util",
        "helloworld/util:tests",
    ]
    # NB: TargetAdaptors do not validate what fields are valid. The Target API should error
    # when encountering this, but it's fine at this stage.
    assert adaptor.kwargs["fake_field"] == 42
def run_pylint(
    self,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> LintResults:
    """Run the Pylint lint rule over `targets` with the given option knobs."""
    args = [
        "--backend-packages=pants.backend.python.lint.pylint",
        "--source-root-patterns=['src/python', 'tests/python']",
    ]
    if config:
        self.create_file(relpath="pylintrc", contents=config)
        args.append("--pylint-config=pylintrc")
    if passthrough_args:
        args.append(f"--pylint-args='{passthrough_args}'")
    if skip:
        args.append("--pylint-skip")
    if additional_args:
        args.extend(additional_args)
    request = PylintRequest(PylintFieldSet.create(tgt) for tgt in targets)
    return self.request_single_product(
        LintResults,
        Params(request, create_options_bootstrapper(args=args)),
    )
def run_bandit(
    self,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> LintResults:
    """Run the Bandit lint rule over `targets` with the given option knobs."""
    args = ["--backend-packages=pants.backend.python.lint.bandit"]
    if config:
        self.create_file(relpath=".bandit", contents=config)
        args.append("--bandit-config=.bandit")
    if passthrough_args:
        args.append(f"--bandit-args={passthrough_args}")
    if skip:
        args.append("--bandit-skip")
    if additional_args:
        args.extend(additional_args)
    request = BanditRequest(BanditFieldSet.create(tgt) for tgt in targets)
    return self.request_single_product(
        LintResults,
        Params(request, create_options_bootstrapper(args=args)),
    )
def run_pylint(
    self,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> Sequence[LintResult]:
    """Run the Pylint rule over `targets` and return the individual lint results."""
    args = list(self.global_args)
    if config:
        self.create_file(relpath="pylintrc", contents=config)
        args.append("--pylint-config=pylintrc")
    if passthrough_args:
        args.append(f"--pylint-args='{passthrough_args}'")
    if skip:
        args.append("--pylint-skip")
    if additional_args:
        args.extend(additional_args)
    request = PylintRequest(PylintFieldSet.create(tgt) for tgt in targets)
    results = self.request_single_product(
        LintResults,
        Params(request, create_options_bootstrapper(args=args)),
    )
    return results.results
def make_target(
    self,
    source_files: List[FileContent],
    *,
    package: Optional[str] = None,
    name: str = "target",
    interpreter_constraints: Optional[str] = None,
    dependencies: Optional[List[Address]] = None,
) -> Target:
    """Materialize `source_files`, declare a python_library over them, and return the Target."""
    if not package:
        package = self.package
    for source_file in source_files:
        self.create_file(source_file.path, source_file.content.decode())
    # The BUILD sources field uses just the file names relative to the package.
    source_globs = [PurePath(source_file.path).name for source_file in source_files]
    self.add_to_build_file(
        package,
        dedent(
            f"""\
            python_library(
              name={repr(name)},
              sources={source_globs},
              dependencies={[str(dep) for dep in dependencies or ()]},
              compatibility={repr(interpreter_constraints)},
            )
            """
        ),
    )
    wrapped = self.request_single_product(
        WrappedTarget,
        Params(
            Address(package, target_name=name),
            create_options_bootstrapper(args=self.global_args),
        ),
    )
    return wrapped.target
def test_filter_field_sets(self) -> None:
    """Field sets with empty sources are filtered out; the rest pass through intact."""

    @dataclass(frozen=True)
    class MockFieldSet(FieldSet):
        sources: Sources
        # Another field to demo that we will preserve the whole FieldSet data structure.
        tags: Tags

    self.create_file("f1.txt")

    def make_field_set(target_name: str, source_globs) -> MockFieldSet:
        addr = Address("", target_name=target_name)
        return MockFieldSet(
            addr, Sources(source_globs, address=addr), Tags(None, address=addr)
        )

    valid_field_set = make_field_set("valid", ["f1.txt"])
    empty_field_set = make_field_set("empty", None)
    result = self.request_single_product(
        FieldSetsWithSources,
        Params(
            FieldSetsWithSourcesRequest([valid_field_set, empty_field_set]),
            create_options_bootstrapper(),
        ),
    )
    assert tuple(result) == (valid_field_set,)
def test_transitive_targets_tolerates_subtarget_cycles(self) -> None:
    """For generated subtargets, we should tolerate cycles between targets.

    This only works with generated subtargets, so we use explicit file dependencies in this
    test.
    """
    self.create_files("", ["dep.txt", "t1.txt", "t2.txt"])
    self.add_to_build_file(
        "",
        dedent(
            """\
            target(name='dep', sources=['dep.txt'])
            target(name='t1', sources=['t1.txt'], dependencies=['dep.txt:dep', 't2.txt:t2'])
            target(name='t2', sources=['t2.txt'], dependencies=['t1.txt:t1'])
            """
        ),
    )
    result = self.request_single_product(
        TransitiveTargets,
        Params(Addresses([Address("", target_name="t2")]), create_options_bootstrapper()),
    )
    assert len(result.roots) == 1
    assert result.roots[0].address == Address("", relative_file_path="t2.txt", target_name="t2")
    # The cycle is tolerated: t2's own file subtarget shows up in its dependency closure.
    assert [tgt.address for tgt in result.dependencies] == [
        Address("", relative_file_path="t1.txt", target_name="t1"),
        Address("", relative_file_path="dep.txt", target_name="dep"),
        Address("", relative_file_path="t2.txt", target_name="t2"),
    ]
def assert_requirements(self, expected_req_strs, addr):
    """Assert the exported target at `addr` computes exactly the expected requirement strings."""
    owner = DependencyOwner(ExportedTarget(self.tgt(addr)))
    reqs = self.request_single_product(
        ExportedTargetRequirements,
        Params(owner, create_options_bootstrapper()),
    )
    assert sorted(expected_req_strs) == list(reqs)
def run_mypy(
    self,
    targets: List[TargetWithOrigin],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
    additional_args: Optional[List[str]] = None,
) -> TypecheckResults:
    """Run the MyPy typecheck rule over `targets` and return the aggregated results."""
    args = [
        "--backend-packages=pants.backend.python",
        "--backend-packages=pants.backend.python.typecheck.mypy",
        "--source-root-patterns=['src/python', 'tests/python']",
    ]
    if config:
        self.create_file(relpath="mypy.ini", contents=config)
        args.append("--mypy-config=mypy.ini")
    if passthrough_args:
        args.append(f"--mypy-args='{passthrough_args}'")
    if skip:
        args.append("--mypy-skip")
    if additional_args:
        args.extend(additional_args)
    request = MyPyRequest(MyPyFieldSet.create(tgt) for tgt in targets)
    return self.request_single_product(
        TypecheckResults,
        Params(request, create_options_bootstrapper(args=args)),
    )
def _do_test_extract_tar(self, compression) -> None:
    """Build a tar of `self.files` with the given compression and verify extraction."""
    buf = BytesIO()
    mode = f"w:{compression}" if compression else "w"
    with tarfile.open(mode=mode, fileobj=buf) as tf:
        for name, content in self.files.items():
            info = tarfile.TarInfo(name)
            info.size = len(content)
            tf.addfile(info, BytesIO(content))
    # The extension drives archive-type detection in the extraction rule.
    ext = f"tar.{compression}" if compression else "tar"
    snapshot = self.make_snapshot({f"test.{ext}": buf.getvalue()})
    extracted = self.request_single_product(
        ExtractedDigest, Params(MaybeExtractable(snapshot.digest))
    )
    contents = self.request_single_product(DigestContents, Params(extracted.digest))
    assert self.expected_digest_contents == contents
def run_dep_inference(
    address: Address, *, enable_string_imports: bool = False
) -> InferredDependencies:
    """Resolve `address` and infer its Python import dependencies.

    Closes over `self` from the enclosing test.
    """
    args = [
        "--backend-packages=pants.backend.python",
        "--source-root-patterns=src/python",
    ]
    if enable_string_imports:
        args.append("--python-infer-string-imports")
    bootstrapper = create_options_bootstrapper(args=args)
    wrapped = self.request_single_product(WrappedTarget, Params(address, bootstrapper))
    request = InferPythonDependencies(wrapped.target[PythonSources])
    return self.request_single_product(InferredDependencies, Params(request, bootstrapper))
def assert_owned(self, owned: Iterable[str], exported: str):
    """Assert the exported target at `exported` owns exactly the `owned` target specs."""
    owned_deps = self.request_single_product(
        OwnedDependencies,
        Params(
            DependencyOwner(ExportedTarget(self.tgt(exported))),
            create_options_bootstrapper(),
        ),
    )
    actual_specs = [od.target.address.spec for od in owned_deps]
    assert sorted(owned) == sorted(actual_specs)
def assert_injected(deps_cls: Type[Dependencies], *, injected: List[Address]) -> None:
    """Assert that dependency injection for `deps_cls` yields `injected` plus //:provided.

    Closes over `self` from the enclosing test. When anything is injected, an ignore
    (`!//:injected2`) is also supplied to exercise subtraction.
    """
    provided_deps = ["//:provided"]
    if injected:
        provided_deps.append("!//:injected2")
    deps_field = deps_cls(provided_deps, address=Address("", target_name="target"))
    result = self.request_single_product(
        Addresses,
        Params(DependenciesRequest(deps_field), create_options_bootstrapper()),
    )
    expected = Addresses(sorted([*injected, Address("", target_name="provided")]))
    assert expected == result