def find_valid_field_sets(
    superclass: Type,
    specs: Iterable[Spec],
    *,
    no_applicable_behavior: NoApplicableTargetsBehavior = NoApplicableTargetsBehavior.ignore,
    expect_single_config: bool = False,
) -> TargetRootsToFieldSets:
    """Resolve field sets of type `superclass` for the targets matched by `specs`.

    `no_applicable_behavior` and `expect_single_config` are forwarded to the
    underlying `TargetRootsToFieldSetsRequest`.
    """
    # Ignores are always empty here; only the includes are driven by the given specs.
    full_specs = Specs(
        includes=RawSpecs.create(
            specs,
            convert_dir_literal_to_address_literal=True,
            description_of_origin="tests",
        ),
        ignores=RawSpecs(description_of_origin="tests"),
    )
    field_set_request = TargetRootsToFieldSetsRequest(
        superclass,
        goal_description="fake",
        no_applicable_targets_behavior=no_applicable_behavior,
        expect_single_field_set=expect_single_config,
    )
    return rule_runner.request(TargetRootsToFieldSets, [field_set_request, full_specs])
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    """Infer dependencies on `terraform_module` targets referenced from Terraform sources."""
    sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))

    # Only `.tf` files are fed to the parser; OrderedSet keeps a deterministic order.
    tf_files = OrderedSet(f for f in sources.snapshot.files if f.endswith(".tf"))

    parse_result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=sources.snapshot.digest,
            paths=tuple(tf_files),
        ),
    )
    # One candidate spec path per non-empty stdout line.
    spec_paths = [line for line in parse_result.stdout.decode("utf-8").split("\n") if line]

    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidates = await Get(
        Targets,
        RawSpecs(
            dir_globs=tuple(DirGlobSpec(path) for path in spec_paths),
            unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
            description_of_origin="the `terraform_module` dependency inference rule",
        ),
    )

    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1
    # terraform_module per directory.
    module_addresses = [
        tgt.address for tgt in candidates if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(module_addresses)
def test_resolve_with_a_jar(rule_runner: RuleRunner) -> None:
    """A `jvm_artifact` with an explicit `jar=` source should resolve to a lockfile entry
    that points at the local jar (via `pants_address`) rather than a remote fetch."""
    rule_runner.write_files(
        {
            "BUILD": textwrap.dedent(
                """\
                jvm_artifact(
                  name="jeremy",
                  group="jeremy",
                  artifact="jeremy",
                  version="4.13.2",
                  jar="jeremy.jar",
                )
                """
            ),
            # Arbitrary 10-byte payload; its SHA-256 is asserted below.
            "jeremy.jar": "hello dave",
        }
    )
    targets = rule_runner.request(
        Targets,
        [RawSpecs(recursive_globs=(RecursiveGlobSpec(""),), description_of_origin="tests")],
    )
    jeremy_target = targets[0]

    jar_field = jeremy_target[JvmArtifactJarSourceField]

    requirement = ArtifactRequirement(
        coordinate=Coordinate(
            group="jeremy",
            artifact="jeremy",
            version="4.13.2",
        ),
        jar=jar_field,
    )

    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([requirement])],
    )

    coordinate = requirement.coordinate
    # The lockfile entry must carry the address of the local jar and the digest of its
    # literal contents ("hello dave" -> 10 bytes).
    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(
            CoursierLockfileEntry(
                coord=Coordinate(
                    group=coordinate.group,
                    artifact=coordinate.artifact,
                    version=coordinate.version,
                ),
                file_name=f"{coordinate.group}_{coordinate.artifact}_{coordinate.version}.jar",
                direct_dependencies=Coordinates([]),
                dependencies=Coordinates([]),
                file_digest=FileDigest(
                    fingerprint="55b9afa8d7776cd6c318eec51f506e9c7f66c247dcec343d4667f5f269714f86",
                    serialized_bytes_length=10,
                ),
                pants_address=jar_field.address.spec,
            ),
        )
    )
def test_get_target_data(rule_runner: RuleRunner) -> None:
    """`TargetDatas` should expand generator targets and report each target's sources and
    expanded dependencies (file-level addresses like `foo/a.txt:baz`)."""
    rule_runner.write_files(
        {
            "foo/BUILD": dedent(
                """\
                target(name="bar", dependencies=[":baz"])
                files(name="baz", sources=["*.txt"])
                """
            ),
            "foo/a.txt": "",
            "foo/b.txt": "",
        }
    )
    tds = rule_runner.request(
        TargetDatas,
        [RawSpecs(recursive_globs=(RecursiveGlobSpec("foo"),), description_of_origin="tests")],
    )
    # Expected order: the generic target, the `files` generator, then its generated
    # file-level targets. Non-source targets report `None`/empty tuples appropriately.
    assert list(tds) == [
        TargetData(
            GenericTarget({"dependencies": [":baz"]}, Address("foo", target_name="bar")),
            None,
            ("foo/a.txt:baz", "foo/b.txt:baz"),
        ),
        TargetData(
            FilesGeneratorTarget({"sources": ["*.txt"]}, Address("foo", target_name="baz")),
            ("foo/a.txt", "foo/b.txt"),
            ("foo/a.txt:baz", "foo/b.txt:baz"),
        ),
        TargetData(
            FileTarget(
                {"source": "a.txt"}, Address("foo", relative_file_path="a.txt", target_name="baz")
            ),
            ("foo/a.txt",),
            (),
        ),
        TargetData(
            FileTarget(
                {"source": "b.txt"}, Address("foo", relative_file_path="b.txt", target_name="baz")
            ),
            ("foo/b.txt",),
            (),
        ),
    ]
def run(enable_resolves: bool) -> ExportResults:
    """Run the `export` goal over `src/foo` and assert the venv post-processing commands.

    Uses `rule_runner` and `current_interpreter` from the enclosing scope. Each export
    result is expected to contain exactly two post-processing commands: one that builds
    the venv from `requirements.pex` via `pex.tools`, and one that removes the pex file.
    """
    rule_runner.set_options(
        [
            f"--python-interpreter-constraints=['=={current_interpreter}']",
            "--python-resolves={'a': 'lock.txt', 'b': 'lock.txt'}",
            f"--python-enable-resolves={enable_resolves}",
            # Turn off lockfile validation to make the test simpler.
            "--python-invalid-lockfile-behavior=ignore",
        ],
        env_inherit={"PATH", "PYENV_ROOT"},
    )
    targets = rule_runner.request(
        Targets,
        [RawSpecs(recursive_globs=(RecursiveGlobSpec("src/foo"),), description_of_origin="tests")],
    )
    all_results = rule_runner.request(ExportResults, [ExportVenvsRequest(targets)])

    for result in all_results:
        assert len(result.post_processing_cmds) == 2

        ppc0 = result.post_processing_cmds[0]
        assert ppc0.argv[1:] == (
            # The first arg is the full path to the python interpreter, which we
            # don't easily know here, so we ignore it in this comparison.
            os.path.join("{digest_root}", ".", "pex"),
            os.path.join("{digest_root}", "requirements.pex"),
            "venv",
            "--pip",
            "--collisions-ok",
            "--remove=all",
            f"{{digest_root}}/{current_interpreter}",
        )
        assert ppc0.extra_env == FrozenDict({"PEX_MODULE": "pex.tools"})

        ppc1 = result.post_processing_cmds[1]
        assert ppc1.argv == (
            "rm",
            "-f",
            os.path.join("{digest_root}", ".", "pex"),
        )
        assert ppc1.extra_env == FrozenDict()

    return all_results
def test_specs_to_dirs() -> None:
    """`specs_to_dirs` maps address/dir literal specs to directory paths and rejects
    anything that is not a plain directory-level spec."""
    origin = "tests"

    # An empty spec set maps to the repo root.
    assert specs_to_dirs(RawSpecs(description_of_origin=origin)) == ("",)

    # A single address literal or dir literal maps to its directory.
    one_addr = RawSpecs(
        address_literals=(AddressLiteralSpec("src/python/foo"),), description_of_origin=origin
    )
    assert specs_to_dirs(one_addr) == ("src/python/foo",)
    one_dir = RawSpecs(
        dir_literals=(DirLiteralSpec("src/python/foo"),), description_of_origin=origin
    )
    assert specs_to_dirs(one_dir) == ("src/python/foo",)

    # Multiple address literals preserve order.
    two_addrs = RawSpecs(
        address_literals=(
            AddressLiteralSpec("src/python/foo"),
            AddressLiteralSpec("src/python/bar"),
        ),
        description_of_origin=origin,
    )
    assert specs_to_dirs(two_addrs) == ("src/python/foo", "src/python/bar")

    # File specs are not directories.
    with pytest.raises(ValueError):
        specs_to_dirs(
            RawSpecs(
                file_literals=(FileLiteralSpec("src/python/foo.py"),), description_of_origin=origin
            )
        )

    # An explicit target component means the spec is not a bare directory.
    with pytest.raises(ValueError):
        specs_to_dirs(
            RawSpecs(
                address_literals=(AddressLiteralSpec("src/python/bar", "tgt"),),
                description_of_origin=origin,
            )
        )

    # A generated-target component is likewise rejected.
    with pytest.raises(ValueError):
        specs_to_dirs(
            RawSpecs(
                address_literals=(
                    AddressLiteralSpec(
                        "src/python/bar", target_component=None, generated_component="gen"
                    ),
                ),
                description_of_origin=origin,
            )
        )
async def find_nearest_go_mod(request: OwningGoModRequest) -> OwningGoMod:
    """Locate the closest ancestor `go_mod` target for the requesting address."""
    # We don't expect `go_mod` targets to be generated, so we can use UnexpandedTargets.
    candidates = await Get(
        UnexpandedTargets,
        RawSpecs(
            ancestor_globs=(AncestorGlobSpec(request.address.spec_path),),
            description_of_origin="the `OwningGoMod` rule",
        ),
    )
    owners = [tgt for tgt in candidates if tgt.has_field(GoModSourcesField)]
    if not owners:
        raise InvalidTargetException(
            f"The target {request.address} does not have a `go_mod` target in its BUILD file or "
            "any ancestor BUILD files. To fix, please make sure your project has a `go.mod` file "
            f"and add a `go_mod` target (you can run `{bin_name()} tailor` to do this)."
        )
    # All candidates are ancestors, so their spec_paths are prefixes of the request's
    # spec_path; the longest (max) spec_path is the nearest ancestor. `max` returns the
    # first of any ties, matching a stable descending sort.
    nearest = max(owners, key=lambda tgt: tgt.address.spec_path)
    return OwningGoMod(nearest.address)
async def restrict_conflicting_sources(ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    """Trim a putative target's sources so they don't overlap any existing target's sources.

    If any existing target in an ancestor directory already owns one of the putative
    target's source files, the putative target's sources are restricted and a comment is
    attached naming the conflicting owners.
    """
    # Expand the putative target's owned source globs to concrete file paths.
    source_paths = await Get(
        Paths,
        PathGlobs(
            SourcesField.prefix_glob_with_dirpath(ptgt.path, glob) for glob in ptgt.owned_sources
        ),
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    # Any existing owner must live in (an ancestor of) one of the source directories.
    possible_owners = await Get(
        UnexpandedTargets,
        RawSpecs(
            ancestor_globs=tuple(AncestorGlobSpec(d) for d in source_dirs),
            description_of_origin="the `tailor` goal",
        ),
    )
    possible_owners_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(t.get(SourcesField))) for t in possible_owners
    )
    conflicting_targets = []
    # NOTE(review): this pairing assumes MultiGet preserves request order, so each
    # target is zipped with its own resolved sources — confirm against engine docs.
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources") or [])  # type: ignore[arg-type]
        orig_sources_str = (
            f"[{explicit_srcs_str}]" if explicit_srcs_str else f"the default for {ptgt.type_alias}"
        )
        # Restrict the sources and leave a breadcrumb comment in the generated BUILD file.
        ptgt = ptgt.restrict_sources().add_comments(
            [f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"]
            + [f"# - {caddr}" for caddr in conflicting_addrs]
        )
    return DisjointSourcePutativeTarget(ptgt)
async def find_putative_targets(
    req: PutativePythonTargetsRequest,
    all_owned_sources: AllOwnedSources,
    python_setup: PythonSetup,
) -> PutativeTargets:
    """Propose Python targets for unowned sources, requirements files, and entry points.

    Three independent phases, each gated by a `PythonSetup` option:
      1. source targets for unowned `.py`/`.pyi` files,
      2. requirements targets for unowned requirements/Pipfile/pyproject files,
      3. `pex_binary` targets for entry-point modules not already owned by one.
    """
    pts = []

    if python_setup.tailor_source_targets:
        # Find library/test/test_util targets.
        all_py_files_globs: PathGlobs = req.path_globs("*.py", "*.pyi")
        all_py_files = await Get(Paths, PathGlobs, all_py_files_globs)
        unowned_py_files = set(all_py_files.files) - set(all_owned_sources)
        classified_unowned_py_files = classify_source_files(unowned_py_files)
        for tgt_type, paths in classified_unowned_py_files.items():
            for dirname, filenames in group_by_dir(paths).items():
                # Tests and test-utils get conventional target names; sources get the default.
                name: str | None
                if issubclass(tgt_type, PythonTestsGeneratorTarget):
                    name = "tests"
                elif issubclass(tgt_type, PythonTestUtilsGeneratorTarget):
                    name = "test_utils"
                else:
                    name = None
                # Optionally skip directories whose only source is `__init__.py`.
                if (
                    python_setup.tailor_ignore_solitary_init_files
                    and tgt_type == PythonSourcesGeneratorTarget
                    and filenames == {"__init__.py"}
                ):
                    continue
                pts.append(
                    PutativeTarget.for_target_type(
                        tgt_type, path=dirname, name=name, triggering_sources=sorted(filenames)
                    )
                )

    if python_setup.tailor_requirements_targets:
        # Find requirements files.
        (
            all_requirements_files,
            all_pipenv_lockfile_files,
            all_pyproject_toml_contents,
        ) = await MultiGet(
            Get(DigestContents, PathGlobs, req.path_globs("*requirements*.txt")),
            Get(DigestContents, PathGlobs, req.path_globs("Pipfile.lock")),
            Get(DigestContents, PathGlobs, req.path_globs("pyproject.toml")),
        )

        def add_req_targets(files: Iterable[FileContent], alias: str, target_name: str) -> None:
            # Propose one `alias` target per unowned file, skipping files that fail validation.
            contents = {i.path: i.content for i in files}
            unowned_files = set(contents) - set(all_owned_sources)
            for fp in unowned_files:
                path, name = os.path.split(fp)

                try:
                    validate(fp, contents[fp], alias)
                except Exception as e:
                    # Unparseable file: warn and leave it for the user to handle manually.
                    logger.warning(
                        f"An error occurred when validating `{fp}`: {e}.\n\n"
                        "You'll need to create targets for its contents manually.\n"
                        "To silence this error in future, see "
                        "https://www.pantsbuild.org/docs/reference-tailor#section-ignore-paths \n"
                    )
                    continue

                pts.append(
                    PutativeTarget(
                        path=path,
                        name=target_name,
                        type_alias=alias,
                        triggering_sources=[fp],
                        owned_sources=[name],
                        # `python_requirements` defaults its `source` to requirements.txt,
                        # so only non-default filenames need an explicit `source=`.
                        kwargs=(
                            {}
                            if alias != "python_requirements" or name == "requirements.txt"
                            else {"source": name}
                        ),
                    )
                )

        def validate(path: str, contents: bytes, alias: str) -> None:
            # Dispatch to the parser matching the target alias; unknown aliases are a no-op.
            if alias == "python_requirements":
                return validate_python_requirements(path, contents)
            elif alias == "pipenv_requirements":
                return validate_pipenv_requirements(contents)
            elif alias == "poetry_requirements":
                return validate_poetry_requirements(contents)

        def validate_python_requirements(path: str, contents: bytes) -> None:
            # Fully consume the parse to surface any errors.
            for _ in parse_requirements_file(contents.decode(), rel_path=path):
                pass

        def validate_pipenv_requirements(contents: bytes) -> None:
            parse_pipenv_requirements(contents)

        def validate_poetry_requirements(contents: bytes) -> None:
            p = PyProjectToml(PurePath(), PurePath(), contents.decode())
            parse_pyproject_toml(p)

        add_req_targets(all_requirements_files, "python_requirements", "reqs")
        add_req_targets(all_pipenv_lockfile_files, "pipenv_requirements", "pipenv")
        # Only pyproject.toml files that actually have a poetry section.
        add_req_targets(
            {fc for fc in all_pyproject_toml_contents if b"[tool.poetry" in fc.content},
            "poetry_requirements",
            "poetry",
        )

    if python_setup.tailor_pex_binary_targets:
        # Find binary targets.

        # Get all files whose content indicates that they are entry points or are __main__.py files.
        digest_contents = await Get(DigestContents, PathGlobs, all_py_files_globs)
        all_main_py = await Get(Paths, PathGlobs, req.path_globs("__main__.py"))
        entry_points = [
            file_content.path
            for file_content in digest_contents
            if is_entry_point(file_content.content)
        ] + list(all_main_py.files)

        # Get the modules for these entry points.
        src_roots = await Get(
            SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(entry_points)
        )
        module_to_entry_point = {}
        for entry_point in entry_points:
            entry_point_path = PurePath(entry_point)
            src_root = src_roots.path_to_root[entry_point_path]
            stripped_entry_point = entry_point_path.relative_to(src_root.path)
            module = module_from_stripped_path(stripped_entry_point)
            module_to_entry_point[module] = entry_point

        # Get existing binary targets for these entry points.
        entry_point_dirs = {os.path.dirname(entry_point) for entry_point in entry_points}
        possible_existing_binary_targets = await Get(
            UnexpandedTargets,
            RawSpecs(
                ancestor_globs=tuple(AncestorGlobSpec(d) for d in entry_point_dirs),
                description_of_origin="the `pex_binary` tailor rule",
            ),
        )
        possible_existing_binary_entry_points = await MultiGet(
            Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest(t[PexEntryPointField]))
            for t in possible_existing_binary_targets
            if t.has_field(PexEntryPointField)
        )
        possible_existing_entry_point_modules = {
            rep.val.module for rep in possible_existing_binary_entry_points if rep.val
        }
        unowned_entry_point_modules = (
            module_to_entry_point.keys() - possible_existing_entry_point_modules
        )

        # Generate new targets for entry points that don't already have one.
        for entry_point_module in unowned_entry_point_modules:
            entry_point = module_to_entry_point[entry_point_module]
            path, fname = os.path.split(entry_point)
            name = os.path.splitext(fname)[0]
            pts.append(
                PutativeTarget.for_target_type(
                    target_type=PexBinary,
                    path=path,
                    name=name,
                    triggering_sources=tuple(),
                    kwargs={"entry_point": fname},
                )
            )

    return PutativeTargets(pts)
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:

     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the
    ownership is ambiguous and an error is raised. If there is no exported target that
    depends on T and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AncestorGlobSpec(target.address.spec_path)
    ancestor_tgts = await Get(
        Targets,
        RawSpecs(
            ancestor_globs=(ancestor_addrs,),
            description_of_origin="the `python_distribution` `package` rules",
        ),
    )
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        (t for t in ancestor_tgts if t.has_field(PythonProvidesField)),
        key=lambda t: t.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    # Awaiting inside the loop is deliberate: we only compute transitive closures until
    # the first (closest) owner is found, rather than for every candidate up front.
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest([exported_ancestor.address])
        )
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get(
                    TransitiveTargets, TransitiveTargetsRequest([sibling.address])
                )
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                # Multiple owners at the same depth: ambiguous, so refuse to pick one.
                all_owners = [exported_ancestor] + sibling_owners
                raise AmbiguousOwnerError(
                    softwrap(
                        f"""
                        Found multiple sibling python_distribution targets that are the closest
                        ancestor dependees of {target.address} and are therefore candidates to
                        own it: {', '.join(o.address.spec for o in all_owners)}. Only a
                        single such owner is allowed, to avoid ambiguity.
                        """
                    )
                )
            return ExportedTarget(owner)
    raise NoOwnerError(
        softwrap(
            f"""
            No python_distribution target found to own {target.address}. Note that
            the owner must be in or above the owned target's directory, and must
            depend on it (directly or indirectly).
            """
        )
    )
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
) -> Specs:
    """Determine the specs for a given Pants run.

    Specs come either from the CLI arguments or, when `--changed-*` options are used,
    from git change detection. Mixing the two is an error.
    """
    global_options = options.for_global_scope()
    unmatched_cli_globs = global_options.unmatched_cli_globs.to_glob_match_error_behavior()
    convert_dir_literal_to_address_literal = (
        global_options.use_deprecated_directory_cli_args_semantics
    )
    # Warn users still relying on the old default before the 2.14 semantics change.
    if global_options.is_default("use_deprecated_directory_cli_args_semantics"):
        warn_or_error(
            "2.14.0.dev0",
            "`use_deprecated_directory_cli_args_semantics` defaulting to True",
            softwrap(
                f"""
                Currently, a directory argument like `{bin_name()} test dir` is shorthand for the
                target `dir:dir`, i.e. the target that leaves off `name=`.

                In Pants 2.14, by default, a directory argument will instead match all
                targets/files in the directory.

                To opt into the new and more intuitive semantics early, set
                `use_deprecated_directory_cli_args_semantics = false` in the `[GLOBAL]`
                section in `pants.toml`. Otherwise, set to `true` to silence this warning.
                """
            ),
        )
    specs = SpecsParser().parse_specs(
        options.specs,
        description_of_origin="CLI arguments",
        unmatched_glob_behavior=unmatched_cli_globs,
        convert_dir_literal_to_address_literal=convert_dir_literal_to_address_literal,
    )

    changed_options = ChangedOptions.from_options(options.for_scope("changed"))
    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    # `--changed-*` and explicit spec arguments are mutually exclusive.
    if specs and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        specs_description = specs.arguments_provided_description()
        assert specs_description is not None
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. You can "
            f"only use `{changed_name}` or use normal arguments."
        )

    if not changed_options.provided:
        return specs

    # Change detection requires a git worktree.
    (git_binary,) = session.product_request(GitBinary, [Params(GitBinaryRequest())])
    (maybe_git_worktree,) = session.product_request(
        MaybeGitWorktree, [Params(GitWorktreeRequest(), git_binary)]
    )
    if not maybe_git_worktree.git_worktree:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )

    changed_files = tuple(changed_options.changed_files(maybe_git_worktree.git_worktree))
    file_literal_specs = tuple(FileLiteralSpec(f) for f in changed_files)

    changed_request = ChangedRequest(changed_files, changed_options.dependees)
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)]
    )
    logger.debug("changed addresses: %s", changed_addresses)

    # Convert each changed address into an AddressLiteralSpec.
    address_literal_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(address.spec, description_of_origin="`--changed-since`")
        address_literal_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                target_component=address_input.target_component,
                generated_component=address_input.generated_component,
                parameters=address_input.parameters,
            )
        )
    return Specs(
        includes=RawSpecs(
            # We need both address_literals and file_literals to cover all our edge cases, including
            # target-aware vs. target-less goals, e.g. `list` vs `count-loc`.
            address_literals=tuple(address_literal_specs),
            file_literals=file_literal_specs,
            unmatched_glob_behavior=unmatched_cli_globs,
            filter_by_global_options=True,
            from_change_detection=True,
            description_of_origin="`--changed-since`",
        ),
        ignores=RawSpecs(description_of_origin="`--changed-since`"),
    )
async def find_putative_go_targets(
    request: PutativeGoTargetsRequest,
    all_owned_sources: AllOwnedSources,
    golang_subsystem: GolangSubsystem,
) -> PutativeTargets:
    """Propose `go_mod`, `go_package`, and `go_binary` targets for unowned Go files.

    Each phase is gated by a `GolangSubsystem` tailor option. Package and binary
    proposals are limited to directories that have a `go.mod` ancestor.
    """
    putative_targets = []

    # go.mod locations are needed by later phases even when go_mod tailoring is off.
    _all_go_mod_paths = await Get(Paths, PathGlobs, request.path_globs("go.mod"))
    all_go_mod_files = set(_all_go_mod_paths.files)
    all_go_mod_dirs = {os.path.dirname(fp) for fp in all_go_mod_files}

    if golang_subsystem.tailor_go_mod_targets:
        unowned_go_mod_files = all_go_mod_files - set(all_owned_sources)
        for dirname, filenames in group_by_dir(unowned_go_mod_files).items():
            putative_targets.append(
                PutativeTarget.for_target_type(
                    GoModTarget,
                    path=dirname,
                    name=None,
                    triggering_sources=sorted(filenames),
                )
            )

    if golang_subsystem.tailor_package_targets:
        all_go_files = await Get(Paths, PathGlobs, request.path_globs("*.go"))
        unowned_go_files = set(all_go_files.files) - set(all_owned_sources)
        for dirname, filenames in group_by_dir(unowned_go_files).items():
            # Ignore paths that have `testdata` or `vendor` in them.
            # From `go help packages`: Note, however, that a directory named vendor that itself
            # contains code is not a vendored package: cmd/vendor would be a command named vendor.
            dirname_parts = PurePath(dirname).parts
            if "testdata" in dirname_parts or "vendor" in dirname_parts[0:-1]:
                continue
            if not has_go_mod_ancestor(dirname, all_go_mod_dirs):
                continue
            putative_targets.append(
                PutativeTarget.for_target_type(
                    GoPackageTarget,
                    path=dirname,
                    name=None,
                    triggering_sources=sorted(filenames),
                )
            )

    if golang_subsystem.tailor_binary_targets:
        all_go_files_digest_contents = await Get(
            DigestContents, PathGlobs, request.path_globs("*.go")
        )

        # Directories containing a `package main` file (and under a go.mod) are binary candidates.
        main_package_dirs = []
        for file_content in all_go_files_digest_contents:
            dirname = os.path.dirname(file_content.path)
            if has_package_main(file_content.content) and has_go_mod_ancestor(
                dirname, all_go_mod_dirs
            ):
                main_package_dirs.append(dirname)

        existing_targets = await Get(
            UnexpandedTargets,
            RawSpecs(
                ancestor_globs=tuple(AncestorGlobSpec(d) for d in main_package_dirs),
                description_of_origin="the `go_binary` tailor rule",
            ),
        )
        owned_main_packages = await MultiGet(
            Get(GoBinaryMainPackage, GoBinaryMainPackageRequest(t[GoBinaryMainPackageField]))
            for t in existing_targets
            if t.has_field(GoBinaryMainPackageField)
        )
        unowned_main_package_dirs = set(main_package_dirs) - {
            # NB: We assume the `go_package` lives in the directory it's defined, which we validate
            # by e.g. banning `**` in its sources field.
            pkg.address.spec_path
            for pkg in owned_main_packages
        }
        putative_targets.extend(
            PutativeTarget.for_target_type(
                GoBinaryTarget,
                path=main_pkg_dir,
                name="bin",
                triggering_sources=tuple(),
            )
            for main_pkg_dir in unowned_main_package_dirs
        )

    return PutativeTargets(putative_targets)
def test_filtered_targets(rule_runner: RuleRunner) -> None:
    """`FilteredTargets` should honor the global `--tag` option across both address specs
    (which include target generators and generated targets) and file specs."""
    rule_runner.write_files(
        {
            "addr_specs/f1.txt": "",
            "addr_specs/f2.txt": "",
            "addr_specs/BUILD": dedent(
                """\
                file_generator(
                    sources=["*.txt"],
                    tags=["a"],
                    overrides={"f2.txt": {"tags": ["b"]}},
                )
                nonfile_generator(name="nonfile", tags=["b"])

                target(name='t', tags=["a"])
                """
            ),
            "fs_specs/f1.txt": "",
            "fs_specs/f2.txt": "",
            "fs_specs/BUILD": dedent(
                """\
                file_generator(
                    sources=["*.txt"],
                    tags=["a"],
                    overrides={"f2.txt": {"tags": ["b"]}},
                )

                target(name='t', sources=["f1.txt"], tags=["a"])
                """
            ),
        }
    )
    specs = RawSpecs(
        recursive_globs=(RecursiveGlobSpec("addr_specs"),),
        file_globs=(FileGlobSpec("fs_specs/*.txt"),),
        filter_by_global_options=True,
        description_of_origin="tests",
    )

    def check(tags_option: str | None, expected: set[Address]) -> None:
        # Resolve addresses for the fixed specs under the given --tag value, then filter.
        if tags_option:
            rule_runner.set_options([f"--tag={tags_option}"])
        addresses = rule_runner.request(Addresses, [specs])
        result = rule_runner.request(FilteredTargets, [addresses])
        assert {t.address for t in result} == expected

    addr_f1 = Address("addr_specs", relative_file_path="f1.txt")
    addr_f2 = Address("addr_specs", relative_file_path="f2.txt")
    addr_gen = Address("addr_specs", target_name="nonfile", generated_name="gen")
    addr_direct = Address("addr_specs", target_name="t")

    fs_f1 = Address("fs_specs", relative_file_path="f1.txt")
    fs_f2 = Address("fs_specs", relative_file_path="f2.txt")
    fs_direct = Address("fs_specs", target_name="t")

    all_a_tags = {addr_f1, addr_direct, fs_f1, fs_direct}
    all_b_tags = {addr_gen, addr_f2, fs_f2}

    # No filter keeps everything; `-x` excludes tag x.
    check(None, {*all_a_tags, *all_b_tags})
    check("a", all_a_tags)
    check("b", all_b_tags)
    check("-a", all_b_tags)
    check("-b", all_a_tags)
def test_no_applicable_targets_exception() -> None:
    # Check that we correctly render the error message.
    class Tgt1(Target):
        alias = "tgt1"
        core_fields = ()

    class Tgt2(Target):
        alias = "tgt2"
        core_fields = (MultipleSourcesField,)

    class Tgt3(Target):
        alias = "tgt3"
        core_fields = ()

    # No targets/files specified. Because none of the relevant targets have a sources field, we do
    # not give the filedeps command.
    exc = NoApplicableTargetsException(
        [],
        Specs.empty(),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    remedy = (
        "Please specify relevant file and/or target arguments. Run `./pants "
        "--filter-target-type=tgt1 list ::` to find all applicable targets in your project."
    )
    assert (
        dedent(
            f"""\
            No files or targets specified. The `foo` goal works with these target types:

              * tgt1

            {remedy}"""
        )
        in str(exc)
    )

    invalid_tgt = Tgt3({}, Address("blah"))
    # File arguments were given, but only matched an inapplicable target type; because Tgt2
    # has a sources field, the filedeps remedy is included this time.
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                file_literals=(FileLiteralSpec("foo.ext"),), description_of_origin="tests"
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1, Tgt2],
        goal_description="the `foo` goal",
    )
    remedy = (
        "Please specify relevant file and/or target arguments. Run `./pants "
        "--filter-target-type=tgt1,tgt2 list ::` to find all applicable targets in your project, "
        "or run `./pants --filter-target-type=tgt1,tgt2 filedeps ::` to find all "
        "applicable files."
    )
    assert (
        dedent(
            f"""\
            No applicable files or targets matched. The `foo` goal works with these target types:

              * tgt1
              * tgt2

            However, you only specified file arguments with these target types:

              * tgt3

            {remedy}"""
        )
        in str(exc)
    )

    # Test handling of `Specs`.
    # Target arguments only.
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                address_literals=(AddressLiteralSpec("foo", "bar"),), description_of_origin="tests"
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    assert "However, you only specified target arguments with these target types:" in str(exc)

    # Both target and file arguments.
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                address_literals=(AddressLiteralSpec("foo", "bar"),),
                file_literals=(FileLiteralSpec("foo.ext"),),
                description_of_origin="tests",
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    assert "However, you only specified target and file arguments with these target types:" in str(
        exc
    )
async def determine_main_pkg_for_go_binary(
    request: GoBinaryMainPackageRequest,
) -> GoBinaryMainPackage:
    """Determine the `go_package` that provides a `go_binary`'s main package.

    If the `main` field is set, resolve and validate the referenced address; otherwise
    infer the unique `go_package` residing in the binary target's own directory.

    Raises:
        InvalidFieldException: if an explicit `main` address is not a `go_package`.
        ResolveError: if zero or more than one `go_package` exists in the directory.
    """
    addr = request.field.address
    if request.field.value:
        # Explicit `main=` address: resolve it and check it is a go_package.
        description_of_origin = (
            f"the `{request.field.alias}` field from the target {request.field.address}"
        )
        specified_address = await Get(
            Address,
            AddressInput,
            AddressInput.parse(
                request.field.value,
                relative_to=addr.spec_path,
                description_of_origin=description_of_origin,
            ),
        )
        wrapped_specified_tgt = await Get(
            WrappedTarget,
            WrappedTargetRequest(specified_address, description_of_origin=description_of_origin),
        )
        if not wrapped_specified_tgt.target.has_field(GoPackageSourcesField):
            raise InvalidFieldException(
                f"The {repr(GoBinaryMainPackageField.alias)} field in target {addr} must point to "
                "a `go_package` target, but was the address for a "
                f"`{wrapped_specified_tgt.target.alias}` target.\n\n"
                "Hint: you should normally not specify this field so that Pants will find the "
                "`go_package` target for you."
            )
        return GoBinaryMainPackage(wrapped_specified_tgt.target.address)

    # No explicit `main=`: look for go_package targets in the binary's own directory.
    candidate_targets = await Get(
        Targets,
        RawSpecs(
            dir_globs=(DirGlobSpec(addr.spec_path),),
            description_of_origin="the `go_binary` dependency inference rule",
        ),
    )
    relevant_pkg_targets = [
        tgt
        for tgt in candidate_targets
        if tgt.has_field(GoPackageSourcesField) and tgt.residence_dir == addr.spec_path
    ]
    if len(relevant_pkg_targets) == 1:
        return GoBinaryMainPackage(relevant_pkg_targets[0].address)

    if not relevant_pkg_targets:
        raise ResolveError(
            f"The target {addr} requires that there is a `go_package` "
            f"target defined in its directory {addr.spec_path}, but none were found.\n\n"
            "To fix, add a target like `go_package()` or `go_package(name='pkg')` to the BUILD "
            f"file in {addr.spec_path}."
        )
    raise ResolveError(
        f"There are multiple `go_package` targets for the same directory of the "
        f"target {addr}: {addr.spec_path}. It is ambiguous what to use as the `main` "
        "package.\n\n"
        # Fixed: the backtick around `{addr}` was previously unbalanced (`{addr} ...).
        f"To fix, please either set the `main` field for `{addr}` or remove these "
        "`go_package` targets so that only one remains: "
        f"{sorted(tgt.address.spec for tgt in relevant_pkg_targets)}"
    )