def _set_start_time(self, start_time: float) -> None:
    """Start the run tracker and record the CLI specs for this run."""
    self._run_tracker.start(self.options, run_start_time=start_time)
    spec_parser = SpecsParser(get_buildroot())
    # Render each parsed spec back to its canonical string form for reporting.
    specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
    # Note: This will not include values from `--changed-*` flags.
    self._run_tracker.run_info.add_info("specs_from_command_line", specs, stringify=False)
def run(self, start_time: float) -> ExitCode:
    """Run the session: start the run tracker, stream workunits, and return the engine exit code.

    Always records scheduler metrics and ends the run on the tracker, even if the
    inner run raises.
    """
    with maybe_profiled(self.profile_path):
        spec_parser = SpecsParser()
        specs = []
        for spec_str in self.options.specs:
            spec, is_ignore = spec_parser.parse_spec(spec_str)
            # Re-render ignore specs with their leading `-` so the tracker records
            # them as the user typed them.
            specs.append(f"-{spec}" if is_ignore else str(spec))

        self.run_tracker.start(run_start_time=start_time, specs=specs)
        global_options = self.options.for_global_scope()

        streaming_reporter = StreamingWorkunitHandler(
            self.graph_session.scheduler_session,
            run_tracker=self.run_tracker,
            specs=self.specs,
            options_bootstrapper=self.options_bootstrapper,
            callbacks=self._get_workunits_callbacks(),
            report_interval_seconds=global_options.streaming_workunits_report_interval,
            # Async completion is only allowed when pantsd outlives this run.
            allow_async_completion=(
                global_options.pantsd and global_options.streaming_workunits_complete_async
            ),
            max_workunit_verbosity=global_options.streaming_workunits_level,
        )
        with streaming_reporter:
            # Default to failure so an exception in _run_inner still ends the run
            # with a failing code.
            engine_result = PANTS_FAILED_EXIT_CODE
            try:
                engine_result = self._run_inner()
            finally:
                metrics = self.graph_session.scheduler_session.metrics()
                self.run_tracker.set_pantsd_scheduler_metrics(metrics)
                self.run_tracker.end_run(engine_result)
            return engine_result
def assert_resolved(specs: Iterable[str], expected: set[str]) -> None:
    """Check that the given spec strings resolve to exactly the expected address specs."""
    parsed = SpecsParser().parse_specs(
        specs,
        convert_dir_literal_to_address_literal=False,
        description_of_origin="tests",
    )
    resolved = rule_runner.request(Addresses, [parsed])
    assert expected == {address.spec for address in resolved}
def run(self, start_time: float) -> ExitCode:
    """Run the session under the streaming workunit reporter and return the exit code.

    Metrics are recorded and the run is ended on the tracker even when the inner
    run raises.
    """
    with maybe_profiled(self.profile_path):
        spec_parser = SpecsParser(get_buildroot())
        # Canonical string form of each parsed spec, recorded on the run tracker.
        specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
        self.run_tracker.start(run_start_time=start_time, specs=specs)
        global_options = self.options.for_global_scope()

        streaming_reporter = StreamingWorkunitHandler(
            self.graph_session.scheduler_session,
            run_tracker=self.run_tracker,
            specs=self.specs,
            options_bootstrapper=self.options_bootstrapper,
            callbacks=self._get_workunits_callbacks(),
            report_interval_seconds=global_options.streaming_workunits_report_interval,
            pantsd=global_options.pantsd,
        )
        with streaming_reporter:
            # Default to failure so exceptions from _run_inner still end the run
            # with a failing code.
            engine_result = PANTS_FAILED_EXIT_CODE
            try:
                engine_result = self._run_inner()
            finally:
                metrics = self.graph_session.scheduler_session.metrics()
                self.run_tracker.set_pantsd_scheduler_metrics(metrics)
                self.run_tracker.end_run(engine_result)
            return engine_result
def run_goal_rule(
    self,
    goal: Type[Goal],
    *,
    global_args: Optional[Iterable[str]] = None,
    args: Optional[Iterable[str]] = None,
    env: Optional[Mapping[str, str]] = None,
) -> GoalRuleResult:
    """Run the @goal_rule for `goal`, returning its exit code and captured console output.

    `global_args` are placed before the goal name on the synthesized command line,
    `args` after it.
    """
    options_bootstrapper = create_options_bootstrapper(
        args=(*(global_args or []), goal.name, *(args or [])),
        env=env,
    )
    # Parse specs from the full options: global scope plus the goal's own scopes.
    raw_specs = options_bootstrapper.get_full_options(
        [*GlobalOptions.known_scope_infos(), *goal.subsystem_cls.known_scope_infos()]
    ).specs
    specs = SpecsParser(self.build_root).parse_specs(raw_specs)
    stdout, stderr = StringIO(), StringIO()
    console = Console(stdout=stdout, stderr=stderr)
    exit_code = self.scheduler.run_goal_rule(
        goal,
        Params(
            specs,
            console,
            options_bootstrapper,
            Workspace(self.scheduler),
            InteractiveRunner(self.scheduler),
        ),
    )
    # Flush so all buffered console output lands in the StringIO captures.
    console.flush()
    return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue())
def test_streaming_workunits_expanded_specs(run_tracker: RunTracker) -> None:
    """Verify that a workunit callback can expand CLI specs into their owning targets/files."""
    rule_runner = RuleRunner(
        target_types=[PythonLibrary],
        rules=[
            QueryRule(ProcessResult, (Process,)),
        ],
    )
    rule_runner.set_options(["--backend-packages=pants.backend.python"])

    # Two python_library targets: one matched by a directory glob, one by a file arg.
    rule_runner.create_file("src/python/somefiles/BUILD", "python_library()")
    rule_runner.create_file("src/python/somefiles/a.py", "print('')")
    rule_runner.create_file("src/python/somefiles/b.py", "print('')")
    rule_runner.create_file("src/python/others/BUILD", "python_library()")
    rule_runner.create_file("src/python/others/a.py", "print('')")
    rule_runner.create_file("src/python/others/b.py", "print('')")

    specs = SpecsParser(get_buildroot()).parse_specs(
        ["src/python/somefiles::", "src/python/others/b.py"]
    )

    class Callback(WorkunitsCallback):
        @property
        def can_finish_async(self) -> bool:
            # Assertions must run before the test ends, so finish synchronously.
            return False

        def __call__(self, **kwargs) -> None:
            context = kwargs["context"]
            assert isinstance(context, StreamingWorkunitContext)
            expanded = context.get_expanded_specs()
            targets = expanded.targets
            assert len(targets.keys()) == 2
            # The file arg expands to just that file.
            assert targets["src/python/others/b.py"] == [
                TargetInfo(filename="src/python/others/b.py")
            ]
            # The directory glob expands to every file owned by the target.
            assert set(targets["src/python/somefiles"]) == {
                TargetInfo(filename="src/python/somefiles/a.py"),
                TargetInfo(filename="src/python/somefiles/b.py"),
            }

    handler = StreamingWorkunitHandler(
        scheduler=rule_runner.scheduler,
        run_tracker=run_tracker,
        callbacks=[Callback()],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
        specs=specs,
        options_bootstrapper=create_options_bootstrapper(
            ["--backend-packages=pants.backend.python"]
        ),
        pantsd=False,
    )
    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
    )
    # Run a process inside the handler so the callback fires with a live context.
    with handler:
        rule_runner.request(ProcessResult, [stdout_process])
def run_goal_rule(
    self,
    goal: Type[Goal],
    *,
    global_args: Iterable[str] | None = None,
    args: Iterable[str] | None = None,
    env: Mapping[str, str] | None = None,
    env_inherit: set[str] | None = None,
) -> GoalRuleResult:
    """Run the @goal_rule for `goal`, returning its exit code and captured console output.

    `global_args` precede the goal name on the synthesized command line, `args`
    follow it; `env_inherit` names environment variables to pass through.
    """
    merged_args = (*(global_args or []), goal.name, *(args or []))
    self.set_options(merged_args, env=env, env_inherit=env_inherit)

    # Parse specs from the options for the global scope plus the goal's own scope.
    raw_specs = self.options_bootstrapper.full_options_for_scopes(
        [GlobalOptions.get_scope_info(), goal.subsystem_cls.get_scope_info()]
    ).specs
    specs = SpecsParser(self.build_root).parse_specs(raw_specs)

    stdout, stderr = StringIO(), StringIO()
    console = Console(stdout=stdout, stderr=stderr)

    exit_code = self.scheduler.run_goal_rule(
        goal,
        Params(
            specs,
            console,
            Workspace(self.scheduler),
            InteractiveRunner(self.scheduler),
        ),
    )

    # Flush so all buffered console output lands in the StringIO captures.
    console.flush()
    return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue())
def run(self, start_time: float) -> ExitCode:
    """Run the requested goals, returning the engine exit code.

    Short-circuits for help requests and for an empty goal list; otherwise runs
    under the streaming workunit reporter and records scheduler metrics.
    """
    spec_parser = SpecsParser(get_buildroot())
    # Canonical string form of each parsed spec, recorded on the run tracker.
    specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
    self.run_tracker.start(run_start_time=start_time, specs=specs)
    with maybe_profiled(self.profile_path):
        global_options = self.options.for_global_scope()

        if self.options.help_request:
            return self._print_help(self.options.help_request)

        streaming_reporter = StreamingWorkunitHandler(
            self.graph_session.scheduler_session,
            run_tracker=self.run_tracker,
            callbacks=self._get_workunits_callbacks(),
            report_interval_seconds=global_options.streaming_workunits_report_interval,
        )

        goals = tuple(self.options.goals)
        with streaming_reporter.session():
            if not goals:
                return PANTS_SUCCEEDED_EXIT_CODE
            # Default to failure so a raised exception still ends the run with a
            # failing code.
            engine_result = PANTS_FAILED_EXIT_CODE
            try:
                engine_result = self._perform_run(goals)
            except Exception as e:
                ExceptionSink.log_exception(e)

            metrics = self.graph_session.scheduler_session.metrics()
            self.run_tracker.set_pantsd_scheduler_metrics(metrics)
            self.run_tracker.end_run(engine_result)

            return engine_result
async def parse_one_bsp_mapping(request: _ParseOneBSPMappingRequest) -> BSPBuildTargetInternal:
    """Resolve one BSP mapping definition's addresses into include specs."""
    parsed = SpecsParser().parse_specs(
        request.definition.addresses,
        description_of_origin=f"the BSP mapping {request.name}",
        convert_dir_literal_to_address_literal=False,
    )
    return BSPBuildTargetInternal(request.name, parsed.includes, request.definition)
def assert_paths(specs: Iterable[str], expected_files: set[str], expected_dirs: set[str]) -> None:
    """Check that the given specs resolve to exactly the expected files and directories."""
    parsed = SpecsParser().parse_specs(
        specs,
        convert_dir_literal_to_address_literal=False,
        description_of_origin="tests",
    )
    paths = rule_runner.request(SpecsPaths, [parsed])
    assert expected_files == set(paths.files)
    assert expected_dirs == set(paths.dirs)
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    *,
    build_root: Optional[str] = None,
) -> Specs:
    """Determine the specs for a given Pants run.

    Specs come either from the CLI arguments or from the `--changed-*` options;
    using both at once is an error.
    """
    build_root = build_root or get_buildroot()
    specs = SpecsParser(build_root).parse_specs(options.specs)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one."
        )

    if not changed_options.provided:
        return specs

    # Change detection requires a Git worktree.
    git = get_git()
    if not git:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(git)),
        dependees=changed_options.dependees,
    )
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)]
    )
    logger.debug("changed addresses: %s", changed_addresses)

    address_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(address.spec)
        address_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                # NB: AddressInput.target_component may be None, but AddressLiteralSpec expects a
                # string.
                target_component=address_input.target_component or address.target_name,
            )
        )
    return Specs(AddressSpecs(address_specs, filter_by_global_options=True), FilesystemSpecs([]))
def assert_spec_parsed(build_root: Path, spec_str: str, expected_spec: Spec) -> None:
    """Verify `spec_str` parses to `expected_spec`, both plain and with a `-` ignore prefix."""
    parser = SpecsParser(str(build_root))
    # The same spec text must parse identically with and without the ignore marker,
    # differing only in the returned is_ignore flag.
    for prefix, expected_ignore in (("", False), ("-", True)):
        parsed, ignored = parser.parse_spec(f"{prefix}{spec_str}")
        assert isinstance(parsed, type(expected_spec))
        assert parsed == expected_spec
        assert ignored is expected_ignore
def _set_start_time(self, start_time: float) -> None:
    """Start the run tracker and record the CLI specs for this run."""
    # Propagates parent_build_id to pants runs that may be called from this pants run.
    os.environ["PANTS_PARENT_BUILD_ID"] = self._run_tracker.run_id

    self._run_tracker.start(self.options, run_start_time=start_time)

    spec_parser = SpecsParser(get_buildroot())
    # Canonical string form of each parsed spec, for reporting.
    specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
    # Note: This will not include values from `--changed-*` flags.
    self._run_tracker.run_info.add_info("specs_from_command_line", specs, stringify=False)
async def paths(console: Console, paths_subsystem: PathsSubsystem) -> PathsGoal:
    """Print, as JSON, every dependency path from `--from`'s target to `--to`'s target."""
    path_from = paths_subsystem.path_from
    path_to = paths_subsystem.path_to
    if path_from is None:
        raise ValueError("Must set --from")

    if path_to is None:
        raise ValueError("Must set --to")

    specs_parser = SpecsParser()

    # Resolve both endpoints concurrently; each spec must match exactly one target.
    from_tgts, to_tgts = await MultiGet(
        [
            Get(Targets, Specs, specs_parser.parse_specs([path_from])),
            Get(Targets, Specs, specs_parser.parse_specs([path_to])),
        ]
    )

    root = from_tgts.expect_single()
    destination = to_tgts.expect_single()

    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([root.address], include_special_cased_deps=True)
    )

    # Build an adjacency list (address -> direct dependency targets) over the closure.
    adjacent_targets_per_target = await MultiGet(
        Get(
            Targets,
            DependenciesRequest(tgt.get(Dependencies), include_special_cased_deps=True),
        )
        for tgt in transitive_targets.closure
    )

    transitive_targets_closure_addresses = (t.address for t in transitive_targets.closure)
    adjacency_lists = dict(zip(transitive_targets_closure_addresses, adjacent_targets_per_target))

    spec_paths = []
    for path in find_paths_breadth_first(adjacency_lists, root.address, destination.address):
        spec_path = [address.spec for address in path]
        spec_paths.append(spec_path)

    with paths_subsystem.output(console) as write_stdout:
        write_stdout(json.dumps(spec_paths, indent=2))

    return PathsGoal(exit_code=0)
def execute_rule( self, *, args: Optional[Iterable[str]] = None, global_args: Optional[Iterable[str]] = None, env: Optional[Dict[str, str]] = None, exit_code: int = 0, additional_params: Optional[Iterable[Any]] = None, ) -> GoalRuleResult: """Executes the @goal_rule for this test class. Returns the return code, stdout, and stderr of the goal. """ # Create an OptionsBootstrapper for these args/env, and a captured Console instance. options_bootstrapper = create_options_bootstrapper( args=(*(global_args or []), self.goal_cls.name, *(args or [])), env=env, ) BuildConfigInitializer.get(options_bootstrapper) full_options = options_bootstrapper.get_full_options([ *GlobalOptions.known_scope_infos(), *self.goal_cls.subsystem_cls.known_scope_infos() ]) stdout, stderr = StringIO(), StringIO() console = Console(stdout=stdout, stderr=stderr) # Run for the specs parsed from the args. specs = SpecsParser(self.build_root).parse_specs(full_options.specs) params = Params( specs, console, options_bootstrapper, Workspace(self.scheduler), *(additional_params or []), ) actual_exit_code = self.scheduler.run_goal_rule(self.goal_cls, params) # Flush and capture console output. console.flush() stdout_val = stdout.getvalue() stderr_val = stderr.getvalue() assert ( exit_code == actual_exit_code ), f"Exited with {actual_exit_code} (expected {exit_code}):\nstdout:\n{stdout_val}\nstderr:\n{stderr_val}" return GoalRuleResult(actual_exit_code, stdout_val, stderr_val)
def run_goal_rule(
    self,
    goal: Type[Goal],
    *,
    global_args: Optional[Iterable[str]] = None,
    args: Optional[Iterable[str]] = None,
    env: Optional[Mapping[str, str]] = None,
) -> GoalRuleResult:
    """Run the @goal_rule for `goal` in a fresh scheduler session, capturing console output."""
    options_bootstrapper = create_options_bootstrapper(
        args=(*(global_args or []), goal.name, *(args or [])),
        env=env,
    )
    # Parse specs from the full options: global scope plus the goal's own scopes.
    raw_specs = options_bootstrapper.get_full_options(
        [*GlobalOptions.known_scope_infos(), *goal.subsystem_cls.known_scope_infos()]
    ).specs
    specs = SpecsParser(self.build_root).parse_specs(raw_specs)
    stdout, stderr = StringIO(), StringIO()
    console = Console(stdout=stdout, stderr=stderr)

    # A new session is needed so the bootstrapper/environment can be injected as
    # session values.
    session = self.scheduler.scheduler.new_session(
        build_id="buildid_for_test",
        should_report_workunits=True,
        session_values=SessionValues(
            {
                OptionsBootstrapper: options_bootstrapper,
                PantsEnvironment: PantsEnvironment(env),
            }
        ),
    )

    exit_code = session.run_goal_rule(
        goal,
        Params(
            specs,
            console,
            Workspace(self.scheduler),
            InteractiveRunner(self.scheduler),
        ),
    )

    # Flush so all buffered console output lands in the StringIO captures.
    console.flush()
    return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue())
def test_resolve_addresses_from_raw_specs(rule_runner: RuleRunner) -> None:
    """This tests that we correctly handle resolving from both specs with and without owners."""
    rule_runner.write_files(
        {
            "fs_spec/f.txt": "",
            "fs_spec/BUILD": "file_generator(sources=['f.txt'])",
            "address_spec/f.txt": "",
            "address_spec/BUILD": dedent(
                """\
                file_generator(sources=['f.txt'])
                nonfile_generator(name='nonfile')
                """
            ),
            "multiple_files/f1.txt": "",
            "multiple_files/f2.txt": "",
            "multiple_files/BUILD": "file_generator(sources=['*.txt'])",
        }
    )

    # Specs that don't overlap with each other at all.
    no_interaction_specs = [
        "fs_spec/f.txt",
        "address_spec:address_spec",
        "address_spec:nonfile#gen",
    ]
    # A file spec and an address spec that both cover the same generator target.
    multiple_files_specs = ["multiple_files/f2.txt", "multiple_files:multiple_files"]
    specs = SpecsParser(rule_runner.build_root).parse_specs(
        [*no_interaction_specs, *multiple_files_specs],
        convert_dir_literal_to_address_literal=False,
        description_of_origin="tests",
    )

    result = rule_runner.request(Addresses, [specs])
    assert set(result) == {
        Address("fs_spec", relative_file_path="f.txt"),
        Address("address_spec"),
        Address("address_spec", target_name="nonfile", generated_name="gen"),
        Address("multiple_files"),
        Address("multiple_files", relative_file_path="f2.txt"),
    }
async def paths(
    console: Console, paths_subsystem: PathsSubsystem, global_options: GlobalOptions
) -> PathsGoal:
    """Print, as JSON, every dependency path from `--from`'s target to `--to`'s target."""
    path_from = paths_subsystem.path_from
    path_to = paths_subsystem.path_to
    if path_from is None:
        raise ValueError("Must set --from")

    if path_to is None:
        raise ValueError("Must set --to")

    specs_parser = SpecsParser()
    # Honor the deprecated directory-arg semantics option when parsing the endpoints.
    convert_dir_literals = global_options.use_deprecated_directory_cli_args_semantics

    # Resolve both endpoints concurrently; each spec must match exactly one target.
    from_tgts, to_tgts = await MultiGet(
        Get(
            Targets,
            Specs,
            specs_parser.parse_specs(
                [path_from],
                description_of_origin="the option `--paths-from`",
                convert_dir_literal_to_address_literal=convert_dir_literals,
            ),
        ),
        Get(
            Targets,
            Specs,
            specs_parser.parse_specs(
                [path_to],
                description_of_origin="the option `--paths-to`",
                convert_dir_literal_to_address_literal=convert_dir_literals,
            ),
        ),
    )

    root = from_tgts.expect_single()
    destination = to_tgts.expect_single()

    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([root.address], include_special_cased_deps=True)
    )

    # Build an adjacency list (address -> direct dependency targets) over the closure.
    adjacent_targets_per_target = await MultiGet(
        Get(
            Targets,
            DependenciesRequest(tgt.get(Dependencies), include_special_cased_deps=True),
        )
        for tgt in transitive_targets.closure
    )

    transitive_targets_closure_addresses = (t.address for t in transitive_targets.closure)
    adjacency_lists = dict(zip(transitive_targets_closure_addresses, adjacent_targets_per_target))

    spec_paths = []
    for path in find_paths_breadth_first(adjacency_lists, root.address, destination.address):
        spec_path = [address.spec for address in path]
        spec_paths.append(spec_path)

    with paths_subsystem.output(console) as write_stdout:
        write_stdout(json.dumps(spec_paths, indent=2))

    return PathsGoal(exit_code=0)
def assert_filesystem_spec_parsed(
    build_root: Path, spec_str: str, expected_spec: FilesystemSpec
) -> None:
    """Check that `spec_str` parses to the expected filesystem spec."""
    parsed = SpecsParser(str(build_root)).parse_spec(spec_str)
    assert isinstance(parsed, FilesystemSpec)
    assert parsed == expected_spec
def assert_address_spec_parsed(build_root: Path, spec_str: str, expected_spec: AddressSpec) -> None:
    """Check that `spec_str` parses to the expected address spec."""
    parsed = SpecsParser(str(build_root)).parse_spec(spec_str)
    assert isinstance(parsed, AddressSpec)
    assert parsed == expected_spec
def setUp(self) -> None:
    """Initialize the base test case, then create a spec parser rooted at the build root."""
    super().setUp()
    parser = SpecsParser(self.build_root)
    self._spec_parser = parser
import re
from dataclasses import asdict
from typing import Iterable, Iterator, List, Optional

import strawberry
from strawberry.types import Info

from pants.backend.explorer.graphql.context import GraphQLContext
from pants.backend.explorer.graphql.field_types import JSONScalar
from pants.backend.project_info.peek import TargetData, TargetDatas
from pants.base.specs_parser import SpecsParser
from pants.engine.target import AllUnexpandedTargets, UnexpandedTargets
from pants.help.help_info_extracter import TargetTypeHelpInfo
from pants.util.strutil import softwrap

# Module-level parser shared by the GraphQL resolvers defined in this module.
specs_parser = SpecsParser()


@strawberry.type(description="Describes a target field type.")
class TargetTypeField:
    """GraphQL type exposing the help metadata of a single target field."""

    alias: str = strawberry.field(
        description="The field name, as used in a target definition in a BUILD file."
    )
    provider: str = strawberry.field(description="Backend that registered the field type.")
    description: str = strawberry.field(description="Field documentation.")
    type_hint: str = strawberry.field(description="Field type hint.")
    required: bool = strawberry.field(description="Field required flag.")
    default: Optional[str] = strawberry.field(description="Field default value.")
async def parse_one_bsp_mapping(request: _ParseOneBSPMappingRequest) -> BSPBuildTargetInternal:
    """Parse one BSP mapping's raw specs into a BSP build target."""
    parsed = SpecsParser().parse_specs(request.raw_specs)
    return BSPBuildTargetInternal(request.name, parsed)
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
) -> Specs:
    """Determine the specs for a given Pants run.

    Specs come either from the CLI arguments or from the `--changed-*` options;
    using both at once is an error.
    """
    global_options = options.for_global_scope()
    unmatched_cli_globs = global_options.unmatched_cli_globs.to_glob_match_error_behavior()
    convert_dir_literal_to_address_literal = (
        global_options.use_deprecated_directory_cli_args_semantics
    )
    # Warn when the user has not explicitly chosen directory-arg semantics, ahead
    # of the default flip in 2.14.
    if global_options.is_default("use_deprecated_directory_cli_args_semantics"):
        warn_or_error(
            "2.14.0.dev0",
            "`use_deprecated_directory_cli_args_semantics` defaulting to True",
            softwrap(
                f"""
                Currently, a directory argument like `{bin_name()} test dir` is shorthand for the
                target `dir:dir`, i.e. the target that leaves off `name=`. In Pants 2.14, by
                default, a directory argument will instead match all targets/files in the
                directory. To opt into the new and more intuitive semantics early, set
                `use_deprecated_directory_cli_args_semantics = false` in the `[GLOBAL]` section in
                `pants.toml`. Otherwise, set to `true` to silence this warning.
                """
            ),
        )
    specs = SpecsParser().parse_specs(
        options.specs,
        description_of_origin="CLI arguments",
        unmatched_glob_behavior=unmatched_cli_globs,
        convert_dir_literal_to_address_literal=convert_dir_literal_to_address_literal,
    )

    changed_options = ChangedOptions.from_options(options.for_scope("changed"))
    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        specs_description = specs.arguments_provided_description()
        assert specs_description is not None
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. You can "
            f"only use `{changed_name}` or use normal arguments."
        )

    if not changed_options.provided:
        return specs

    # Change detection requires a Git worktree.
    (git_binary,) = session.product_request(GitBinary, [Params(GitBinaryRequest())])
    (maybe_git_worktree,) = session.product_request(
        MaybeGitWorktree, [Params(GitWorktreeRequest(), git_binary)]
    )
    if not maybe_git_worktree.git_worktree:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )

    changed_files = tuple(changed_options.changed_files(maybe_git_worktree.git_worktree))
    file_literal_specs = tuple(FileLiteralSpec(f) for f in changed_files)

    changed_request = ChangedRequest(changed_files, changed_options.dependees)
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)]
    )
    logger.debug("changed addresses: %s", changed_addresses)

    address_literal_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(
            address.spec, description_of_origin="`--changed-since`"
        )
        address_literal_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                target_component=address_input.target_component,
                generated_component=address_input.generated_component,
                parameters=address_input.parameters,
            )
        )

    return Specs(
        includes=RawSpecs(
            # We need both address_literals and file_literals to cover all our edge cases, including
            # target-aware vs. target-less goals, e.g. `list` vs `count-loc`.
            address_literals=tuple(address_literal_specs),
            file_literals=file_literal_specs,
            unmatched_glob_behavior=unmatched_cli_globs,
            filter_by_global_options=True,
            from_change_detection=True,
            description_of_origin="`--changed-since`",
        ),
        ignores=RawSpecs(description_of_origin="`--changed-since`"),
    )
def assert_spec_parsed(build_root: Path, spec_str: str, expected_spec: Spec) -> None:
    """Check that `spec_str` parses to a spec equal to (and of the same type as) `expected_spec`."""
    parsed = SpecsParser(str(build_root)).parse_spec(spec_str)
    assert isinstance(parsed, type(expected_spec))
    assert parsed == expected_spec