async def run(
    run_subsystem: RunSubsystem,
    global_options: GlobalOptions,
    workspace: Workspace,
    build_root: BuildRoot,
    complete_env: CompleteEnvironment,
) -> Run:
    targets_to_valid_field_sets = await Get(
        TargetRootsToFieldSets,
        TargetRootsToFieldSetsRequest(
            RunFieldSet,
            goal_description="the `run` goal",
            no_applicable_targets_behavior=NoApplicableTargetsBehavior.error,
            expect_single_field_set=True,
        ),
    )
    field_set = targets_to_valid_field_sets.field_sets[0]
    request = await Get(RunRequest, RunFieldSet, field_set)
    wrapped_target = await Get(WrappedTarget, Address, field_set.address)
    restartable = wrapped_target.target.get(RestartableField).value

    with temporary_dir(
        root_dir=global_options.pants_workdir, cleanup=run_subsystem.cleanup
    ) as tmpdir:
        if not run_subsystem.cleanup:
            logger.info(f"Preserving running binary chroot {tmpdir}")

        workspace.write_digest(
            request.digest,
            path_prefix=PurePath(tmpdir).relative_to(build_root.path).as_posix(),
            # We don't want to influence whether the InteractiveProcess is able to restart. Because
            # we're writing into a temp directory, we can safely mark this side_effecting=False.
            side_effecting=False,
        )

        args = (arg.format(chroot=tmpdir) for arg in request.args)
        env = {
            **complete_env,
            **{k: v.format(chroot=tmpdir) for k, v in request.extra_env.items()},
        }
        result = await Effect(
            InteractiveProcessResult,
            InteractiveProcess(
                argv=(*args, *run_subsystem.args),
                env=env,
                run_in_workspace=True,
                restartable=restartable,
            ),
        )
        exit_code = result.exit_code

    return Run(exit_code)
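# A minimal, stdlib-only sketch (hypothetical values) of the `{chroot}` interpolation done in
# the rule above: placeholders in the run request's args and extra_env are replaced with the
# temporary chroot path before the interactive process is launched. The specific arg and env
# values here are illustrative assumptions, not taken from the source.
request_args = ("{chroot}/my_app.pex", "--verbose")   # hypothetical RunRequest.args
request_extra_env = {"APP_HOME": "{chroot}"}          # hypothetical RunRequest.extra_env
tmpdir = "/repo/.pants.d/tmpabc123"                   # hypothetical temp dir under pants_workdir
args = tuple(arg.format(chroot=tmpdir) for arg in request_args)
env = {k: v.format(chroot=tmpdir) for k, v in request_extra_env.items()}
assert args == ("/repo/.pants.d/tmpabc123/my_app.pex", "--verbose")
assert env == {"APP_HOME": "/repo/.pants.d/tmpabc123"}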
async def run_repl(
    console: Console,
    workspace: Workspace,
    repl_subsystem: ReplSubsystem,
    all_specified_addresses: Addresses,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> Repl:
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(all_specified_addresses)
    )

    # TODO: When we support multiple languages, detect the default repl to use based
    # on the targets. For now we default to the python repl.
    repl_shell_name = repl_subsystem.shell or "python"
    implementations = {impl.name: impl for impl in union_membership[ReplImplementation]}
    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(implementations.keys())
        console.print_stderr(
            f"{repr(repl_shell_name)} is not a registered REPL. Available REPLs (which may "
            f"be specified through the option `--repl-shell`): {available}"
        )
        return Repl(-1)

    with temporary_dir(
        root_dir=global_options.options.pants_workdir, cleanup=False
    ) as tmpdir:
        repl_impl = repl_implementation_cls(
            targets=Targets(transitive_targets.closure), chroot=tmpdir
        )
        request = await Get(ReplRequest, ReplImplementation, repl_impl)

        workspace.write_digest(
            request.digest,
            path_prefix=PurePath(tmpdir).relative_to(build_root.path).as_posix(),
            # We don't want to influence whether the InteractiveProcess is able to restart. Because
            # we're writing into a temp directory, we can safely mark this side_effecting=False.
            side_effecting=False,
        )

        env = {**complete_env, **request.extra_env}
        result = await Effect(
            InteractiveProcessResult,
            InteractiveProcess(
                argv=request.args,
                env=env,
                run_in_workspace=True,
                restartable=repl_subsystem.restartable,
            ),
        )

    return Repl(result.exit_code)
async def generate_user_lockfile_goal(
    addresses: Addresses,
    python_setup: PythonSetup,
    workspace: Workspace,
) -> GenerateUserLockfileGoal:
    if python_setup.lockfile is None:
        logger.warning(
            "You ran `./pants generate-user-lockfile`, but `[python].experimental_lockfile` "
            "is not set. Please set this option to the path where you'd like the lockfile for "
            "your code's dependencies to live."
        )
        return GenerateUserLockfileGoal(exit_code=1)

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        # NB: By looking at the dependencies, rather than the closure, we only generate for
        # requirements that are actually used in the project.
        for tgt in transitive_targets.dependencies
        if tgt.has_field(PythonRequirementsField)
    )

    if not reqs:
        logger.warning(
            "No third-party requirements found for the transitive closure, so a lockfile will not "
            "be generated."
        )
        return GenerateUserLockfileGoal(exit_code=0)

    result = await Get(
        PythonLockfile,
        PythonLockfileRequest(
            reqs.req_strings,
            # TODO(#12314): Use interpreter constraints from the transitive closure.
            InterpreterConstraints(python_setup.interpreter_constraints),
            resolve_name="not yet implemented",
            lockfile_dest=python_setup.lockfile,
            _description=(
                f"Generate lockfile for {pluralize(len(reqs.req_strings), 'requirement')}: "
                f"{', '.join(reqs.req_strings)}"
            ),
            # TODO(12382): Make this command actually accurate once we figure out the semantics
            # for user lockfiles. This is currently misleading.
            _regenerate_command="./pants generate-user-lockfile ::",
        ),
    )
    workspace.write_digest(result.digest)
    logger.info(f"Wrote lockfile to {result.path}")
    return GenerateUserLockfileGoal(exit_code=0)
async def bsp_dependency_modules(
    request: DependencyModulesParams, workspace: Workspace
) -> DependencyModulesResult:
    responses = await MultiGet(
        Get(ResolveOneDependencyModuleResult, ResolveOneDependencyModuleRequest(btgt))
        for btgt in request.targets
    )
    output_digest = await Get(Digest, MergeDigests([r.digest for r in responses]))
    workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")
    return DependencyModulesResult(
        tuple(
            DependencyModulesItem(target=r.bsp_target_id, modules=r.modules)
            for r in responses
        )
    )
async def run_repl(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    options: ReplOptions,
    transitive_targets: TransitiveTargets,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Repl:
    # We can guarantee that we will only ever enter this `goal_rule` if there exists an implementer
    # of the `ReplImplementation` union because `LegacyGraphSession.run_goal_rules()` will not
    # execute this rule's body if there are no implementations registered.
    membership: Iterable[Type[ReplImplementation]] = union_membership.union_rules[
        ReplImplementation
    ]
    implementations = {impl.name: impl for impl in membership}

    default_repl = "python"
    repl_shell_name = cast(str, options.values.shell or default_repl)
    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(set(implementations.keys()))
        console.print_stderr(
            f"{repr(repl_shell_name)} is not a registered REPL. Available REPLs (which may "
            f"be specified through the option `--repl-shell`): {available}"
        )
        return Repl(-1)

    repl_impl = repl_implementation_cls(
        targets=Targets(
            tgt for tgt in transitive_targets.closure if repl_implementation_cls.is_valid(tgt)
        )
    )
    repl_binary = await Get[ReplBinary](ReplImplementation, repl_impl)

    with temporary_dir(
        root_dir=global_options.options.pants_workdir, cleanup=False
    ) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(repl_binary.digest, path_prefix=path_relative_to_build_root)
        )

        full_path = PurePath(tmpdir, repl_binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path,),
            run_in_workspace=True,
        )

    result = runner.run_local_interactive_process(run_request)
    return Repl(result.process_exit_code)
async def run(
    run_subsystem: RunSubsystem,
    global_options: GlobalOptions,
    console: Console,
    interactive_runner: InteractiveRunner,
    workspace: Workspace,
    build_root: BuildRoot,
    complete_env: CompleteEnvironment,
) -> Run:
    targets_to_valid_field_sets = await Get(
        TargetRootsToFieldSets,
        TargetRootsToFieldSetsRequest(
            RunFieldSet,
            goal_description="the `run` goal",
            no_applicable_targets_behavior=NoApplicableTargetsBehavior.error,
            expect_single_field_set=True,
        ),
    )
    field_set = targets_to_valid_field_sets.field_sets[0]
    request = await Get(RunRequest, RunFieldSet, field_set)

    with temporary_dir(
        root_dir=global_options.options.pants_workdir, cleanup=True
    ) as tmpdir:
        workspace.write_digest(
            request.digest,
            path_prefix=PurePath(tmpdir).relative_to(build_root.path).as_posix(),
        )
        args = (arg.format(chroot=tmpdir) for arg in request.args)
        env = {
            **complete_env,
            **{k: v.format(chroot=tmpdir) for k, v in request.extra_env.items()},
        }
        try:
            result = interactive_runner.run(
                InteractiveProcess(
                    argv=(*args, *run_subsystem.args),
                    env=env,
                    run_in_workspace=True,
                )
            )
            exit_code = result.exit_code
        except Exception as e:
            console.print_stderr(f"Exception when attempting to run {field_set.address}: {e!r}")
            exit_code = -1

    return Run(exit_code)
async def run(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    build_root: BuildRoot,
    options: RunOptions,
    global_options: GlobalOptions,
) -> Run:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            BinaryConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
            expect_single_config=True,
        )
    )
    config = targets_to_valid_configs.configurations[0]
    binary = await Get[CreatedBinary](BinaryConfiguration, config)

    workdir = global_options.options.pants_workdir
    with temporary_dir(root_dir=workdir, cleanup=True) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(binary.digest, path_prefix=path_relative_to_build_root)
        )

        console.write_stdout(f"Running target: {config.address}\n")
        full_path = PurePath(tmpdir, binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path, *options.values.args),
            run_in_workspace=True,
        )

        try:
            result = runner.run_local_interactive_process(run_request)
            exit_code = result.process_exit_code
            if result.process_exit_code == 0:
                console.write_stdout(f"{config.address} ran successfully.\n")
            else:
                console.write_stderr(
                    f"{config.address} failed with code {result.process_exit_code}!\n"
                )
        except Exception as e:
            console.write_stderr(f"Exception when attempting to run {config.address}: {e!r}\n")
            exit_code = -1

    return Run(exit_code)
async def create_binary(
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
) -> Binary:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            BinaryConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        )
    )
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, config)
        for config in targets_to_valid_configs.configurations
    )
    merged_digest = await Get[Digest](
        DirectoriesToMerge(tuple(binary.digest for binary in binaries))
    )
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest, path_prefix=str(distdir.relpath))
    )
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
def _handle_inbound_message(self, *, method_name: str, params: Any):
    # If the connection is not yet initialized and this is not the initialization request, BSP requires
    # returning an error for methods (and discarding all notifications).
    #
    # Concurrency: This method can be invoked from multiple threads (for each individual request). By returning
    # an error for all other requests, only the thread running the initialization RPC should be able to proceed.
    # This ensures that we can safely call `initialize_connection` on the BSPContext with the client-supplied
    # init parameters without worrying about multiple threads. (Not entirely true, since this does not handle
    # the client making multiple concurrent initialization RPCs, but that would violate the protocol in any case.)
    if (
        not self._context.is_connection_initialized
        and method_name != self._INITIALIZE_METHOD_NAME
    ):
        return _make_error_future(
            JsonRpcException(
                code=-32002,
                message=f"Client must first call `{self._INITIALIZE_METHOD_NAME}`.",
            )
        )

    # Handle the `build/shutdown` method and `build/exit` notification.
    if method_name == self._SHUTDOWN_METHOD_NAME:
        # Return no-op success for the `build/shutdown` method. This doesn't actually cause the server to
        # exit. That will occur once the client sends the `build/exit` notification.
        return None
    elif method_name == self._EXIT_NOTIFCATION_NAME:
        # The `build/exit` notification directs the BSP server to immediately exit.
        # The read-dispatch loop will exit once it notices that the inbound handle is closed. So close the
        # inbound handle (and outbound handle for completeness) and then return to the dispatch loop
        # to trigger the exit.
        self._inbound.close()
        self._outbound.close()
        return None

    method_mapping = self._handler_mappings.get(method_name)
    if not method_mapping:
        return _make_error_future(JsonRpcMethodNotFound.of(method_name))

    try:
        request = method_mapping.request_type.from_json_dict(params)
    except Exception:
        return _make_error_future(JsonRpcInvalidRequest())

    workspace = Workspace(self._scheduler_session)
    params = Params(request, workspace)
    execution_request = self._scheduler_session.execution_request(
        products=[method_mapping.response_type],
        subjects=[params],
    )
    returns, throws = self._scheduler_session.execute(execution_request)
    if len(returns) == 1 and len(throws) == 0:
        # Initialize the BSPContext with the client-supplied init parameters. See earlier comment on why this
        # call to `BSPContext.initialize_connection` is safe.
        if method_name == self._INITIALIZE_METHOD_NAME:
            self._context.initialize_connection(request, self.notify_client)
        return returns[0][1].value.to_json_dict()
    elif len(returns) == 0 and len(throws) == 1:
        raise throws[0][1].exc
    else:
        raise AssertionError(
            f"Received unexpected result from engine: returns={returns}; throws={throws}"
        )
async def create_binary(
    addresses: BuildFileAddresses,
    console: Console,
    workspace: Workspace,
    options: Binary.Options,
    options_bootstrapper: OptionsBootstrapper,
    build_root: BuildRoot,
) -> Binary:
    with Binary.line_oriented(options, console) as print_stdout:
        global_options = options_bootstrapper.bootstrap_options.for_global_scope()
        pants_distdir = Path(global_options.pants_distdir)
        if not is_child_of(pants_distdir, build_root.pathlib_path):
            console.print_stderr(
                "When set to an absolute path, `--pants-distdir` must be relative to the build root. "
                f"You set it to {pants_distdir}. Instead, use a relative path or an absolute path "
                "relative to the build root."
            )
            return Binary(exit_code=1)
        relative_distdir = (
            pants_distdir.relative_to(build_root.pathlib_path)
            if pants_distdir.is_absolute()
            else pants_distdir
        )
        print_stdout(f"Generating binaries in `./{relative_distdir}`")

        binaries = await MultiGet(
            Get[CreatedBinary](Address, address.to_address()) for address in addresses
        )
        merged_digest = await Get[Digest](
            DirectoriesToMerge(tuple(binary.digest for binary in binaries))
        )
        result = workspace.materialize_directory(
            DirectoryToMaterialize(merged_digest, path_prefix=str(relative_distdir))
        )
        for path in result.output_paths:
            print_stdout(f"Wrote {path}")
    return Binary(exit_code=0)
async def create_binary(
    targets_with_origins: TargetsWithOrigins,
    console: Console,
    workspace: Workspace,
    options: BinaryOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
) -> Binary:
    valid_config_types_by_target = gather_valid_binary_configuration_types(
        goal_subsytem=options,
        targets_with_origins=targets_with_origins,
        union_membership=union_membership,
        registered_target_types=registered_target_types,
    )
    binaries = await MultiGet(
        Get[CreatedBinary](BinaryConfiguration, valid_config_type.create(target))
        for target, valid_config_types in valid_config_types_by_target.items()
        for valid_config_type in valid_config_types
    )
    merged_digest = await Get[Digest](
        DirectoriesToMerge(tuple(binary.digest for binary in binaries))
    )
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest, path_prefix=str(distdir.relpath))
    )
    with options.line_oriented(console) as print_stdout:
        for path in result.output_paths:
            print_stdout(f"Wrote {os.path.relpath(path, buildroot.path)}")
    return Binary(exit_code=0)
def single_target_run(
    self,
    *,
    console: MockConsole,
    program_text: bytes,
    address_spec: str,
) -> Run:
    workspace = Workspace(self.scheduler)
    interactive_runner = InteractiveRunner(self.scheduler)
    BuildRoot().path = self.build_root
    res = run_rule(
        run,
        rule_args=[
            console,
            workspace,
            interactive_runner,
            BuildRoot(),
            Addresses([Address.parse(address_spec)]),
            MockOptions(args=[]),
        ],
        mock_gets=[
            MockGet(
                product_type=CreatedBinary,
                subject_type=Address,
                mock=lambda _: self.create_mock_binary(program_text),
            ),
        ],
    )
    return cast(Run, res)
def single_target_run(
    self,
    *,
    console: MockConsole,
    program_text: bytes,
    address_spec: str,
) -> Run:
    workspace = Workspace(self.scheduler)
    interactive_runner = InteractiveRunner(self.scheduler)
    address = Address.parse(address_spec)
    bfa = BuildFileAddress(
        build_file=None,
        target_name=address.target_name,
        rel_path=f'{address.spec_path}/BUILD',
    )
    BuildRoot().path = self.build_root
    res = run_rule(
        run,
        rule_args=[
            console,
            workspace,
            interactive_runner,
            BuildRoot(),
            bfa,
            MockOptions(args=[]),
        ],
        mock_gets=[
            MockGet(
                product_type=CreatedBinary,
                subject_type=Address,
                mock=lambda _: self.create_mock_binary(program_text),
            ),
        ],
    )
    return cast(Run, res)
def run_typecheck_rule(
    *,
    request_types: Sequence[Type[CheckRequest]],
    targets: list[Target],
    only: list[str] | None = None,
) -> Tuple[int, str]:
    union_membership = UnionMembership({CheckRequest: request_types})
    check_subsystem = create_subsystem(CheckSubsystem, only=only or [])
    with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
        rule_runner = RuleRunner()
        result: Check = run_rule_with_mocks(
            check,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Targets(targets),
                DistDir(relpath=Path("dist")),
                union_membership,
                check_subsystem,
            ],
            mock_gets=[
                MockGet(
                    output_type=CheckResults,
                    input_type=CheckRequest,
                    mock=lambda field_set_collection: field_set_collection.check_results,
                ),
            ],
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
async def bsp_workspace_build_targets(
    _: WorkspaceBuildTargetsParams,
    bsp_build_targets: BSPBuildTargets,
    workspace: Workspace,
) -> WorkspaceBuildTargetsResult:
    bsp_target_results = await MultiGet(
        Get(GenerateOneBSPBuildTargetResult, GenerateOneBSPBuildTargetRequest(target_internal))
        for target_internal in bsp_build_targets.targets_mapping.values()
    )
    digest = await Get(Digest, MergeDigests([r.digest for r in bsp_target_results]))
    if digest != EMPTY_DIGEST:
        workspace.write_digest(digest, path_prefix=".pants.d/bsp")

    return WorkspaceBuildTargetsResult(
        targets=tuple(r.build_target for r in bsp_target_results),
    )
def run_goal_rule(
    self,
    goal: Type[Goal],
    *,
    global_args: Iterable[str] | None = None,
    args: Iterable[str] | None = None,
    env: Mapping[str, str] | None = None,
    env_inherit: set[str] | None = None,
) -> GoalRuleResult:
    merged_args = (*(global_args or []), goal.name, *(args or []))
    self.set_options(merged_args, env=env, env_inherit=env_inherit)

    raw_specs = self.options_bootstrapper.full_options_for_scopes(
        [GlobalOptions.get_scope_info(), goal.subsystem_cls.get_scope_info()]
    ).specs
    specs = SpecsParser(self.build_root).parse_specs(raw_specs)

    stdout, stderr = StringIO(), StringIO()
    console = Console(stdout=stdout, stderr=stderr)

    exit_code = self.scheduler.run_goal_rule(
        goal,
        Params(
            specs,
            console,
            Workspace(self.scheduler),
            InteractiveRunner(self.scheduler),
        ),
    )

    console.flush()
    return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue())
def run_goal_rule(
    self,
    goal: Type[Goal],
    *,
    global_args: Optional[Iterable[str]] = None,
    args: Optional[Iterable[str]] = None,
    env: Optional[Mapping[str, str]] = None,
) -> GoalRuleResult:
    options_bootstrapper = create_options_bootstrapper(
        args=(*(global_args or []), goal.name, *(args or [])),
        env=env,
    )

    raw_specs = options_bootstrapper.get_full_options(
        [*GlobalOptions.known_scope_infos(), *goal.subsystem_cls.known_scope_infos()]
    ).specs
    specs = SpecsParser(self.build_root).parse_specs(raw_specs)

    stdout, stderr = StringIO(), StringIO()
    console = Console(stdout=stdout, stderr=stderr)

    exit_code = self.scheduler.run_goal_rule(
        goal,
        Params(
            specs,
            console,
            options_bootstrapper,
            Workspace(self.scheduler),
            InteractiveRunner(self.scheduler),
        ),
    )

    console.flush()
    return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue())
async def create_awslambda(
    console: Console,
    options: AWSLambdaOptions,
    distdir: DistDir,
    buildroot: BuildRoot,
    workspace: Workspace,
) -> AWSLambdaGoal:
    targets_to_valid_configs = await Get[TargetsToValidConfigurations](
        TargetsToValidConfigurationsRequest(
            AWSLambdaConfiguration,
            goal_description=f"the `{options.name}` goal",
            error_if_no_valid_targets=True,
        )
    )
    awslambdas = await MultiGet(
        Get[CreatedAWSLambda](AWSLambdaConfiguration, config)
        for config in targets_to_valid_configs.configurations
    )
    merged_digest = await Get[Digest](
        DirectoriesToMerge(tuple(awslambda.digest for awslambda in awslambdas))
    )
    result = workspace.materialize_directory(
        DirectoryToMaterialize(merged_digest, path_prefix=str(distdir.relpath))
    )
    with options.line_oriented(console) as print_stdout:
        for awslambda, path in zip(awslambdas, result.output_paths):
            print_stdout(f"Wrote code bundle to {os.path.relpath(path, buildroot.path)}")
            print_stdout(f"  Runtime: {awslambda.runtime}")
            print_stdout(f"  Handler: {awslambda.handler}")
            print_stdout("")
    return AWSLambdaGoal(exit_code=0)
def run_console_rules(self, options_bootstrapper, goals, target_roots):
    """Runs @console_rules sequentially and interactively by requesting their implicit Goal
    products.

    For retryable failures, raises scheduler.ExecutionError.

    :param list goals: The list of requested goal names as passed on the commandline.
    :param TargetRoots target_roots: The target roots of the request.
    :returns: An exit code.
    """
    subject = target_roots.specs
    console = Console(
        use_colors=options_bootstrapper.bootstrap_options.for_global_scope().colors
    )
    workspace = Workspace(self.scheduler_session)
    interactive_runner = InteractiveRunner(self.scheduler_session)

    for goal in goals:
        goal_product = self.goal_map[goal]
        params = Params(subject, options_bootstrapper, console, workspace, interactive_runner)
        logger.debug(f'requesting {goal_product} to satisfy execution of `{goal}` goal')
        try:
            exit_code = self.scheduler_session.run_console_rule(goal_product, params)
        finally:
            console.flush()
        if exit_code != PANTS_SUCCEEDED_EXIT_CODE:
            return exit_code

    return PANTS_SUCCEEDED_EXIT_CODE
def run_fmt_rule(self, *, targets: List[HydratedTarget]) -> Tuple[Fmt, str]:
    result_digest = self.request_single_product(
        Digest,
        InputFilesContent(
            [FileContent(path=str(self.formatted_file), content=self.formatted_content.encode())]
        ),
    )
    console = MockConsole(use_colors=False)
    result: Fmt = run_rule(
        fmt,
        rule_args=[
            console,
            HydratedTargets(targets),
            Workspace(self.scheduler),
            UnionMembership(union_rules={FormatTarget: [PythonTargetAdaptor]}),
        ],
        mock_gets=[
            MockGet(
                product_type=AggregatedFmtResults,
                subject_type=PythonTargetAdaptor,
                mock=lambda adaptor: AggregatedFmtResults(
                    (
                        FmtResult(
                            digest=result_digest,
                            stdout=f"Formatted `{adaptor.name}`",
                            stderr="",
                        ),
                    ),
                    combined_digest=result_digest,
                ),
            ),
            MockGet(
                product_type=Digest,
                subject_type=DirectoriesToMerge,
                mock=lambda _: result_digest,
            ),
        ],
    )
    return result, console.stdout.getvalue()
async def workspace_console_rule(
    console: Console, workspace: Workspace, msg: MessageToConsoleRule
) -> MockWorkspaceGoal:
    digest = await Get(Digest, InputFilesContent, msg.input_files_content)
    output = workspace.materialize_directory(DirectoryToMaterialize(digest))
    console.print_stdout(output.output_paths[0], end='')
    return MockWorkspaceGoal(exit_code=0)
def run_lint_rule(
    rule_runner: RuleRunner,
    *,
    lint_request_types: List[Type[LintRequest]],
    targets: List[Target],
    per_file_caching: bool,
) -> Tuple[int, str]:
    with mock_console(rule_runner.options_bootstrapper) as (console, stdio_reader):
        union_membership = UnionMembership({LintRequest: lint_request_types})
        result: Lint = run_rule_with_mocks(
            lint,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Targets(targets),
                create_goal_subsystem(
                    LintSubsystem,
                    per_file_caching=per_file_caching,
                    per_target_caching=False,
                ),
                union_membership,
                DistDir(relpath=Path("dist")),
            ],
            mock_gets=[
                MockGet(
                    output_type=LintResults,
                    input_type=LintRequest,
                    mock=lambda field_set_collection: field_set_collection.lint_results,
                )
            ],
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
def workspace_console_rule(
    console: Console, workspace: Workspace, msg: MessageToConsoleRule
) -> MockWorkspaceGoal:
    digest = yield Get(Digest, InputFilesContent, msg.input_files_content)
    output = workspace.materialize_directories(
        (DirectoryToMaterialize(path=msg.tmp_dir, directory_digest=digest),)
    )
    output_path = output.dependencies[0].output_paths[0]
    console.print_stdout(str(Path(msg.tmp_dir, output_path)), end='')
    yield MockWorkspaceGoal(exit_code=0)
async def run_repl(
    console: Console,
    workspace: Workspace,
    runner: InteractiveRunner,
    options: ReplOptions,
    transitive_targets: TransitiveTargets,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Repl:
    default_repl = "python"
    repl_shell_name = cast(str, options.values.shell) or default_repl

    implementations: Dict[str, Type[ReplImplementation]] = {
        impl.name: impl for impl in union_membership[ReplImplementation]
    }
    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(implementations.keys())
        console.print_stderr(
            f"{repr(repl_shell_name)} is not a registered REPL. Available REPLs (which may "
            f"be specified through the option `--repl-shell`): {available}"
        )
        return Repl(-1)

    repl_impl = repl_implementation_cls(
        targets=Targets(
            tgt for tgt in transitive_targets.closure if repl_implementation_cls.is_valid(tgt)
        )
    )
    repl_binary = await Get[ReplBinary](ReplImplementation, repl_impl)

    with temporary_dir(
        root_dir=global_options.options.pants_workdir, cleanup=False
    ) as tmpdir:
        path_relative_to_build_root = PurePath(tmpdir).relative_to(build_root.path).as_posix()
        workspace.materialize_directory(
            DirectoryToMaterialize(repl_binary.digest, path_prefix=path_relative_to_build_root)
        )

        full_path = PurePath(tmpdir, repl_binary.binary_name).as_posix()
        run_request = InteractiveProcessRequest(
            argv=(full_path,),
            run_in_workspace=True,
        )

    result = runner.run_local_interactive_process(run_request)
    return Repl(result.process_exit_code)
async def run_go_resolve(targets: UnexpandedTargets, workspace: Workspace) -> GoResolveGoal:
    # TODO: Use MultiGet to resolve the go_module targets.
    # TODO: Combine all of the go.sum's into a single Digest to write.
    for target in targets:
        if target.has_field(GoModuleSources):
            resolved_go_module = await Get(
                ResolvedGoModule, ResolveGoModuleRequest(target.address)
            )
            # TODO: Only update the files if they actually changed.
            workspace.write_digest(
                resolved_go_module.digest, path_prefix=target.address.spec_path
            )
            logger.info(f"{target.address}: Updated go.mod and go.sum.\n")
        else:
            logger.info(f"{target.address}: Skipping because target is not a `go_module`.\n")
    return GoResolveGoal(exit_code=0)
async def create_binary(workspace: Workspace, dist_dir: DistDir) -> Binary:
    targets_to_valid_field_sets = await Get(
        TargetsToValidFieldSets,
        TargetsToValidFieldSetsRequest(
            BinaryFieldSet,
            goal_description="the `binary` goal",
            error_if_no_valid_targets=True,
        ),
    )
    binaries = await MultiGet(
        Get(CreatedBinary, BinaryFieldSet, field_set)
        for field_set in targets_to_valid_field_sets.field_sets
    )
    merged_snapshot = await Get(Snapshot, MergeDigests(binary.digest for binary in binaries))
    workspace.write_digest(merged_snapshot.digest, path_prefix=str(dist_dir.relpath))
    for path in merged_snapshot.files:
        logger.info(f"Wrote {dist_dir.relpath / path}")
    return Binary(exit_code=0)
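# A small stdlib-only sketch (hypothetical values) of the path reporting in the rule above:
# the merged snapshot's relative file paths are joined onto the dist directory's relative
# path before being logged. The dist path and file names here are illustrative assumptions.
from pathlib import PurePosixPath

dist_relpath = PurePosixPath("dist")                 # hypothetical DistDir.relpath
snapshot_files = ("my_app.pex", "tools/cli.pex")     # hypothetical Snapshot.files
logged = [f"Wrote {dist_relpath / path}" for path in snapshot_files]
assert logged == ["Wrote dist/my_app.pex", "Wrote dist/tools/cli.pex"]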
async def tailor(
    tailor_subsystem: TailorSubsystem,
    console: Console,
    workspace: Workspace,
    union_membership: UnionMembership,
    specs: Specs,
) -> Tailor:
    search_paths = PutativeTargetsSearchPaths(specs_to_dirs(specs))
    putative_target_request_types: Iterable[type[PutativeTargetsRequest]] = union_membership[
        PutativeTargetsRequest
    ]
    putative_targets_results = await MultiGet(
        Get(PutativeTargets, PutativeTargetsRequest, req_type(search_paths))
        for req_type in putative_target_request_types
    )
    putative_targets = PutativeTargets.merge(putative_targets_results)
    putative_targets = PutativeTargets(
        pt.realias(tailor_subsystem.alias_for(pt.type_alias)) for pt in putative_targets
    )
    fixed_names_ptgts = await Get(UniquelyNamedPutativeTargets, PutativeTargets, putative_targets)
    fixed_sources_ptgts = await MultiGet(
        Get(DisjointSourcePutativeTarget, PutativeTarget, ptgt)
        for ptgt in fixed_names_ptgts.putative_targets
    )
    ptgts = [dspt.putative_target for dspt in fixed_sources_ptgts]

    if ptgts:
        edited_build_files = await Get(
            EditedBuildFiles,
            EditBuildFilesRequest(
                PutativeTargets(ptgts),
                tailor_subsystem.build_file_name,
                tailor_subsystem.build_file_header,
                tailor_subsystem.build_file_indent,
            ),
        )
        updated_build_files = set(edited_build_files.updated_paths)
        workspace.write_digest(edited_build_files.digest)
        ptgts_by_build_file = group_by_build_file(tailor_subsystem.build_file_name, ptgts)
        for build_file_path, ptgts in ptgts_by_build_file.items():
            verb = "Updated" if build_file_path in updated_build_files else "Created"
            console.print_stdout(f"{verb} {console.blue(build_file_path)}:")
            for ptgt in ptgts:
                console.print_stdout(
                    f"  - Added {console.green(ptgt.type_alias)} target "
                    f"{console.cyan(ptgt.name)}"
                )
    return Tailor(0)
async def internal_render_test_lockfile_fixtures(
    rendered_fixtures: RenderedJVMLockfileFixtures,
    workspace: Workspace,
    console: Console,
) -> InternalGenerateTestLockfileFixturesGoal:
    if not rendered_fixtures:
        console.write_stdout("No test lockfile fixtures found.\n")
        return InternalGenerateTestLockfileFixturesGoal(exit_code=0)

    digest_contents = [
        FileContent(rendered_fixture.path, rendered_fixture.content)
        for rendered_fixture in rendered_fixtures
    ]
    snapshot = await Get(Snapshot, CreateDigest(digest_contents))
    console.write_stdout(f"Writing test lockfile fixtures: {snapshot.files}\n")
    workspace.write_digest(snapshot.digest)
    return InternalGenerateTestLockfileFixturesGoal(exit_code=0)
async def run_repl(
    console: Console,
    workspace: Workspace,
    interactive_runner: InteractiveRunner,
    repl_subsystem: ReplSubsystem,
    all_specified_addresses: Addresses,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    global_options: GlobalOptions,
) -> Repl:
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(all_specified_addresses)
    )

    # TODO: When we support multiple languages, detect the default repl to use based
    # on the targets. For now we default to the python repl.
    repl_shell_name = repl_subsystem.shell or "python"
    implementations: Dict[str, Type[ReplImplementation]] = {
        impl.name: impl for impl in union_membership[ReplImplementation]
    }
    repl_implementation_cls = implementations.get(repl_shell_name)
    if repl_implementation_cls is None:
        available = sorted(implementations.keys())
        console.print_stderr(
            f"{repr(repl_shell_name)} is not a registered REPL. Available REPLs (which may "
            f"be specified through the option `--repl-shell`): {available}"
        )
        return Repl(-1)

    with temporary_dir(
        root_dir=global_options.options.pants_workdir, cleanup=False
    ) as tmpdir:
        repl_impl = repl_implementation_cls(
            targets=Targets(transitive_targets.closure), chroot=tmpdir
        )
        request = await Get(ReplRequest, ReplImplementation, repl_impl)
        workspace.write_digest(
            request.digest,
            path_prefix=PurePath(tmpdir).relative_to(build_root.path).as_posix(),
        )
        result = interactive_runner.run(
            InteractiveProcess(
                argv=request.args,
                env=request.extra_env,
                run_in_workspace=True,
                hermetic_env=False,
            )
        )

    return Repl(result.exit_code)
async def handle_bsp_scalac_options_request(
    request: HandleScalacOptionsRequest,
    build_root: BuildRoot,
    workspace: Workspace,
) -> HandleScalacOptionsResult:
    bsp_target = await Get(BSPBuildTargetInternal, BuildTargetIdentifier, request.bsp_target_id)
    targets = await Get(Targets, AddressSpecs, bsp_target.specs.address_specs)
    coarsened_targets = await Get(CoarsenedTargets, Addresses(tgt.address for tgt in targets))
    resolve = await Get(CoursierResolveKey, CoarsenedTargets, coarsened_targets)
    lockfile = await Get(CoursierResolvedLockfile, CoursierResolveKey, resolve)

    resolve_digest = await Get(
        Digest,
        CreateDigest(
            [FileEntry(entry.file_name, entry.file_digest) for entry in lockfile.entries]
        ),
    )
    resolve_digest = await Get(
        Digest, AddPrefix(resolve_digest, f"jvm/resolves/{resolve.name}/lib")
    )

    workspace.write_digest(resolve_digest, path_prefix=".pants.d/bsp")

    classpath = [
        build_root.pathlib_path.joinpath(
            f".pants.d/bsp/jvm/resolves/{resolve.name}/lib/{entry.file_name}"
        ).as_uri()
        for entry in lockfile.entries
    ]

    return HandleScalacOptionsResult(
        ScalacOptionsItem(
            target=request.bsp_target_id,
            options=(),
            classpath=tuple(classpath),
            class_directory=build_root.pathlib_path.joinpath(
                f".pants.d/bsp/jvm/resolves/{resolve.name}/classes"
            ).as_uri(),
        )
    )
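# A minimal stdlib-only sketch (hypothetical build root, resolve name, and jar name) of how
# the classpath URIs above are derived: each resolved jar written under `.pants.d/bsp` is
# reported back to the BSP client as a `file://` URI via `PurePath.as_uri()`.
from pathlib import PurePosixPath

build_root = PurePosixPath("/repo")                                   # hypothetical build root
resolve_name, file_name = "jvm-default", "scala-library-2.13.8.jar"   # hypothetical lockfile entry
uri = build_root.joinpath(
    f".pants.d/bsp/jvm/resolves/{resolve_name}/lib/{file_name}"
).as_uri()
assert uri == "file:///repo/.pants.d/bsp/jvm/resolves/jvm-default/lib/scala-library-2.13.8.jar"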