def test_streaming_workunits_expanded_specs(run_tracker: RunTracker) -> None:
    rule_runner = RuleRunner(
        target_types=[PythonLibrary],
        rules=[QueryRule(ProcessResult, (Process,))],
    )
    rule_runner.set_options(["--backend-packages=pants.backend.python"])

    rule_runner.create_file("src/python/somefiles/BUILD", "python_library()")
    rule_runner.create_file("src/python/somefiles/a.py", "print('')")
    rule_runner.create_file("src/python/somefiles/b.py", "print('')")
    rule_runner.create_file("src/python/others/BUILD", "python_library()")
    rule_runner.create_file("src/python/others/a.py", "print('')")
    rule_runner.create_file("src/python/others/b.py", "print('')")

    specs = SpecsParser(get_buildroot()).parse_specs(
        ["src/python/somefiles::", "src/python/others/b.py"]
    )

    def callback(**kwargs) -> None:
        context = kwargs["context"]
        assert isinstance(context, StreamingWorkunitContext)

        expanded = context.get_expanded_specs()
        targets = expanded.targets

        # The directory spec expands to every file owned by the target; the
        # file spec stays as a single entry.
        assert len(targets.keys()) == 2
        assert targets["src/python/others/b.py"] == [TargetInfo(filename="src/python/others/b.py")]
        assert set(targets["src/python/somefiles"]) == {
            TargetInfo(filename="src/python/somefiles/a.py"),
            TargetInfo(filename="src/python/somefiles/b.py"),
        }

    handler = StreamingWorkunitHandler(
        scheduler=rule_runner.scheduler,
        run_tracker=run_tracker,
        callbacks=[callback],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
        specs=specs,
        options_bootstrapper=create_options_bootstrapper(
            ["--backend-packages=pants.backend.python"]
        ),
    )
    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
    )
    with handler.session():
        rule_runner.request(ProcessResult, [stdout_process])
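
# For illustration: the callback contract above is any callable accepting keyword
# arguments. A minimal sketch of a standalone callback (log_expanded_targets is a
# hypothetical name, not part of Pants), using only the "context" kwarg and the
# get_expanded_specs() API exercised by the test:
def log_expanded_targets(**kwargs) -> None:
    context = kwargs["context"]
    # .targets maps each address to the TargetInfo entries (one per owned source
    # file) that the spec expanded to.
    for address, target_infos in context.get_expanded_specs().targets.items():
        print(address, sorted(info.filename for info in target_infos))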
def run(self, start_time: float) -> ExitCode:
    spec_parser = SpecsParser(get_buildroot())
    specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
    self.run_tracker.start(run_start_time=start_time, specs=specs)

    with maybe_profiled(self.profile_path):
        global_options = self.options.for_global_scope()

        if self.options.help_request:
            return self._print_help(self.options.help_request)

        streaming_reporter = StreamingWorkunitHandler(
            self.graph_session.scheduler_session,
            run_tracker=self.run_tracker,
            specs=self.specs,
            options_bootstrapper=self.options_bootstrapper,
            callbacks=self._get_workunits_callbacks(),
            report_interval_seconds=global_options.streaming_workunits_report_interval,
        )

        goals = tuple(self.options.goals)
        with streaming_reporter.session():
            if not goals:
                return PANTS_SUCCEEDED_EXIT_CODE

            # Run the requested goals, converting any uncaught exception into a
            # failed exit code instead of crashing the runner.
            engine_result = PANTS_FAILED_EXIT_CODE
            try:
                engine_result = self._perform_run(goals)
            except Exception as e:
                ExceptionSink.log_exception(e)

            metrics = self.graph_session.scheduler_session.metrics()
            self.run_tracker.set_pantsd_scheduler_metrics(metrics)
            self.run_tracker.end_run(engine_result)

        return engine_result
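
# For reference, maybe_profiled above wraps the run in a profiler only when a
# profile path is set. A minimal sketch of that pattern using the stdlib
# (an illustration of the technique, not the Pants implementation):
import contextlib
import cProfile

@contextlib.contextmanager
def maybe_profiled_sketch(profile_path):
    # No-op when profiling was not requested.
    if not profile_path:
        yield
        return
    profiler = cProfile.Profile()
    profiler.enable()
    try:
        yield
    finally:
        profiler.disable()
        # Persist stats so they can be inspected later, e.g. with pstats.
        profiler.dump_stats(profile_path)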
def test_process_digests_on_streaming_workunits(
    rule_runner: RuleRunner, run_tracker: RunTracker
) -> None:
    scheduler = rule_runner.scheduler

    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker.add],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
    )
    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
    )
    with handler.session():
        result = rule_runner.request(ProcessResult, [stdout_process])

    assert tracker.finished
    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    process_workunit = next(
        item for item in finished if item["name"] == "multi_platform_process-running"
    )
    assert process_workunit is not None
    stdout_digest = process_workunit["artifacts"]["stdout_digest"]
    stderr_digest = process_workunit["artifacts"]["stderr_digest"]

    # A process that writes only to stdout should report an empty stderr digest.
    assert result.stdout == b"stdout output\n"
    assert stderr_digest == EMPTY_FILE_DIGEST
    assert stdout_digest.serialized_bytes_length == len(result.stdout)

    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker.add],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
    )
    stderr_process = Process(
        argv=("/bin/bash", "-c", "1>&2 /bin/echo 'stderr output'"), description="Stderr process"
    )
    with handler.session():
        result = rule_runner.request(ProcessResult, [stderr_process])

    assert tracker.finished
    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    process_workunit = next(
        item for item in finished if item["name"] == "multi_platform_process-running"
    )
    assert process_workunit is not None
    stdout_digest = process_workunit["artifacts"]["stdout_digest"]
    stderr_digest = process_workunit["artifacts"]["stderr_digest"]

    # Conversely, a stderr-only process should report an empty stdout digest.
    assert result.stderr == b"stderr output\n"
    assert stdout_digest == EMPTY_FILE_DIGEST
    assert stderr_digest.serialized_bytes_length == len(result.stderr)
    assert process_workunit["metadata"]["exit_code"] == 0

    try:
        scheduler.ensure_remote_has_recursive([stdout_digest, stderr_digest])
    except Exception as e:
        # This is the exception message we should expect from invoking
        # ensure_remote_has_recursive() in Rust when no remote is configured.
        assert str(e) == "Cannot ensure remote has blobs without a remote"

    byte_outputs = scheduler.single_file_digests_to_bytes([stdout_digest, stderr_digest])
    assert byte_outputs[0] == result.stdout
    assert byte_outputs[1] == result.stderr
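
# The WorkunitTracker used above is a small test helper that collects what the
# handler delivers. A minimal sketch of a compatible tracker, assuming the
# handler invokes callbacks with "completed_workunits" and "finished" keyword
# arguments (an assumption; these tests only demonstrate the "context" kwarg):
class WorkunitTrackerSketch:
    def __init__(self) -> None:
        self.finished = False
        self.finished_workunit_chunks: list[list[dict]] = []

    def add(self, **kwargs) -> None:
        if kwargs.get("finished"):
            self.finished = True
        # Each invocation delivers one chunk of completed workunit dicts.
        completed = kwargs.get("completed_workunits")
        if completed:
            self.finished_workunit_chunks.append(list(completed))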