def _run_and_parse(process, job):
    """Feed stdin to *process*, drain stdout/stderr via reader threads,
    wait for it to finish, and let the job's parser consume the output.

    The queues are always finalized, even if feeding input or parsing fails.
    """
    try:
        # job.*.que can be used by parsers (via Output.read_line() or directly)
        stdout_thread, job.out.que = _start_enqueue_thread(process.stdout)
        stderr_thread, job.err.que = _start_enqueue_thread(process.stderr)
        try:
            process.stdin.write(_get_input_as_string(job))
        except IOError as exc:
            utils.put("\nWarning: passing input to %s failed.\n" % job.name)
            # a broken/closed pipe just means the child stopped reading;
            # anything else is a real error
            if exc.errno not in (errno.EPIPE, errno.EINVAL):
                raise
        process.stdin.close()
        stdout_thread.join()
        stderr_thread.join()
        process.wait()
        # nothing is written to the queues at this point
        # parse what's left in the queues
        job.parse()
    finally:
        # take care of what is left by the parser
        job.out.finish_que()
        job.err.finish_que()
def run_job(self, job, show_progress, new_line=True):
    """Register *job*, run its external program, and log/parse the result.

    Jobs positioned before ``self.from_job`` (1-based) are not executed:
    they are restored from ``self.repl_jobs`` when a same-named pickled
    job is available, otherwise skipped.

    :param job: job object holding args, input, output parsers and data
    :param show_progress: show a live progress line (forced off when
        stdout is not a TTY)
    :param new_line: print the job header on a fresh line; otherwise
        append " / N" to the current one
    """
    # progress display needs a real terminal
    if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
        show_progress = False
    self.jobs.append(job)
    job_num = len(self.jobs)
    if new_line:
        utils.put("\n" + _jobindex_fmt % job_num)
        utils.put_green(_jobname_fmt % job.name)
    else:
        utils.put(" / %d" % job_num)
    sys.stdout.flush()
    utils.log_section(job.name)
    job_idx = len(self.jobs) - 1
    if job_idx < self.from_job - 1:  # from_job is 1-based
        # unpickle or skip
        # repl_jobs presumably holds jobs restored from a previous run's
        # pickle; reuse one only when its name matches -- TODO confirm
        if self.repl_jobs and len(self.repl_jobs) > job_idx:
            old_job = self.repl_jobs[job_idx]
            if old_job.name == job.name:
                job.data = old_job.data
                job = old_job
                utils.put("unpickled")
                utils.log_value("not_run", "unpickled")
                # replace the freshly appended job with the restored one
                self.jobs[-1] = job
            else:
                utils.put("skipped (mismatch)")
                utils.log_value("not_run", "unpickled/mismatch")
        else:
            utils.put("skipped")
            utils.log_value("not_run", "skipped")
        return
    job.started = time.time()
    utils.log_time("start_time", job.started)
    if job.stdin_file:
        utils.log_value("stdin", job.stdin_file)
    elif job.std_input:
        utils.log_value("input", job.std_input)
    utils.log_value("prog", job.args[0])
    utils.log_value("args", " ".join(pipes.quote(a) for a in job.args[1:]))
    #job.args[0] = "true" # for debugging
    try:
        process = Popen(job.args, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                        cwd=self.output_dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise JobError("Program not found: %s" % job.args[0])
        else:
            raise
    # NOTE(review): on dry_run the child has already been spawned and is
    # abandoned here (never waited on) -- confirm this is intended
    if self.dry_run:
        return
    if show_progress:
        # daemon thread printing progress until the event is set below
        event = threading.Event()
        progress_thread = threading.Thread(target=_print_progress,
                                           args=(job, event))
        progress_thread.daemon = True
        progress_thread.start()
    try:
        if job.parser is not None or show_progress:
            # incremental path: reader threads + streaming parse
            _run_and_parse(process, job)
        else:
            # simple path: one-shot communicate(), keep line endings
            job_input = _get_input_as_string(job)
            out, err = process.communicate(input=job_input)
            job.out.lines = out.splitlines(True)
            job.err.lines = err.splitlines(True)
    except KeyboardInterrupt:
        raise JobError("KeyboardInterrupt while running %s" % job.name,
                       note=job.args_as_str())
    finally:
        # always stop the progress thread, even on error
        if show_progress:
            event.set()
            progress_thread.join()
    end_time = time.time()
    job.total_time = end_time - job.started
    utils.log_time("end_time", end_time)
    job.exit_status = process.poll()
    if new_line:
        utils.put(_elapsed_fmt % job.total_time)
    # final parse of whatever the job collected; may return a summary string
    parse_output = job.parse()
    utils.put("%s" % (parse_output or ""))
    if parse_output:
        utils.log_value("info", parse_output)
    if self.enable_logs:
        self._write_logs(job)
    # log parser-collected data ("info" already logged above)
    for k, v in job.data.iteritems():
        if k != "info":
            utils.log_value(k, v)
    if job.exit_status != 0:
        utils.log_value("exit_status", job.exit_status)