def _print_header(cls, states, threads):
    """Print a timestamped header describing the current pipeline state.

    Emits the current date/time, a one-line summary of node states and
    thread usage, and — when logging to a file — the log-file location.
    """
    # Use explicit %Y-%m-%d %H:%M:%S rather than the "%F %T" shorthand;
    # the shorthand codes are platform strftime extensions and are not
    # available everywhere (e.g. Windows).
    print_msg(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    print_msg("Pipeline; %s (press 'h' for help):"
              % cls._describe_states(states, threads))

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug(" Log-file located at %r" % (logfile,))
def _print_header(cls, states, threads):
    """Print a dated header line summarizing pipeline progress.

    Shows the current timestamp, the node-state/thread summary produced
    by `_describe_states`, and the log-file path if one is in use.
    """
    # "%F %T" is a non-portable strftime shorthand (missing on Windows);
    # spell out the equivalent explicit format instead.
    print_msg(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    print_msg("Pipeline; %s (press 'h' for help):"
              % cls._describe_states(states, threads))

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug(" Log-file located at %r" % (logfile,))
def finalize(self):
    """Called by the pipeline at the termination of a run. By default,
    this function prints the location of the log-file if one was created
    during the run (e.g. if there were errors), and a summary of all
    nodes.
    """
    # Guard both timestamps: the previous expression
    # '(end or 0) - (start or 0)' produced a large negative runtime
    # whenever only the start time had been recorded.
    if self._start_time is not None and self._end_time is not None:
        runtime = self._end_time - self._start_time
    else:
        runtime = 0

    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
    print_info()

    # Per-state node counts plus total runtime, rendered as a table.
    rows = [(" Number of nodes:", sum(self.states)),
            (" Number of done nodes:", self.states[self.DONE]),
            (" Number of runable nodes:", self.states[self.RUNABLE]),
            (" Number of queued nodes:", self.states[self.QUEUED]),
            (" Number of outdated nodes:", self.states[self.OUTDATED]),
            (" Number of failed nodes:", self.states[self.ERROR]),
            (" Pipeline runtime:", _fmt_runtime(round(runtime)))]

    for line in text.padded_table(rows):
        print_info(line)

    print_info()
    print_info("Use --list-output-files to view status of output files.")

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
    print_info()
def finalize(self):
    """Called by the pipeline at the termination of a run. By default,
    this function prints the location of the log-file if one was created
    during the run (e.g. if there were errors), and a summary of all
    nodes.
    """
    # Compute the runtime only when both timestamps exist; the old
    # '(end or 0) - (start or 0)' form went hugely negative if the end
    # time was never set (e.g. on an aborted run).
    if self._start_time is None or self._end_time is None:
        runtime = 0
    else:
        runtime = self._end_time - self._start_time

    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
    print_info()

    # Summary table: node counts per state, then the total runtime.
    rows = [(" Number of nodes:", sum(self.states)),
            (" Number of done nodes:", self.states[self.DONE]),
            (" Number of runable nodes:", self.states[self.RUNABLE]),
            (" Number of queued nodes:", self.states[self.QUEUED]),
            (" Number of outdated nodes:", self.states[self.OUTDATED]),
            (" Number of failed nodes:", self.states[self.ERROR]),
            (" Pipeline runtime:", _fmt_runtime(round(runtime)))]

    for line in text.padded_table(rows):
        print_info(line)

    print_info()
    print_info("Use --list-output-files to view status of output files.")

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
    print_info()
def _print_summary(self):
    """Prints a summary of the pipeline progress."""
    # "%H:%M:%S" instead of the "%T" shorthand; "%T" is a platform
    # strftime extension and is unavailable on e.g. Windows.
    time_label = datetime.datetime.now().strftime("%H:%M:%S")
    description = self._describe_states(self.states, self.threads)
    print_msg("\n%s Pipeline: %s" % (time_label, description))

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
def _print_summary(self):
    """Prints a summary of the pipeline progress."""
    # Spell out the time format explicitly; the "%T" shorthand is not a
    # portable strftime code (missing on Windows).
    timestamp = datetime.datetime.now().strftime("%H:%M:%S")
    summary = self._describe_states(self.states, self.threads)
    print_msg("\n%s Pipeline: %s" % (timestamp, summary))

    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
def finalize(self):
    """Called by the pipeline at the termination of a run.

    Reports the location of the log-file, if one was created during the
    run (e.g. if there were errors), followed by the final status.
    """
    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))

    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
def finalize(self):
    """Invoked by the pipeline once a run has terminated; announces the
    log-file path (when a log-file was created, e.g. due to errors) and
    then the overall outcome of the run.
    """
    path = pypeline.logger.get_logfile()
    if path:
        print_debug("Log-file located at %r" % (path,))

    had_errors = bool(self.states[self.ERROR])
    if had_errors:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
def flush(self):
    """Print an in-place ('\\r'-overwritten) one-line progress message.

    Shows the current time, the node-state summary, and the elapsed
    runtime; additionally announces the log-file path once whenever a
    new error has been recorded since the last flush.
    """
    # "%H:%M:%S" instead of the non-portable "%T" strftime shorthand
    # (unsupported on e.g. Windows).
    time_label = datetime.datetime.now().strftime("%H:%M:%S")
    runtime = _fmt_runtime(int(time.time() - self._starting_time))
    description = self._describe_states(self.states, self.threads)
    message = "%s Pipeline: %s in %s " % (time_label, description, runtime)

    # Pad to the longest line printed so far, so that the '\r' overwrite
    # fully erases a previous, longer message.
    self._max_len = max(len(message), self._max_len)
    print_msg("\r%s" % (message.ljust(self._max_len),), end="")

    logfile = pypeline.logger.get_logfile()
    if logfile and self._new_error:
        # Only announce the log-file once per newly recorded error.
        print_debug("\nLog-file located at %r" % (logfile,))
        self._new_error = False
    sys.stdout.flush()
def flush(self):
    """Redraw the single-line progress display.

    Prints a timestamped state/runtime summary over the previous line
    via '\\r', and reports the log-file location once whenever a new
    error has occurred since the previous flush.
    """
    # Explicit time format; the "%T" shorthand is a platform strftime
    # extension and is not available everywhere (e.g. Windows).
    time_label = datetime.datetime.now().strftime("%H:%M:%S")
    runtime = _fmt_runtime(int(time.time() - self._starting_time))
    description = self._describe_states(self.states, self.threads)
    message = "%s Pipeline: %s in %s " % (time_label, description, runtime)

    # Track the widest message printed so far and left-justify to that
    # width, ensuring the carriage-return overwrite clears stale text.
    self._max_len = max(len(message), self._max_len)
    print_msg("\r%s" % (message.ljust(self._max_len),), end="")

    logfile = pypeline.logger.get_logfile()
    if logfile and self._new_error:
        # Mention the log-file only once per new error.
        print_debug("\nLog-file located at %r" % (logfile,))
        self._new_error = False
    sys.stdout.flush()