def finalize(self):
    """Called by the pipeline at the termination of a run. By default,
    this function prints the location of the log-file if one was created
    during the run (e.g. if there were errors), and a summary of all nodes.
    """
    runtime = (self._end_time or 0) - (self._start_time or 0)

    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")

    print_info()
    rows = [(" Number of nodes:", sum(self.states)),
            (" Number of done nodes:", self.states[self.DONE]),
            (" Number of runable nodes:", self.states[self.RUNABLE]),
            (" Number of queued nodes:", self.states[self.QUEUED]),
            (" Number of outdated nodes:", self.states[self.OUTDATED]),
            (" Number of failed nodes:", self.states[self.ERROR]),
            (" Pipeline runtime:", _fmt_runtime(round(runtime)))]

    for line in text.padded_table(rows):
        print_info(line)

    print_info("\nUse --list-output-files to view status of output files.")

    logfile = paleomix.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))

    print_info()
def _write_areas_of_interest(self, table, rois):
    table.write("# Regions Of Interest:\n")
    rows = [["Genome", "ROI", "Size", "NFeatures", "NIntervals", "Path"]]
    for (_, roi) in sorted(rois.items()):
        rows.append([roi[key] for key in ("Genome", "Name", "Size",
                                          "NFeatures", "NIntervals", "Path")])

    for line in text.padded_table(rows):
        table.write("# %s\n" % (line,))
def _write_genomes(self, table, genomes):
    table.write("# Genomes:\n")
    rows = [["Name", "Label", "Contigs", "Size", "Prefix"]]
    for (_, prefix) in sorted(self._prefixes.items()):
        stats = genomes[prefix["Name"]]
        rows.append((prefix["Name"],
                     prefix.get("Label", "-"),
                     stats["NContigs"],
                     stats["Size"],
                     prefix["Path"]))

    for line in text.padded_table(rows):
        table.write("# %s\n" % (line,))
def main(argv):
    """Main function; takes a list of arguments excluding sys.argv[0]."""
    args = parse_args(argv)

    rows = []
    for line in fileinput.input(args.file):
        rows.append(split_line(line))

    for line in padded_table(rows):
        print(line)

    return 0
def print_table(handle, args, totals):
    lengths = collect_references(args, handle)

    if args.outfile == "-":
        output_handle = sys.stdout
    else:
        output_handle = open(args.outfile, "w")

    with output_handle:
        rows = build_table(args.target_name, totals, lengths)
        output_handle.write(_HEADER % datetime.datetime.now().isoformat())
        output_handle.write("\n")

        for line in padded_table(rows):
            output_handle.write(line)
            output_handle.write("\n")
def write_table(table, filename):
    table = calculate_totals(table)
    rows = build_rows(table)

    if filename == "-":
        output_handle = sys.stdout
    else:
        output_handle = open(filename, "w")

    try:
        output_handle.write(TABLE_HEADER % datetime.datetime.now().isoformat())

        for line in padded_table(rows):
            output_handle.write(line)
            output_handle.write("\n")
    finally:
        if output_handle is not sys.stdout:
            output_handle.close()
def _summarize_pipeline(self, nodegraph):
    states = [0] * nodegraph.NUMBER_OF_STATES
    for node in nodegraph.iterflat():
        states[nodegraph.get_node_state(node)] += 1

    rows = [
        ("Number of nodes:", sum(states)),
        ("Number of done nodes:", states[nodegraph.DONE]),
        ("Number of runable nodes:", states[nodegraph.RUNABLE]),
        ("Number of queued nodes:", states[nodegraph.QUEUED]),
        ("Number of outdated nodes:", states[nodegraph.OUTDATED]),
        ("Number of failed nodes:", states[nodegraph.ERROR]),
    ]

    for message in padded_table(rows):
        self._logger.info(message)

    if states[nodegraph.ERROR]:
        self._logger.warning("Errors were detected while running pipeline")
def _write_tables(self, out, genomes):
    rows = [["Target", "Sample", "Library", "Measure", "Value", "# Description"]]
    for (target, samples) in sorted(self._read_tables(self._prefixes, genomes).iteritems()):
        for (sample, libraries) in sorted(samples.iteritems()):
            for (library, prefixes) in sorted(libraries.iteritems()):
                ordered = [("reads", prefixes.pop("reads"))] if "reads" in prefixes else []
                ordered.extend(sorted(prefixes.items()))

                for (prefix, table) in ordered:
                    table.pop("hits_unique_nts(%s)" % prefix, None)

                    for (key, (value, comment)) in sorted(table.iteritems(),
                                                          key=_measure_ordering):
                        if isinstance(value, numbers.Number) and math.isnan(value):
                            value = "NA"

                        rows.append((target, sample, library, key, value, comment))
                    rows.append("")
                rows.append("")

    for line in text.padded_table(rows):
        out.write("%s\n" % line)
def _padded_table(*args, **kwargs):
    return list(padded_table(*args, **kwargs))
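All of the examples above build a list of rows and feed it to padded_table, then write the resulting lines out one at a time. The stand-alone sketch below approximates that pattern for readers who want to try it without the paleomix codebase: pad_rows is a hypothetical helper written only for this illustration, not the paleomix.common.text implementation, and it mimics just the behaviour the callers rely on, namely that sequence rows are aligned column by column while plain strings (used as separators in _write_tables) pass through unchanged.

def pad_rows(rows):
    # Illustrative stand-in for padded_table (assumption, not the real API).
    # Column widths are computed from the non-string rows; string rows act as
    # pre-formatted separators and are emitted verbatim.
    sized = [row for row in rows if not isinstance(row, str)]
    widths = [max(len(str(row[idx])) for row in sized)
              for idx in range(len(sized[0]))] if sized else []

    for row in rows:
        if isinstance(row, str):
            yield row
        else:
            yield "  ".join(str(field).ljust(width)
                            for (field, width) in zip(row, widths)).rstrip()


if __name__ == "__main__":
    rows = [("Number of nodes:", 421),
            ("Number of done nodes:", 418),
            ("Number of failed nodes:", 3)]

    for line in pad_rows(rows):
        print(line)

Running the sketch prints the three labels left-aligned with the counts starting in the same column, which is the layout the summary and table-writing functions above depend on.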