def finalize(self):
    """Report the outcome of a completed pipeline run.

    Called by the pipeline when a run terminates.  Prints an overall
    status line (error vs. success), a padded table summarising the
    counts of nodes in each state plus the total runtime, a hint about
    --list-output-files, and finally the location of the log-file if
    one was created during the run (e.g. if there were errors).
    """
    # Missing timestamps are treated as zero so the subtraction is safe.
    elapsed = (self._end_time or 0) - (self._start_time or 0)

    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
    print_info()

    summary = [
        (" Number of nodes:", sum(self.states)),
        (" Number of done nodes:", self.states[self.DONE]),
        (" Number of runable nodes:", self.states[self.RUNABLE]),
        (" Number of queued nodes:", self.states[self.QUEUED]),
        (" Number of outdated nodes:", self.states[self.OUTDATED]),
        (" Number of failed nodes:", self.states[self.ERROR]),
        (" Pipeline runtime:", _fmt_runtime(round(elapsed))),
    ]
    for row in text.padded_table(summary):
        print_info(row)
    print_info()

    print_info("Use --list-output-files to view status of output files.")
    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile, ))
    print_info()
def run(config, args):
    """Prepare a pipeline run by ensuring the temp-root directory exists.

    Attempts to create ``config.temp_root`` (including any intermediate
    directories) if it does not already exist.  On failure, prints an
    error message and returns 1; otherwise returns None and the run may
    proceed.  ``args`` is not used by this part of the function.
    """
    if not os.path.exists(config.temp_root):
        try:
            os.makedirs(config.temp_root)
        except OSError as error:
            # "except E as e" replaces the Python-2-only "except E, e"
            # form; it is valid on both Python 2.6+ and Python 3.
            print_err("ERROR: Could not create temp root:\n\t%s" % (error,))
            return 1
def finalize(self):
    """Called by the pipeline at the termination of a run. By default,
    this function prints the location of the log-file if one was created
    during the run (e.g. if there were errors), and a summary of all nodes.
    """
    # Missing timestamps default to 0 so the subtraction never raises.
    runtime = (self._end_time or 0) - (self._start_time or 0)
    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
    print_info()
    # One (label, count) row per node state, plus the total runtime;
    # text.padded_table aligns the columns for display.
    rows = [(" Number of nodes:", sum(self.states)),
            (" Number of done nodes:", self.states[self.DONE]),
            (" Number of runable nodes:", self.states[self.RUNABLE]),
            (" Number of queued nodes:", self.states[self.QUEUED]),
            (" Number of outdated nodes:", self.states[self.OUTDATED]),
            (" Number of failed nodes:", self.states[self.ERROR]),
            (" Pipeline runtime:", _fmt_runtime(round(runtime)))]
    for line in text.padded_table(rows):
        print_info(line)
    print_info()
    print_info("Use --list-output-files to view status of output files.")
    # Only mention the log-file if one was actually created during the run.
    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
    print_info()
def run(config, args):
    """Prepare a pipeline run by ensuring that the temp-root exists.

    Returns 1 (after printing an error) if ``config.temp_root`` is
    missing and cannot be created; otherwise returns None.
    NOTE(review): this chunk shows only the temp-root setup; presumably
    the remainder of the function follows elsewhere -- confirm against
    the full file.
    """
    if not os.path.exists(config.temp_root):
        try:
            os.makedirs(config.temp_root)
        except OSError, error:  # Python-2-only syntax; file targets Py2
            # Creation may fail due to permissions, an existing file at
            # the path, etc.; the run cannot proceed without a temp dir.
            print_err("ERROR: Could not create temp root:\n\t%s" % (error, ))
            return 1
def main(argv):
    """Entry point: parse the BAM-pipeline configuration from argv.

    A leading positional argument starting with "dry" (e.g. "dry_run",
    "dryrun") switches the configuration into dry-run mode.  Returns 1
    (after printing the error) if the configuration is invalid.
    """
    try:
        config, args = bam_config.parse_config(argv)
        if args and args[0].startswith("dry"):
            config.dry_run = True
    except bam_config.ConfigError as error:
        # "except E as e" replaces the Python-2-only "except E, e"
        # form; it is valid on both Python 2.6+ and Python 3.
        print_err(error)
        return 1
def main(argv):
    """Generate a skeleton BAM-pipeline makefile from SampleSheet files.

    Each path in argv may be a directory (expected to contain a
    ``_FILENAME`` SampleSheet) or a SampleSheet file itself.  Records
    are grouped by SampleID and Index (barcode) and printed as makefile
    entries after a generated header.  Returns 1 if a SampleSheet is
    missing, 0 otherwise.
    """
    options, paths = parse_args(argv)
    records = {}
    for root in paths:
        # A directory implies "<dir>/<_FILENAME>"; otherwise treat the
        # argument as the sheet itself and its parent dir as the root.
        if os.path.isdir(root):
            filename = os.path.join(root, _FILENAME)
        else:
            root, filename = os.path.split(root)[0], root

        if not os.path.exists(filename):
            print_err("ERROR: Could not find SampleSheet file: %r" % filename,
                      file=sys.stderr)
            return 1

        for record in read_alignment_records(filename):
            # Grouping: SampleID -> Index (barcode) -> list of lane records.
            libraries = records.setdefault(record["SampleID"], {})
            barcodes = libraries.setdefault(record["Index"], [])
            record["Lane"] = int(record["Lane"])
            # Glob pattern following Illumina FASTQ naming; {Pair} is a
            # placeholder substituted later by the pipeline.
            path = "%(SampleID)s_%(Index)s_L%(Lane)03i_R{Pair}_*.fastq.gz" % record
            record["Path"] = select_path(os.path.join(root, path))
            barcodes.append(record)

    # A trim_pipeline invocation gets the reduced (non-full) header.
    _print_header(
        timestamp=datetime.datetime.now().isoformat(),
        full_mkfile=(os.path.basename(sys.argv[0]) != "trim_pipeline"),
        sample_tmpl=not bool(records),
        minimal=options.minimal)
    for (sample, libraries) in records.iteritems():  # Python 2 dict API
        print "%s:" % sample
        print " %s:" % sample
        for (library, barcodes) in libraries.iteritems():
            print " %s:" % library
            for record in barcodes:
                print " {FCID}_{Lane}: {Path}".format(**record)
            # Blank-line separators between libraries / samples.
            # NOTE(review): exact nesting of these two bare prints was
            # inferred from output structure -- confirm against the
            # original file's indentation.
            print
        print

    if not argv:
        # No inputs: a template makefile was printed; explain usage.
        print_info(
            "No directories/files specified, standard makefile printed.",
            file=sys.stderr)
        print_info("If the reads have associated %s files, these" % (_FILENAME, ),
                   file=sys.stderr)
        print_info("may be used to generate a preliminary makefile:",
                   file=sys.stderr)
        print_info(" Usage: bam_pipeline mkfile [filename/directory] [...]",
                   file=sys.stderr)
        print_info("Each directory must contain a '%s' file." % _FILENAME,
                   file=sys.stderr)
    else:
        print_info(
            "Makefile printed. Please check for correctness before running pipeline.",
            file=sys.stderr)

    return 0
def main(argv):
    """Entry point: validate the environment and parse configuration.

    Checks the Python and pysam versions before parsing argv.  A leading
    positional argument starting with "dry" (e.g. "dry_run") switches
    the configuration into dry-run mode.  Returns 1 (after printing the
    error) if configuration parsing raises a RuntimeError.
    """
    check_python_version()
    check_pysam_module_version()
    try:
        config, args = parse_config(argv)
        if args and args[0].startswith("dry"):
            config.dry_run = True
    except RuntimeError as error:
        # "except E as e" replaces the Python-2-only "except E, e"
        # form; it is valid on both Python 2.6+ and Python 3.
        print_err(error)
        return 1
def finalize(self):
    """Report the outcome of a completed pipeline run.

    Prints the location of the log-file, if one was created during the
    run (e.g. if there were errors), followed by an overall status line
    indicating whether any nodes failed.
    """
    path = pypeline.logger.get_logfile()
    if path:
        print_debug("Log-file located at %r" % (path, ))

    had_errors = bool(self.states[self.ERROR])
    if had_errors:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
def finalize(self):
    """Called by the pipeline at the termination of a run. By default,
    this function prints the location of the log-file if one was created
    during the run (e.g. if there were errors)."""
    # Only mention the log-file if one was actually created during the run.
    logfile = pypeline.logger.get_logfile()
    if logfile:
        print_debug("Log-file located at %r" % (logfile,))
    # Overall status: any node in the ERROR state downgrades the message.
    if self.states[self.ERROR]:
        print_err("Done; but errors were detected ...")
    else:
        print_info("Done ...")
def main(argv):
    """Generate a skeleton makefile from Illumina SampleSheet files.

    Each path in argv may be a directory (expected to contain a
    ``_FILENAME`` SampleSheet) or a SampleSheet file itself.  Records
    are grouped by SampleID and Index (barcode) and printed as makefile
    entries after a generated header.  Returns 1 if a SampleSheet is
    missing, 0 otherwise.
    """
    options, paths = parse_args(argv)
    records = {}
    for root in paths:
        # A directory implies "<dir>/<_FILENAME>"; otherwise treat the
        # argument as the sheet itself and its parent dir as the root.
        if os.path.isdir(root):
            filename = os.path.join(root, _FILENAME)
        else:
            root, filename = os.path.split(root)[0], root

        if not os.path.exists(filename):
            print_err("ERROR: Could not find SampleSheet file: %r" % filename,
                      file=sys.stderr)
            return 1

        for record in read_alignment_records(filename):
            # Grouping: SampleID -> Index (barcode) -> list of lane records.
            libraries = records.setdefault(record["SampleID"], {})
            barcodes = libraries.setdefault(record["Index"], [])
            record["Lane"] = int(record["Lane"])
            # Glob pattern following Illumina FASTQ naming; {Pair} is a
            # placeholder substituted later by the pipeline.
            path = "%(SampleID)s_%(Index)s_L%(Lane)03i_R{Pair}_*.fastq.gz" \
                % record
            record["Path"] = select_path(os.path.join(root, path))
            barcodes.append(record)

    # A trim_pipeline invocation gets the reduced (non-full) header.
    is_trim_pipeline = os.path.basename(sys.argv[0]) == "trim_pipeline"
    print_header(full_mkfile=not is_trim_pipeline,
                 sample_tmpl=not bool(records),
                 minimal=options.minimal)
    # NOTE(review): the file uses Python-2 APIs (iteritems), so bare
    # print() below requires "from __future__ import print_function" at
    # the top of the file -- confirm that import exists.
    for (sample, libraries) in records.iteritems():
        print("%s:" % sample)
        print(" %s:" % sample)
        for (library, barcodes) in libraries.iteritems():
            print(" %s:" % library)
            for record in barcodes:
                print(" {FCID}_{Lane}: {Path}".format(**record))
            # Blank-line separators between libraries / samples.
            # NOTE(review): exact nesting of these two print() calls was
            # inferred from output structure -- confirm against the
            # original file's indentation.
            print()
        print()

    if not argv:
        # No inputs: a template makefile was printed; explain usage.
        print_info("No directories/files specified, standard makefile printed.",
                   file = sys.stderr)
        print_info("If the reads have associated %s files, these" % (_FILENAME,),
                   file = sys.stderr)
        print_info("may be used to generate a preliminary makefile:",
                   file = sys.stderr)
        print_info(" Usage: bam_pipeline mkfile [filename/directory] [...]",
                   file = sys.stderr)
        print_info("Each directory must contain a '%s' file." % _FILENAME,
                   file = sys.stderr)
    else:
        print_info("Makefile printed. Please check for correctness before running pipeline.",
                   file = sys.stderr)

    return 0
" std.out." % basename) print_info(" -- %s run [...] -- Run pipeline on provided " "makefiles." % basename) def main(argv): check_python_version() check_pysam_module_version() try: config, args = parse_config(argv) if args and args[0].startswith("dry"): config.dry_run = True except RuntimeError, error: print_err(error) return 1 commands = ("makefile", "mkfile", "run", "dry_run", "dry-run", "dryrun") if (len(args) == 0) or (args[0] not in commands): _print_usage() return 1 elif args[0] in ("mkfile", "makefile"): return epicreatemkfile.main(args[1:]) elif not args[1:]: _print_usage() print_err("\nPlease specify at least one makefile!") return 1 return run(config, args[1:]) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
files.add(os.path.abspath(fpath)) else: files.add(os.path.abspath(root_filename)) return (files - mkfiles) - frozenset(pipeline.list_output_files()) def run(config, args): if not os.path.exists(config.temp_root): try: os.makedirs(config.temp_root) except OSError, error: print_err("ERROR: Could not create temp root:\n\t%s" % (error,)) return 1 if not os.access(config.temp_root, os.R_OK | os.W_OK | os.X_OK): print_err("ERROR: Insufficient permissions for temp root: '%s'" % (config.temp_root,)) return 1 # Init worker-threads before reading in any more data pipeline = Pypeline(config) try: print_info("Building BAM pipeline ...", file=sys.stderr) makefiles = read_makefiles(config, args) except (MakefileError, pypeline.yaml.YAMLError, IOError), error: print_err( "Error reading makefiles:", "\n %s:\n " % (error.__class__.__name__,), "\n ".join(str(error).split("\n")), file=sys.stderr, )
def main(argv):
    """Entry point: parse the command-line configuration.

    Returns 1 (after printing the error) if parsing the configuration
    raises a ConfigError.
    """
    try:
        config, args = parse_config(argv)
    except ConfigError as error:
        # "except E as e" replaces the Python-2-only "except E, e"
        # form; it is valid on both Python 2.6+ and Python 3.
        print_err(error)
        return 1
fpath = os.path.join(dirpath, filename) files.add(os.path.abspath(fpath)) return files - pipeline.list_output_files() def main(argv): try: config, args = parse_config(argv) except ConfigError, error: print_err(error) return 1 if not args or ("help" in args): return 0 elif (len(args) < 2) and ("mkfile" not in args and "makefile" not in args): print_err("\nPlease specify at least one makefile!") return 1 commands = select_commands(args.pop(0)) if any((cmd in ("makefile", "mkfile")) for (cmd, _) in commands): return mkfile.main(args[1:]) if not os.path.exists(config.temp_root): try: os.makedirs(config.temp_root) except OSError, error: print_err("ERROR: Could not create temp root:\n\t%s" % (error, )) return 1 if not os.access(config.temp_root, os.R_OK | os.W_OK | os.X_OK): print_err("ERROR: Insufficient permissions for temp root: '%s'" %